Import lib jsoncpp version 1.7.0
@@ -5,7 +5,7 @@ cmake_minimum_required (VERSION 2.8.11)
#add_subdirectory (wolfssl/builders/cmake)
add_subdirectory (json/builders/cmake)
#add_subdirectory (jsoncpp/builders/cmake)
add_subdirectory (jsoncpp/builders/cmake)
add_subdirectory (libubox/builders/cmake)
add_subdirectory (ubus/libubus/builders/cmake)
add_subdirectory (ubus/ubus/builders/cmake)
3P/jsoncpp/.gitattributes: 11 lines (vendored, Normal file)
@@ -0,0 +1,11 @@
* text=auto
*.h text
*.cpp text
*.json text
*.in text
*.sh eol=lf
*.bat eol=crlf
*.vcproj eol=crlf
*.vcxproj eol=crlf
*.sln eol=crlf
devtools/agent_vm* eol=crlf
3P/jsoncpp/.gitignore: 11 lines (vendored)
@@ -34,3 +34,14 @@ CTestTestFile.cmake
cmake_install.cmake
pkg-config/jsoncpp.pc
jsoncpp_lib_static.dir/

# In case someone runs cmake in the root-dir:
/CMakeCache.txt
/Makefile
/include/Makefile
/src/Makefile
/src/jsontestrunner/Makefile
/src/jsontestrunner/jsontestrunner_exe
/src/lib_json/Makefile
/src/test_lib_json/Makefile
/src/test_lib_json/jsoncpp_test
@@ -2,16 +2,42 @@
|
||||
# http://about.travis-ci.org/docs/user/build-configuration/
|
||||
# This file can be validated on:
|
||||
# http://lint.travis-ci.org/
|
||||
before_install: sudo apt-get install cmake
|
||||
# See also
|
||||
# http://stackoverflow.com/questions/22111549/travis-ci-with-clang-3-4-and-c11/30925448#30925448
|
||||
# to allow C++11, though we are not yet building with -std=c++11
|
||||
|
||||
install:
|
||||
# /usr/bin/gcc is 4.6 always, but gcc-X.Y is available.
|
||||
- if [ "$CXX" = "g++" ]; then export CXX="g++-4.9" CC="gcc-4.9"; fi
|
||||
# /usr/bin/clang is our version already, and clang-X.Y does not exist.
|
||||
#- if [ "$CXX" = "clang++" ]; then export CXX="clang++-3.7" CC="clang-3.7"; fi
|
||||
- echo ${PATH}
|
||||
- ls /usr/local
|
||||
- ls /usr/local/bin
|
||||
- export PATH=/usr/local/bin:/usr/bin:${PATH}
|
||||
- echo ${CXX}
|
||||
- ${CXX} --version
|
||||
- which valgrind
|
||||
addons:
|
||||
apt:
|
||||
sources:
|
||||
- ubuntu-toolchain-r-test
|
||||
packages:
|
||||
- gcc-4.9
|
||||
- g++-4.9
|
||||
- clang
|
||||
- valgrind
|
||||
os:
|
||||
- linux
|
||||
language: cpp
|
||||
compiler:
|
||||
- gcc
|
||||
- clang
|
||||
script: cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make && make jsoncpp_check
|
||||
script: ./travis.sh
|
||||
env:
|
||||
matrix:
|
||||
- SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false
|
||||
- SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE
|
||||
notifications:
|
||||
email:
|
||||
- aaronjjacobs@gmail.com
|
||||
email: false
|
||||
sudo: false
|
||||
|
||||
@@ -7,8 +7,11 @@ ENABLE_TESTING()
|
||||
OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
|
||||
OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
|
||||
OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
|
||||
OPTION(JSONCPP_WITH_STRICT_ISO "Issue all the warnings demanded by strict ISO C and ISO C++" ON)
|
||||
OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
|
||||
OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
|
||||
OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
|
||||
OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)
|
||||
|
||||
# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
|
||||
IF(NOT WIN32)
|
||||
@@ -16,18 +19,19 @@ IF(NOT WIN32)
|
||||
SET(CMAKE_BUILD_TYPE Release CACHE STRING
|
||||
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
|
||||
FORCE)
|
||||
ENDIF(NOT CMAKE_BUILD_TYPE)
|
||||
ENDIF(NOT WIN32)
|
||||
ENDIF()
|
||||
ENDIF()
|
||||
|
||||
SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
|
||||
SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")
|
||||
|
||||
SET(RUNTIME_INSTALL_DIR bin
|
||||
CACHE PATH "Install dir for executables and dlls")
|
||||
SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX}
|
||||
SET(ARCHIVE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
|
||||
CACHE PATH "Install dir for static libraries")
|
||||
SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX}
|
||||
SET(LIBRARY_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
|
||||
CACHE PATH "Install dir for shared libraries")
|
||||
SET(INCLUDE_INSTALL_DIR include
|
||||
SET(INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/include
|
||||
CACHE PATH "Install dir for headers")
|
||||
SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake
|
||||
CACHE PATH "Install dir for cmake package config files")
|
||||
@@ -36,7 +40,7 @@ MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PA
|
||||
# Set variable named ${VAR_NAME} to value ${VALUE}
|
||||
FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
|
||||
SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
|
||||
ENDFUNCTION(set_using_dynamic_name)
|
||||
ENDFUNCTION()
|
||||
|
||||
# Extract major, minor, patch from version text
|
||||
# Parse a version string "X.Y.Z" and outputs
|
||||
@@ -52,15 +56,15 @@ MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
|
||||
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
|
||||
ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
|
||||
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
|
||||
ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
|
||||
ENDMACRO(jsoncpp_parse_version)
|
||||
ENDIF()
|
||||
ENDMACRO()
|
||||
|
||||
# Read out version from "version" file
|
||||
#FILE(STRINGS "version" JSONCPP_VERSION)
|
||||
#SET( JSONCPP_VERSION_MAJOR X )
|
||||
#SET( JSONCPP_VERSION_MINOR Y )
|
||||
#SET( JSONCPP_VERSION_PATCH Z )
|
||||
SET( JSONCPP_VERSION 1.6.0 )
|
||||
SET( JSONCPP_VERSION 1.7.0 )
|
||||
jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
|
||||
#IF(NOT JSONCPP_VERSION_FOUND)
|
||||
# MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
|
||||
@@ -80,7 +84,12 @@ macro(UseCompilationWarningAsError)
|
||||
# Only enabled in debug because some old versions of VS STL generate
|
||||
# warnings when compiled in release configuration.
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
|
||||
endif( MSVC )
|
||||
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
|
||||
if (JSONCPP_WITH_STRICT_ISO)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic-errors")
|
||||
endif ()
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
# Include our configuration header
|
||||
@@ -90,34 +99,45 @@ if ( MSVC )
|
||||
# Only enabled in debug because some old versions of VS STL generate
|
||||
# unreachable code warning when compiled in release configuration.
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
|
||||
endif( MSVC )
|
||||
endif()
|
||||
|
||||
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
# using regular Clang or AppleClang
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall")
|
||||
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wconversion -Wshadow -Werror=conversion -Werror=sign-compare")
|
||||
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
# using GCC
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -pedantic")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wconversion -Wshadow -Wextra -Werror=conversion")
|
||||
# not yet ready for -Wsign-conversion
|
||||
|
||||
if (JSONCPP_WITH_STRICT_ISO AND NOT JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic")
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
find_program(CCACHE_FOUND ccache)
|
||||
if(CCACHE_FOUND)
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
|
||||
endif(CCACHE_FOUND)
|
||||
|
||||
IF(JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
UseCompilationWarningAsError()
|
||||
ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
ENDIF()
|
||||
|
||||
IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
|
||||
CONFIGURE_FILE(
|
||||
"pkg-config/jsoncpp.pc.in"
|
||||
"pkg-config/jsoncpp.pc"
|
||||
@ONLY)
|
||||
INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc"
|
||||
INSTALL(FILES "${CMAKE_CURRENT_BINARY_DIR}/pkg-config/jsoncpp.pc"
|
||||
DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig")
|
||||
ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
|
||||
ENDIF()
|
||||
|
||||
IF(JSONCPP_WITH_CMAKE_PACKAGE)
|
||||
INSTALL(EXPORT jsoncpp
|
||||
DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp
|
||||
FILE jsoncppConfig.cmake)
|
||||
ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
|
||||
ENDIF()
|
||||
|
||||
# Build the different applications
|
||||
ADD_SUBDIRECTORY( src )
|
||||
|
||||
@@ -19,12 +19,14 @@ format to store user input files.
|
||||
* `0.y.z` can be used with older compilers.
|
||||
* Major versions maintain binary-compatibility.
|
||||
|
||||
Using JsonCpp in your project
|
||||
# Using JsonCpp in your project
|
||||
-----------------------------
|
||||
The recommended approach to integrating JsonCpp in your project is to build
|
||||
the amalgamated source (a single `.cpp` file) with your own build system. This
|
||||
ensures consistency of compilation flags and ABI compatibility. See the section
|
||||
"Generating amalgamated source and header" for instructions.
|
||||
The recommended approach to integrating JsonCpp in your project is to include
|
||||
the [amalgamated source](#generating-amalgamated-source-and-header) (a single
|
||||
`.cpp` file and two `.h` files) in your project, and compile and build as you
|
||||
would any other source file. This ensures consistency of compilation flags and
|
||||
ABI compatibility, issues which arise when building shared or static
|
||||
libraries. See the next section for instructions.
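For orientation, a minimal usage sketch is shown below (not part of the imported README; it assumes the amalgamated `dist/jsoncpp.cpp` and `dist/json/` headers produced by `amalgamate.py` have been added to the project and that `dist/` is on the include path):

    #include <json/json.h>   // the amalgamated header

    #include <iostream>
    #include <sstream>

    int main() {
        std::istringstream doc("{\"name\": \"jsoncpp\", \"version\": [1, 7, 0]}");

        Json::CharReaderBuilder builder;
        Json::Value root;
        std::string errs;
        if (!Json::parseFromStream(builder, doc, &root, &errs)) {
            std::cerr << errs << std::endl;   // parse errors, one per line
            return 1;
        }
        std::cout << root["name"].asString() << " "
                  << root["version"][0].asInt() << std::endl;
        return 0;
    }

The same code compiles unchanged against a non-amalgamated build; only the include path differs.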
|
||||
|
||||
The `include/` should be added to your compiler include path. Jsoncpp headers
|
||||
should be included as follow:
|
||||
@@ -34,6 +36,31 @@ should be included as follow:
|
||||
If JsonCpp was built as a dynamic library on Windows, then your project needs to
|
||||
define the macro `JSON_DLL`.
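For illustration only, the macro can be supplied on the compiler command line (`/DJSON_DLL`) or, as sketched here, before the first JsonCpp include:

    // Only needed when consuming jsoncpp built as a Windows DLL.
    // With JSON_DLL defined, json/config.h decorates JSON_API with
    // __declspec(dllimport) so the importing project links correctly.
    #define JSON_DLL
    #include <json/json.h>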
|
||||
|
||||
Generating amalgamated source and header
|
||||
----------------------------------------
|
||||
JsonCpp is provided with a script to generate a single header and a single
|
||||
source file to ease inclusion into an existing project. The amalgamated source
|
||||
can be generated at any time by running the following command from the
|
||||
top-directory (this requires Python 2.6):
|
||||
|
||||
python amalgamate.py
|
||||
|
||||
It is possible to specify header name. See the `-h` option for detail.
|
||||
|
||||
By default, the following files are generated:
|
||||
* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
|
||||
* `dist/json/json.h`: corresponding header file for use in your project. It is
|
||||
equivalent to including `json/json.h` in non-amalgamated source. This header
|
||||
only depends on standard headers.
|
||||
* `dist/json/json-forwards.h`: header that provides forward declaration of all
|
||||
JsonCpp types.
|
||||
|
||||
The amalgamated sources are generated by concatenating JsonCpp source in the
|
||||
correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
|
||||
of other headers.
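One practical consequence, sketched below with a hypothetical project header (not part of the commit): `json-forwards.h` lets your own headers refer to JsonCpp types without pulling in the full `json/json.h`:

    // settings.h -- hypothetical project header
    #include <json/json-forwards.h>   // forward declarations only

    // Declarations may use Json::Value without its full definition;
    // only the .cpp files that implement or call these need json/json.h.
    Json::Value loadSettings(const char* path);
    void saveSettings(const Json::Value& settings, const char* path);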
|
||||
|
||||
# Contributing to JsonCpp
|
||||
|
||||
Building and testing with CMake
|
||||
-------------------------------
|
||||
[CMake][] is a C++ Makefiles/Solution generator. It is usually available on most
|
||||
@@ -58,7 +85,7 @@ Steps for generating solution/makefiles using `cmake-gui`:
|
||||
* Make "source code" point to the source directory.
|
||||
* Make "where to build the binary" point to the directory to use for the build.
|
||||
* Click on the "Grouped" check box.
|
||||
* Review JsonCpp build options (tick `JSONCPP_LIB_BUILD_SHARED` to build as a
|
||||
* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a
|
||||
dynamic library).
|
||||
* Click the configure button at the bottom, then the generate button.
|
||||
* The generated solution/makefiles can be found in the binary directory.
|
||||
@@ -67,10 +94,10 @@ Alternatively, from the command-line on Unix in the source directory:
|
||||
|
||||
mkdir -p build/debug
|
||||
cd build/debug
|
||||
cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_STATIC=ON -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../..
|
||||
cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../..
|
||||
make
|
||||
|
||||
Running `cmake -`" will display the list of available generators (passed using
|
||||
Running `cmake -h` will display the list of available generators (passed using
|
||||
the `-G` option).
|
||||
|
||||
By default CMake hides compilation commands. This can be modified by specifying
|
||||
@@ -106,7 +133,7 @@ If you are building with Microsoft Visual Studio 2008, you need to set up the
|
||||
environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before
|
||||
running SCons.
|
||||
|
||||
# Running the tests manually
|
||||
## Running the tests manually
|
||||
You need to run tests manually only if you are troubleshooting an issue.
|
||||
|
||||
In the instructions below, replace `path/to/jsontest` with the path of the
|
||||
@@ -142,29 +169,6 @@ Run the Python script `doxybuild.py` from the top directory:
|
||||
|
||||
See `doxybuild.py --help` for options.
|
||||
|
||||
Generating amalgamated source and header
|
||||
----------------------------------------
|
||||
JsonCpp is provided with a script to generate a single header and a single
|
||||
source file to ease inclusion into an existing project. The amalgamated source
|
||||
can be generated at any time by running the following command from the
|
||||
top-directory (this requires Python 2.6):
|
||||
|
||||
python amalgamate.py
|
||||
|
||||
It is possible to specify header name. See the `-h` option for detail.
|
||||
|
||||
By default, the following files are generated:
|
||||
* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
|
||||
* `dist/json/json.h`: corresponding header file for use in your project. It is
|
||||
equivalent to including `json/json.h` in non-amalgamated source. This header
|
||||
only depends on standard headers.
|
||||
* `dist/json/json-forwards.h`: header that provides forward declaration of all
|
||||
JsonCpp types.
|
||||
|
||||
The amalgamated sources are generated by concatenating JsonCpp source in the
|
||||
correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
|
||||
of other headers.
|
||||
|
||||
Adding a reader/writer test
|
||||
---------------------------
|
||||
To add a test, you need to create two files in test/data:
|
||||
|
||||
3P/jsoncpp/appveyor.yml: 35 lines (Normal file)
@@ -0,0 +1,35 @@
|
||||
# This is a comment.
|
||||
|
||||
version: build.{build}
|
||||
|
||||
os: Windows Server 2012 R2
|
||||
|
||||
clone_folder: c:\projects\jsoncpp
|
||||
|
||||
platform:
|
||||
- Win32
|
||||
- x64
|
||||
|
||||
configuration:
|
||||
- Debug
|
||||
- Release
|
||||
|
||||
# scripts to run before build
|
||||
before_build:
|
||||
- echo "Running cmake..."
|
||||
- cd c:\projects\jsoncpp
|
||||
- cmake --version
|
||||
- set PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
|
||||
- if %PLATFORM% == Win32 cmake .
|
||||
- if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" .
|
||||
|
||||
build:
|
||||
project: jsoncpp.sln # path to Visual Studio solution or project
|
||||
|
||||
deploy:
|
||||
provider: GitHub
|
||||
auth_token:
|
||||
secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
|
||||
on:
|
||||
branch: master
|
||||
appveyor_repo_tag: true
|
||||
@@ -14,7 +14,7 @@ file(
|
||||
../../src/lib_json/json_writer.cpp
|
||||
)
|
||||
|
||||
set(CMAKE_C_FLAGS "-std=c++0x -Wall -Wextra -pedantic -Werror=strict-aliasing")
|
||||
set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -Wextra -pedantic -Werror=strict-aliasing")
|
||||
|
||||
add_library(
|
||||
jsoncpp
|
||||
|
||||
@@ -16,7 +16,7 @@ dox:
|
||||
# Then 'git add -A' and 'git push' in jsoncpp-docs.
|
||||
build:
|
||||
mkdir -p build/debug
|
||||
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
|
||||
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../..
|
||||
make -C build/debug
|
||||
|
||||
# Currently, this depends on include/json/version.h generated
|
||||
@@ -26,6 +26,9 @@ test-amalgamate:
|
||||
python3.4 amalgamate.py
|
||||
cd dist; gcc -I. -c jsoncpp.cpp
|
||||
|
||||
valgrind:
|
||||
valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
|
||||
|
||||
clean:
|
||||
\rm -rf *.gz *.asc dist/
|
||||
|
||||
|
||||
3P/jsoncpp/dev.makefile.orig: 32 lines (Normal file)
@@ -0,0 +1,32 @@
|
||||
# This is only for jsoncpp developers/contributors.
|
||||
# We use this to sign releases, generate documentation, etc.
|
||||
VER?=$(shell cat version)
|
||||
|
||||
default:
|
||||
@echo "VER=${VER}"
|
||||
sign: jsoncpp-${VER}.tar.gz
|
||||
gpg --armor --detach-sign $<
|
||||
gpg --verify $<.asc
|
||||
# Then upload .asc to the release.
|
||||
jsoncpp-%.tar.gz:
|
||||
curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
|
||||
dox:
|
||||
python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
|
||||
rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
|
||||
# Then 'git add -A' and 'git push' in jsoncpp-docs.
|
||||
build:
|
||||
mkdir -p build/debug
|
||||
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
|
||||
make -C build/debug
|
||||
|
||||
# Currently, this depends on include/json/version.h generated
|
||||
# by cmake.
|
||||
test-amalgamate:
|
||||
python2.7 amalgamate.py
|
||||
python3.4 amalgamate.py
|
||||
cd dist; gcc -I. -c jsoncpp.cpp
|
||||
|
||||
clean:
|
||||
\rm -rf *.gz *.asc dist/
|
||||
|
||||
.PHONY: build
|
||||
@@ -1 +1,6 @@
|
||||
# module
|
||||
# Copyright 2010 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
# module
|
||||
|
||||
3P/jsoncpp/devtools/__init__.py.orig: 1 line (Normal file)
@@ -0,0 +1 @@
|
||||
# module
|
||||
@@ -1,33 +1,33 @@
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 7 .NET 2003",
|
||||
"Visual Studio 9 2008",
|
||||
"Visual Studio 9 2008 Win64",
|
||||
"Visual Studio 10",
|
||||
"Visual Studio 10 Win64",
|
||||
"Visual Studio 11",
|
||||
"Visual Studio 11 Win64"
|
||||
]
|
||||
},
|
||||
{"generator": ["MinGW Makefiles"],
|
||||
"env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["JSONCPP_LIB_BUILD_SHARED=true"],
|
||||
["JSONCPP_LIB_BUILD_SHARED=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 7 .NET 2003",
|
||||
"Visual Studio 9 2008",
|
||||
"Visual Studio 9 2008 Win64",
|
||||
"Visual Studio 10",
|
||||
"Visual Studio 10 Win64",
|
||||
"Visual Studio 11",
|
||||
"Visual Studio 11 Win64"
|
||||
]
|
||||
},
|
||||
{"generator": ["MinGW Makefiles"],
|
||||
"env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["BUILD_SHARED_LIBS=true"],
|
||||
["BUILD_SHARED_LIBS=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
3P/jsoncpp/devtools/agent_vmw7.json.orig: 33 lines (Normal file)
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 7 .NET 2003",
|
||||
"Visual Studio 9 2008",
|
||||
"Visual Studio 9 2008 Win64",
|
||||
"Visual Studio 10",
|
||||
"Visual Studio 10 Win64",
|
||||
"Visual Studio 11",
|
||||
"Visual Studio 11 Win64"
|
||||
]
|
||||
},
|
||||
{"generator": ["MinGW Makefiles"],
|
||||
"env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["JSONCPP_LIB_BUILD_SHARED=true"],
|
||||
["JSONCPP_LIB_BUILD_SHARED=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,26 +1,26 @@
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 6",
|
||||
"Visual Studio 7",
|
||||
"Visual Studio 8 2005"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["JSONCPP_LIB_BUILD_SHARED=true"],
|
||||
["JSONCPP_LIB_BUILD_SHARED=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 6",
|
||||
"Visual Studio 7",
|
||||
"Visual Studio 8 2005"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["BUILD_SHARED_LIBS=true"],
|
||||
["BUILD_SHARED_LIBS=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
3P/jsoncpp/devtools/agent_vmxp.json.orig: 26 lines (Normal file)
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"cmake_variants" : [
|
||||
{"name": "generator",
|
||||
"generators": [
|
||||
{"generator": [
|
||||
"Visual Studio 6",
|
||||
"Visual Studio 7",
|
||||
"Visual Studio 8 2005"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{"name": "shared_dll",
|
||||
"variables": [
|
||||
["JSONCPP_LIB_BUILD_SHARED=true"],
|
||||
["JSONCPP_LIB_BUILD_SHARED=false"]
|
||||
]
|
||||
},
|
||||
{"name": "build_type",
|
||||
"build_types": [
|
||||
"debug",
|
||||
"release"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,9 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Baptiste Lepilleur, 2009
|
||||
# Copyright 2009 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
from __future__ import print_function
|
||||
from dircache import listdir
|
||||
|
||||
3P/jsoncpp/devtools/antglob.py.orig: 202 lines (Normal file)
@@ -0,0 +1,202 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Baptiste Lepilleur, 2009
|
||||
|
||||
from __future__ import print_function
|
||||
from dircache import listdir
|
||||
import re
|
||||
import fnmatch
|
||||
import os.path
|
||||
|
||||
|
||||
# These fnmatch expressions are used by default to prune the directory tree
|
||||
# while doing the recursive traversal in the glob_impl method of glob function.
|
||||
prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '
|
||||
|
||||
# These fnmatch expressions are used by default to exclude files and dirs
|
||||
# while doing the recursive traversal in the glob_impl method of glob function.
|
||||
##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
|
||||
|
||||
# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
|
||||
# while doing the recursive traversal in the glob_impl method of glob function.
|
||||
default_excludes = '''
|
||||
**/*~
|
||||
**/#*#
|
||||
**/.#*
|
||||
**/%*%
|
||||
**/._*
|
||||
**/CVS
|
||||
**/CVS/**
|
||||
**/.cvsignore
|
||||
**/SCCS
|
||||
**/SCCS/**
|
||||
**/vssver.scc
|
||||
**/.svn
|
||||
**/.svn/**
|
||||
**/.git
|
||||
**/.git/**
|
||||
**/.gitignore
|
||||
**/.bzr
|
||||
**/.bzr/**
|
||||
**/.hg
|
||||
**/.hg/**
|
||||
**/_MTN
|
||||
**/_MTN/**
|
||||
**/_darcs
|
||||
**/_darcs/**
|
||||
**/.DS_Store '''
|
||||
|
||||
DIR = 1
|
||||
FILE = 2
|
||||
DIR_LINK = 4
|
||||
FILE_LINK = 8
|
||||
LINKS = DIR_LINK | FILE_LINK
|
||||
ALL_NO_LINK = DIR | FILE
|
||||
ALL = DIR | FILE | LINKS
|
||||
|
||||
_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')
|
||||
|
||||
def ant_pattern_to_re(ant_pattern):
|
||||
"""Generates a regular expression from the ant pattern.
|
||||
Matching convention:
|
||||
**/a: match 'a', 'dir/a', 'dir1/dir2/a'
|
||||
a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
|
||||
*.py: match 'script.py' but not 'a/script.py'
|
||||
"""
|
||||
rex = ['^']
|
||||
next_pos = 0
|
||||
sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
|
||||
## print 'Converting', ant_pattern
|
||||
for match in _ANT_RE.finditer(ant_pattern):
|
||||
## print 'Matched', match.group()
|
||||
## print match.start(0), next_pos
|
||||
if match.start(0) != next_pos:
|
||||
raise ValueError("Invalid ant pattern")
|
||||
if match.group(1): # /**/
|
||||
rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
|
||||
elif match.group(2): # **/
|
||||
rex.append('(?:.*%s)?' % sep_rex)
|
||||
elif match.group(3): # /**
|
||||
rex.append(sep_rex + '.*')
|
||||
elif match.group(4): # *
|
||||
rex.append('[^/%s]*' % re.escape(os.path.sep))
|
||||
elif match.group(5): # /
|
||||
rex.append(sep_rex)
|
||||
else: # somepath
|
||||
rex.append(re.escape(match.group(6)))
|
||||
next_pos = match.end()
|
||||
rex.append('$')
|
||||
return re.compile(''.join(rex))
|
||||
|
||||
def _as_list(l):
|
||||
if isinstance(l, basestring):
|
||||
return l.split()
|
||||
return l
|
||||
|
||||
def glob(dir_path,
|
||||
includes = '**/*',
|
||||
excludes = default_excludes,
|
||||
entry_type = FILE,
|
||||
prune_dirs = prune_dirs,
|
||||
max_depth = 25):
|
||||
include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
|
||||
exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
|
||||
prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
|
||||
dir_path = dir_path.replace('/',os.path.sep)
|
||||
entry_type_filter = entry_type
|
||||
|
||||
def is_pruned_dir(dir_name):
|
||||
for pattern in prune_dirs:
|
||||
if fnmatch.fnmatch(dir_name, pattern):
|
||||
return True
|
||||
return False
|
||||
|
||||
def apply_filter(full_path, filter_rexs):
|
||||
"""Return True if at least one of the filter regular expression match full_path."""
|
||||
for rex in filter_rexs:
|
||||
if rex.match(full_path):
|
||||
return True
|
||||
return False
|
||||
|
||||
def glob_impl(root_dir_path):
|
||||
child_dirs = [root_dir_path]
|
||||
while child_dirs:
|
||||
dir_path = child_dirs.pop()
|
||||
for entry in listdir(dir_path):
|
||||
full_path = os.path.join(dir_path, entry)
|
||||
## print 'Testing:', full_path,
|
||||
is_dir = os.path.isdir(full_path)
|
||||
if is_dir and not is_pruned_dir(entry): # explore child directory ?
|
||||
## print '===> marked for recursion',
|
||||
child_dirs.append(full_path)
|
||||
included = apply_filter(full_path, include_filter)
|
||||
rejected = apply_filter(full_path, exclude_filter)
|
||||
if not included or rejected: # do not include entry ?
|
||||
## print '=> not included or rejected'
|
||||
continue
|
||||
link = os.path.islink(full_path)
|
||||
is_file = os.path.isfile(full_path)
|
||||
if not is_file and not is_dir:
|
||||
## print '=> unknown entry type'
|
||||
continue
|
||||
if link:
|
||||
entry_type = is_file and FILE_LINK or DIR_LINK
|
||||
else:
|
||||
entry_type = is_file and FILE or DIR
|
||||
## print '=> type: %d' % entry_type,
|
||||
if (entry_type & entry_type_filter) != 0:
|
||||
## print ' => KEEP'
|
||||
yield os.path.join(dir_path, entry)
|
||||
## else:
|
||||
## print ' => TYPE REJECTED'
|
||||
return list(glob_impl(dir_path))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import unittest
|
||||
|
||||
class AntPatternToRETest(unittest.TestCase):
|
||||
## def test_conversion(self):
|
||||
## self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
|
||||
|
||||
def test_matching(self):
|
||||
test_cases = [ ('path',
|
||||
['path'],
|
||||
['somepath', 'pathsuffix', '/path', '/path']),
|
||||
('*.py',
|
||||
['source.py', 'source.ext.py', '.py'],
|
||||
['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
|
||||
('**/path',
|
||||
['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
|
||||
['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
|
||||
('path/**',
|
||||
['path/a', 'path/path/a', 'path//'],
|
||||
['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
|
||||
('/**/path',
|
||||
['/path', '/a/path', '/a/b/path/path', '/path/path'],
|
||||
['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
|
||||
('a/b',
|
||||
['a/b'],
|
||||
['somea/b', 'a/bsuffix', 'a/b/c']),
|
||||
('**/*.py',
|
||||
['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
|
||||
['script.pyc', 'script.pyo', 'a.py/b']),
|
||||
('src/**/*.py',
|
||||
['src/a.py', 'src/dir/a.py'],
|
||||
['a/src/a.py', '/src/a.py']),
|
||||
]
|
||||
for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
|
||||
def local_path(paths):
|
||||
return [ p.replace('/',os.path.sep) for p in paths ]
|
||||
test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
|
||||
for ant_pattern, accepted_matches, rejected_matches in test_cases:
|
||||
rex = ant_pattern_to_re(ant_pattern)
|
||||
print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
|
||||
for accepted_match in accepted_matches:
|
||||
print('Accepted?:', accepted_match)
|
||||
self.assertTrue(rex.match(accepted_match) is not None)
|
||||
for rejected_match in rejected_matches:
|
||||
print('Rejected?:', rejected_match)
|
||||
self.assertTrue(rex.match(rejected_match) is None)
|
||||
|
||||
unittest.main()
|
||||
@@ -1,5 +1,11 @@
|
||||
# Copyright 2010 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
from __future__ import print_function
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
|
||||
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
|
||||
|
||||
3P/jsoncpp/devtools/fixeol.py.orig: 64 lines (Normal file)
@@ -0,0 +1,64 @@
|
||||
from __future__ import print_function
|
||||
import os.path
|
||||
|
||||
def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
|
||||
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
|
||||
if not os.path.isfile(path):
|
||||
raise ValueError('Path "%s" is not a file' % path)
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
except IOError as msg:
|
||||
print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
|
||||
return False
|
||||
try:
|
||||
raw_lines = f.readlines()
|
||||
finally:
|
||||
f.close()
|
||||
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
|
||||
if raw_lines != fixed_lines:
|
||||
print('%s =>' % path, end=' ')
|
||||
if not is_dry_run:
|
||||
f = open(path, "wb")
|
||||
try:
|
||||
f.writelines(fixed_lines)
|
||||
finally:
|
||||
f.close()
|
||||
if verbose:
|
||||
print(is_dry_run and ' NEED FIX' or ' FIXED')
|
||||
return True
|
||||
##
|
||||
##
|
||||
##
|
||||
##def _do_fix(is_dry_run = True):
|
||||
## from waftools import antglob
|
||||
## python_sources = antglob.glob('.',
|
||||
## includes = '**/*.py **/wscript **/wscript_build',
|
||||
## excludes = antglob.default_excludes + './waf.py',
|
||||
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
|
||||
## for path in python_sources:
|
||||
## _fix_python_source(path, is_dry_run)
|
||||
##
|
||||
## cpp_sources = antglob.glob('.',
|
||||
## includes = '**/*.cpp **/*.h **/*.inl',
|
||||
## prune_dirs = antglob.prune_dirs + 'waf-* ./build')
|
||||
## for path in cpp_sources:
|
||||
## _fix_source_eol(path, is_dry_run)
|
||||
##
|
||||
##
|
||||
##def dry_fix(context):
|
||||
## _do_fix(is_dry_run = True)
|
||||
##
|
||||
##def fix(context):
|
||||
## _do_fix(is_dry_run = False)
|
||||
##
|
||||
##def shutdown():
|
||||
## pass
|
||||
##
|
||||
##def check(context):
|
||||
## # Unit tests are run when "check" target is used
|
||||
## ut = UnitTest.unit_test()
|
||||
## ut.change_to_testfile_dir = True
|
||||
## ut.want_to_see_test_output = True
|
||||
## ut.want_to_see_test_error = True
|
||||
## ut.run()
|
||||
## ut.print_results()
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2010 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
from contextlib import closing
|
||||
import os
|
||||
import tarfile
|
||||
|
||||
3P/jsoncpp/devtools/tarball.py.orig: 47 lines (Normal file)
@@ -0,0 +1,47 @@
|
||||
from contextlib import closing
|
||||
import os
|
||||
import tarfile
|
||||
|
||||
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
|
||||
|
||||
def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
|
||||
"""Parameters:
|
||||
tarball_path: output path of the .tar.gz file
|
||||
sources: list of sources to include in the tarball, relative to the current directory
|
||||
base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
|
||||
from path in the tarball.
|
||||
prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
|
||||
to make them child of root.
|
||||
"""
|
||||
base_dir = os.path.normpath(os.path.abspath(base_dir))
|
||||
def archive_name(path):
|
||||
"""Makes path relative to base_dir."""
|
||||
path = os.path.normpath(os.path.abspath(path))
|
||||
common_path = os.path.commonprefix((base_dir, path))
|
||||
archive_name = path[len(common_path):]
|
||||
if os.path.isabs(archive_name):
|
||||
archive_name = archive_name[1:]
|
||||
return os.path.join(prefix_dir, archive_name)
|
||||
def visit(tar, dirname, names):
|
||||
for name in names:
|
||||
path = os.path.join(dirname, name)
|
||||
if os.path.isfile(path):
|
||||
path_in_tar = archive_name(path)
|
||||
tar.add(path, path_in_tar)
|
||||
compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
|
||||
with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
|
||||
compresslevel=compression)) as tar:
|
||||
for source in sources:
|
||||
source_path = source
|
||||
if os.path.isdir(source):
|
||||
for dirpath, dirnames, filenames in os.walk(source_path):
|
||||
visit(tar, dirpath, filenames)
|
||||
else:
|
||||
path_in_tar = archive_name(source_path)
|
||||
tar.add(source_path, path_in_tar) # filename, arcname
|
||||
|
||||
def decompress(tarball_path, base_dir):
|
||||
"""Decompress the gzipped tarball into directory base_dir.
|
||||
"""
|
||||
with closing(tarfile.TarFile.open(tarball_path)) as tar:
|
||||
tar.extractall(base_dir)
|
||||
@@ -31,7 +31,7 @@ def find_program(*filenames):
|
||||
paths = os.environ.get('PATH', '').split(os.pathsep)
|
||||
suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
|
||||
for filename in filenames:
|
||||
for name in [filename+ext for ext in suffixes.split()]:
|
||||
for name in [filename+ext for ext in suffixes.split(' ')]:
|
||||
for directory in paths:
|
||||
full_path = os.path.join(directory, name)
|
||||
if os.path.isfile(full_path):
|
||||
@@ -72,7 +72,7 @@ def run_cmd(cmd, silent=False):
|
||||
if silent:
|
||||
status, output = getstatusoutput(cmd)
|
||||
else:
|
||||
status, output = os.system(' '.join(cmd)), ''
|
||||
status, output = subprocess.call(cmd), ''
|
||||
if status:
|
||||
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
|
||||
raise Exception(msg)
|
||||
|
||||
3P/jsoncpp/doxybuild.py.orig: 189 lines (Normal file)
@@ -0,0 +1,189 @@
|
||||
"""Script to generate doxygen documentation.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
from devtools import tarball
|
||||
from contextlib import contextmanager
|
||||
import subprocess
|
||||
import traceback
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
|
||||
@contextmanager
|
||||
def cd(newdir):
|
||||
"""
|
||||
http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
|
||||
"""
|
||||
prevdir = os.getcwd()
|
||||
os.chdir(newdir)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
os.chdir(prevdir)
|
||||
|
||||
def find_program(*filenames):
|
||||
"""find a program in folders path_lst, and sets env[var]
|
||||
@param filenames: a list of possible names of the program to search for
|
||||
@return: the full path of the filename if found, or '' if filename could not be found
|
||||
"""
|
||||
paths = os.environ.get('PATH', '').split(os.pathsep)
|
||||
suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
|
||||
for filename in filenames:
|
||||
for name in [filename+ext for ext in suffixes.split()]:
|
||||
for directory in paths:
|
||||
full_path = os.path.join(directory, name)
|
||||
if os.path.isfile(full_path):
|
||||
return full_path
|
||||
return ''
|
||||
|
||||
def do_subst_in_file(targetfile, sourcefile, dict):
|
||||
"""Replace all instances of the keys of dict with their values.
|
||||
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
|
||||
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
|
||||
"""
|
||||
with open(sourcefile, 'r') as f:
|
||||
contents = f.read()
|
||||
for (k,v) in list(dict.items()):
|
||||
v = v.replace('\\','\\\\')
|
||||
contents = re.sub(k, v, contents)
|
||||
with open(targetfile, 'w') as f:
|
||||
f.write(contents)
|
||||
|
||||
def getstatusoutput(cmd):
|
||||
"""cmd is a list.
|
||||
"""
|
||||
try:
|
||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
output, _ = process.communicate()
|
||||
status = process.returncode
|
||||
except:
|
||||
status = -1
|
||||
output = traceback.format_exc()
|
||||
return status, output
|
||||
|
||||
def run_cmd(cmd, silent=False):
|
||||
"""Raise exception on failure.
|
||||
"""
|
||||
info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
|
||||
print(info)
|
||||
sys.stdout.flush()
|
||||
if silent:
|
||||
status, output = getstatusoutput(cmd)
|
||||
else:
|
||||
status, output = os.system(' '.join(cmd)), ''
|
||||
if status:
|
||||
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
|
||||
raise Exception(msg)
|
||||
|
||||
def assert_is_exe(path):
|
||||
if not path:
|
||||
raise Exception('path is empty.')
|
||||
if not os.path.isfile(path):
|
||||
raise Exception('%r is not a file.' %path)
|
||||
if not os.access(path, os.X_OK):
|
||||
raise Exception('%r is not executable by this user.' %path)
|
||||
|
||||
def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
|
||||
assert_is_exe(doxygen_path)
|
||||
config_file = os.path.abspath(config_file)
|
||||
with cd(working_dir):
|
||||
cmd = [doxygen_path, config_file]
|
||||
run_cmd(cmd, is_silent)
|
||||
|
||||
def build_doc(options, make_release=False):
|
||||
if make_release:
|
||||
options.make_tarball = True
|
||||
options.with_dot = True
|
||||
options.with_html_help = True
|
||||
options.with_uml_look = True
|
||||
options.open = False
|
||||
options.silent = True
|
||||
|
||||
version = open('version', 'rt').read().strip()
|
||||
output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
|
||||
if not os.path.isdir(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
top_dir = os.path.abspath('.')
|
||||
html_output_dirname = 'jsoncpp-api-html-' + version
|
||||
tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz')
|
||||
warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log')
|
||||
html_output_path = os.path.join(output_dir, html_output_dirname)
|
||||
def yesno(bool):
|
||||
return bool and 'YES' or 'NO'
|
||||
subst_keys = {
|
||||
'%JSONCPP_VERSION%': version,
|
||||
'%DOC_TOPDIR%': '',
|
||||
'%TOPDIR%': top_dir,
|
||||
'%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname),
|
||||
'%HAVE_DOT%': yesno(options.with_dot),
|
||||
'%DOT_PATH%': os.path.split(options.dot_path)[0],
|
||||
'%HTML_HELP%': yesno(options.with_html_help),
|
||||
'%UML_LOOK%': yesno(options.with_uml_look),
|
||||
'%WARNING_LOG_PATH%': os.path.join('..', warning_log_path)
|
||||
}
|
||||
|
||||
if os.path.isdir(output_dir):
|
||||
print('Deleting directory:', output_dir)
|
||||
shutil.rmtree(output_dir)
|
||||
if not os.path.isdir(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
|
||||
do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
|
||||
run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
|
||||
if not options.silent:
|
||||
print(open(warning_log_path, 'r').read())
|
||||
index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
|
||||
print('Generated documentation can be found in:')
|
||||
print(index_path)
|
||||
if options.open:
|
||||
import webbrowser
|
||||
webbrowser.open('file://' + index_path)
|
||||
if options.make_tarball:
|
||||
print('Generating doc tarball to', tarball_path)
|
||||
tarball_sources = [
|
||||
output_dir,
|
||||
'README.md',
|
||||
'LICENSE',
|
||||
'NEWS.txt',
|
||||
'version'
|
||||
]
|
||||
tarball_basedir = os.path.join(output_dir, html_output_dirname)
|
||||
tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname)
|
||||
return tarball_path, html_output_dirname
|
||||
|
||||
def main():
|
||||
usage = """%prog
|
||||
Generates doxygen documentation in build/doxygen.
|
||||
Optionaly makes a tarball of the documentation to dist/.
|
||||
|
||||
Must be started in the project top directory.
|
||||
"""
|
||||
from optparse import OptionParser
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False,
|
||||
help="""Enable usage of DOT to generate collaboration diagram""")
|
||||
parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'),
|
||||
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
|
||||
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
|
||||
help="""Path to Doxygen tool. [Default: %default]""")
|
||||
parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
|
||||
help="""Path to doxygen inputs. [Default: %default]""")
|
||||
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
|
||||
help="""Enable generation of Microsoft HTML HELP""")
|
||||
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
|
||||
help="""Generates DOT graph without UML look [Default: False]""")
|
||||
parser.add_option('--open', dest="open", action='store_true', default=False,
|
||||
help="""Open the HTML index in the web browser after generation""")
|
||||
parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False,
|
||||
help="""Generates a tarball of the documentation in dist/ directory""")
|
||||
parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False,
|
||||
help="""Hides doxygen output""")
|
||||
parser.enable_interspersed_args()
|
||||
options, args = parser.parse_args()
|
||||
build_doc(options)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
#ifndef JSON_CONFIG_H_INCLUDED
|
||||
#define JSON_CONFIG_H_INCLUDED
|
||||
#include <stddef.h>
|
||||
|
||||
/// If defined, indicates that json library is embedded in CppTL library.
|
||||
//# define JSON_IN_CPPTL 1
|
||||
@@ -56,34 +57,67 @@
|
||||
// Storages, and 64 bits integer support is disabled.
|
||||
// #define JSON_NO_INT64 1
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6
|
||||
// Microsoft Visual Studio 6 only support conversion from __int64 to double
|
||||
// (no conversion from unsigned __int64).
|
||||
#define JSON_USE_INT64_DOUBLE_CONVERSION 1
|
||||
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
|
||||
// characters in the debug information)
|
||||
// All projects I've ever seen with VS6 were using this globally (not bothering
|
||||
// with pragma push/pop).
|
||||
#pragma warning(disable : 4786)
|
||||
#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6
|
||||
#if defined(_MSC_VER) // MSVC
|
||||
# if _MSC_VER <= 1200 // MSVC 6
|
||||
// Microsoft Visual Studio 6 only support conversion from __int64 to double
|
||||
// (no conversion from unsigned __int64).
|
||||
# define JSON_USE_INT64_DOUBLE_CONVERSION 1
|
||||
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
|
||||
// characters in the debug information)
|
||||
// All projects I've ever seen with VS6 were using this globally (not bothering
|
||||
// with pragma push/pop).
|
||||
# pragma warning(disable : 4786)
|
||||
# endif // MSVC 6
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
|
||||
/// Indicates that the following function is deprecated.
|
||||
#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
|
||||
#elif defined(__clang__) && defined(__has_feature)
|
||||
#if __has_feature(attribute_deprecated_with_message)
|
||||
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
|
||||
#endif
|
||||
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
|
||||
#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
|
||||
#elif defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
|
||||
#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
|
||||
# if _MSC_VER >= 1500 // MSVC 2008
|
||||
/// Indicates that the following function is deprecated.
|
||||
# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
|
||||
# endif
|
||||
|
||||
#endif // defined(_MSC_VER)
|
||||
|
||||
|
||||
#ifndef JSON_HAS_RVALUE_REFERENCES
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
|
||||
#define JSON_HAS_RVALUE_REFERENCES 1
|
||||
#endif // MSVC >= 2010
|
||||
|
||||
#ifdef __clang__
|
||||
#if __has_feature(cxx_rvalue_references)
|
||||
#define JSON_HAS_RVALUE_REFERENCES 1
|
||||
#endif // has_feature
|
||||
|
||||
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
|
||||
#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
|
||||
#define JSON_HAS_RVALUE_REFERENCES 1
|
||||
#endif // GXX_EXPERIMENTAL
|
||||
|
||||
#endif // __clang__ || __GNUC__
|
||||
|
||||
#endif // not defined JSON_HAS_RVALUE_REFERENCES
|
||||
|
||||
#ifndef JSON_HAS_RVALUE_REFERENCES
|
||||
#define JSON_HAS_RVALUE_REFERENCES 0
|
||||
#endif
|
||||
|
||||
#ifdef __clang__
|
||||
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
|
||||
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
|
||||
# define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
|
||||
# elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
|
||||
# define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
|
||||
# endif // GNUC version
|
||||
#endif // __clang__ || __GNUC__
|
||||
|
||||
#if !defined(JSONCPP_DEPRECATED)
|
||||
#define JSONCPP_DEPRECATED(message)
|
||||
#endif // if !defined(JSONCPP_DEPRECATED)
|
||||
|
||||
#if __GNUC__ >= 6
|
||||
# define JSON_USE_INT64_DOUBLE_CONVERSION 1
|
||||
#endif
|
||||
|
||||
namespace Json {
|
||||
typedef int Int;
|
||||
typedef unsigned int UInt;
|
||||
@@ -42,8 +42,8 @@ public:
|
||||
*
|
||||
*/
|
||||
struct StructuredError {
|
||||
size_t offset_start;
|
||||
size_t offset_limit;
|
||||
ptrdiff_t offset_start;
|
||||
ptrdiff_t offset_limit;
|
||||
std::string message;
|
||||
};
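These offsets locate each error inside the parsed text; a brief consumer sketch (illustrative only, using the `Json::Reader::getStructuredErrors()` accessor declared later in this header):

    #include <json/json.h>
    #include <iostream>
    #include <string>

    void reportParseErrors(const std::string& document) {   // hypothetical helper
        Json::Reader reader;
        Json::Value root;
        if (reader.parse(document, root))
            return;
        for (const Json::Reader::StructuredError& err : reader.getStructuredErrors()) {
            // Byte offsets into 'document'; as ptrdiff_t they now compare cleanly
            // against pointer differences without sign-conversion warnings.
            std::cerr << "[" << err.offset_start << ", " << err.offset_limit
                      << "): " << err.message << "\n";
        }
    }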
|
||||
|
||||
@@ -268,7 +268,7 @@ public:
|
||||
char const* beginDoc, char const* endDoc,
|
||||
Value* root, std::string* errs) = 0;
|
||||
|
||||
class Factory {
|
||||
class JSON_API Factory {
|
||||
public:
|
||||
virtual ~Factory() {}
|
||||
/** \brief Allocate a CharReader via operator new().
|
||||
@@ -321,6 +321,9 @@ public:
|
||||
the JSON value in the input string.
|
||||
- `"rejectDupKeys": false or true`
|
||||
- If true, `parse()` returns false when a key is duplicated within an object.
|
||||
- `"allowSpecialFloats": false or true`
|
||||
- If true, special float values (NaNs and infinities) are allowed
|
||||
and their values are lossfree restorable.
|
||||
|
||||
You can examine 'settings_` yourself
|
||||
to see the defaults. You can also write and read them just like any
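A short sketch of driving these settings through `CharReaderBuilder` (illustrative usage, not part of the diff; `parseStrict` is a hypothetical wrapper):

    #include <json/json.h>
    #include <memory>
    #include <string>

    bool parseStrict(const std::string& doc, Json::Value* root, std::string* errs) {
        Json::CharReaderBuilder builder;
        builder.settings_["rejectDupKeys"] = true;       // fail on duplicated object keys
        builder.settings_["allowSpecialFloats"] = true;  // accept NaN / Infinity losslessly
        std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
        return reader->parse(doc.data(), doc.data() + doc.size(), root, errs);
    }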
|
||||
@@ -330,9 +333,9 @@ public:
|
||||
Json::Value settings_;
|
||||
|
||||
CharReaderBuilder();
|
||||
virtual ~CharReaderBuilder();
|
||||
~CharReaderBuilder() override;
|
||||
|
||||
virtual CharReader* newCharReader() const;
|
||||
CharReader* newCharReader() const override;
|
||||
|
||||
/** \return true if 'settings' are legal and consistent;
|
||||
* otherwise, indicate bad settings via 'invalid'.
|
||||
@@ -346,13 +349,13 @@ public:
|
||||
/** Called by ctor, but you can use this to reset settings_.
|
||||
* \pre 'settings' != NULL (but Json::null is fine)
|
||||
* \remark Defaults:
|
||||
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
|
||||
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
|
||||
*/
|
||||
static void setDefaults(Json::Value* settings);
|
||||
/** Same as old Features::strictMode().
|
||||
* \pre 'settings' != NULL (but Json::null is fine)
|
||||
* \remark Defaults:
|
||||
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
|
||||
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
|
||||
*/
|
||||
static void strictMode(Json::Value* settings);
|
||||
};
|
||||
@@ -37,21 +37,36 @@ namespace Json {
|
||||
*
|
||||
* We use nothing but these internally. Of course, STL can throw others.
|
||||
*/
|
||||
class JSON_API Exception;
|
||||
class JSON_API Exception : public std::exception {
|
||||
public:
|
||||
Exception(std::string const& msg);
|
||||
~Exception() throw() override;
|
||||
char const* what() const throw() override;
|
||||
protected:
|
||||
std::string msg_;
|
||||
};
|
||||
|
||||
/** Exceptions which the user cannot easily avoid.
|
||||
*
|
||||
* E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
|
||||
*
|
||||
* \remark derived from Json::Exception
|
||||
*/
|
||||
class JSON_API RuntimeError;
|
||||
class JSON_API RuntimeError : public Exception {
|
||||
public:
|
||||
RuntimeError(std::string const& msg);
|
||||
};
|
||||
|
||||
/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
|
||||
*
|
||||
* These are precondition-violations (user bugs) and internal errors (our bugs).
|
||||
*
|
||||
* \remark derived from Json::Exception
|
||||
*/
|
||||
class JSON_API LogicError;
|
||||
class JSON_API LogicError : public Exception {
|
||||
public:
|
||||
LogicError(std::string const& msg);
|
||||
};
|
||||
|
||||
/// used internally
|
||||
void throwRuntimeError(std::string const& msg);
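Because the classes are now fully declared here, callers can catch them directly; a hedged example of typical use (`readPort` is a hypothetical helper, and exceptions must not be disabled in the build):

    #include <json/json.h>
    #include <iostream>

    int readPort(const Json::Value& cfg) {
        try {
            // A failed conversion is reported through the hierarchy above
            // as a LogicError, i.e. a usage error on the caller's side.
            return cfg["port"].asInt();
        } catch (const Json::LogicError& e) {
            std::cerr << "bad config value: " << e.what() << "\n";
        } catch (const Json::Exception& e) {
            std::cerr << "jsoncpp error: " << e.what() << "\n";
        }
        return -1;
    }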
|
||||
@@ -197,6 +212,9 @@ private:
|
||||
CZString(ArrayIndex index);
|
||||
CZString(char const* str, unsigned length, DuplicationPolicy allocate);
|
||||
CZString(CZString const& other);
|
||||
#if JSON_HAS_RVALUE_REFERENCES
|
||||
CZString(CZString&& other);
|
||||
#endif
|
||||
~CZString();
|
||||
CZString& operator=(CZString other);
|
||||
bool operator<(CZString const& other) const;
|
||||
@@ -211,7 +229,7 @@ private:
|
||||
void swap(CZString& other);
|
||||
|
||||
struct StringStorage {
|
||||
DuplicationPolicy policy_: 2;
|
||||
unsigned policy_: 2;
|
||||
unsigned length_: 30; // 1GB max
|
||||
};
|
||||
|
||||
@@ -255,7 +273,7 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
#endif // if defined(JSON_HAS_INT64)
|
||||
Value(double value);
|
||||
Value(const char* value); ///< Copy til first 0. (NULL causes to seg-fault.)
|
||||
Value(const char* beginValue, const char* endValue); ///< Copy all, incl zeroes.
|
||||
Value(const char* begin, const char* end); ///< Copy all, incl zeroes.
|
||||
/** \brief Constructs a value from a static string.
|
||||
|
||||
* Like other value string constructor but do not duplicate the string for
|
||||
@@ -279,6 +297,10 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
Value(bool value);
|
||||
/// Deep copy.
|
||||
Value(const Value& other);
|
||||
#if JSON_HAS_RVALUE_REFERENCES
|
||||
/// Move constructor
|
||||
Value(Value&& other);
|
||||
#endif
|
||||
~Value();
|
||||
|
||||
/// Deep copy, then swap(other).
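On compilers where JSON_HAS_RVALUE_REFERENCES is detected (see the json/config.h changes in this commit), the new move constructor lets large documents change hands without a deep copy; a small sketch (illustrative, hypothetical function names):

    #include <json/json.h>
    #include <utility>

    Json::Value buildLargeArray() {
        Json::Value arr(Json::arrayValue);
        for (Json::ArrayIndex i = 0; i < 100000; ++i)
            arr.append(static_cast<int>(i));   // grows an array value
        return arr;                            // moved (or elided), not deep-copied, under C++11
    }

    void example() {
        Json::Value big = buildLargeArray();
        Json::Value stolen(std::move(big));    // uses the new Value(Value&&) constructor
        (void)stolen;
    }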
|
||||
@@ -306,7 +328,7 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
* \return false if !string. (Seg-fault if str or end are NULL.)
|
||||
*/
|
||||
bool getString(
|
||||
char const** str, char const** end) const;
|
||||
char const** begin, char const** end) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
CppTL::ConstString asConstString() const;
|
||||
#endif
|
||||
@@ -435,8 +457,8 @@ Json::Value obj_value(Json::objectValue); // {}
Value get(const char* key, const Value& defaultValue) const;
/// Return the member named key if it exist, defaultValue otherwise.
/// \note deep copy
/// \param key may contain embedded nulls.
Value get(const char* key, const char* end, const Value& defaultValue) const;
/// \note key may contain embedded nulls.
Value get(const char* begin, const char* end, const Value& defaultValue) const;
/// Return the member named key if it exist, defaultValue otherwise.
/// \note deep copy
/// \param key may contain embedded nulls.
@@ -448,12 +470,12 @@ Json::Value obj_value(Json::objectValue); // {}
#endif
/// Most general and efficient version of isMember()const, get()const,
/// and operator[]const
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
Value const* find(char const* key, char const* end) const;
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
Value const* find(char const* begin, char const* end) const;
/// Most general and efficient version of object-mutators.
/// \note As stated elsewhere, behavior is undefined if (end-key) >= 2^30
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
/// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
Value const* demand(char const* key, char const* end);
Value const* demand(char const* begin, char const* end);
/** \brief Remove and return the named member.
///
/// Do nothing if it did not exist.
@@ -466,7 +488,7 @@ Json::Value obj_value(Json::objectValue); // {}
/// \param key may contain embedded nulls.
/// \deprecated
Value removeMember(const std::string& key);
/// Same as removeMember(const char* key, const char* end, Value* removed),
/// Same as removeMember(const char* begin, const char* end, Value* removed),
/// but 'key' is null-terminated.
bool removeMember(const char* key, Value* removed);
/** \brief Remove the named map member.
@@ -477,7 +499,7 @@ Json::Value obj_value(Json::objectValue); // {}
*/
bool removeMember(std::string const& key, Value* removed);
/// Same as removeMember(std::string const& key, Value* removed)
bool removeMember(const char* key, const char* end, Value* removed);
bool removeMember(const char* begin, const char* end, Value* removed);
/** \brief Remove the indexed array element.

O(n) expensive operations.
@@ -493,7 +515,7 @@ Json::Value obj_value(Json::objectValue); // {}
/// \param key may contain embedded nulls.
bool isMember(const std::string& key) const;
/// Same as isMember(std::string const& key)const
bool isMember(const char* key, const char* end) const;
bool isMember(const char* begin, const char* end) const;
#ifdef JSON_USE_CPPTL
/// Return true if the object has a member named key.
bool isMember(const CppTL::ConstString& key) const;
@@ -512,6 +534,7 @@ Json::Value obj_value(Json::objectValue); // {}
//# endif

/// \deprecated Always pass len.
JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.")
void setComment(const char* comment, CommentPlacement placement);
/// Comments must be //... or /* ... */
void setComment(const char* comment, size_t len, CommentPlacement placement);
@@ -531,10 +554,10 @@ Json::Value obj_value(Json::objectValue); // {}

// Accessors for the [start, limit) range of bytes within the JSON text from
// which this value was parsed, if any.
void setOffsetStart(size_t start);
void setOffsetLimit(size_t limit);
size_t getOffsetStart() const;
size_t getOffsetLimit() const;
void setOffsetStart(ptrdiff_t start);
void setOffsetLimit(ptrdiff_t limit);
ptrdiff_t getOffsetStart() const;
ptrdiff_t getOffsetLimit() const;

private:
void initBasic(ValueType type, bool allocated = false);
@@ -575,8 +598,8 @@ private:

// [start, limit) byte offsets in the source JSON text from which this Value
// was extracted.
size_t start_;
size_t limit_;
ptrdiff_t start_;
ptrdiff_t limit_;
};
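A hedged sketch of using the [start, limit) offsets above, now ptrdiff_t instead of size_t, to slice a value's source text (assumes Json::Reader fills the offsets during parse, as the existing reader does):

#include <json/reader.h>
#include <json/value.h>
#include <cstddef>
#include <string>

// Map a parsed member back to its bytes in the original document.
std::string sliceOfMember(const std::string& doc, const char* name) {
  Json::Reader reader;
  Json::Value root;
  if (!reader.parse(doc, root) || !root.isMember(name))
    return std::string();
  const Json::Value& v = root[name];
  std::ptrdiff_t b = v.getOffsetStart();   // first byte of the member's value
  std::ptrdiff_t e = v.getOffsetLimit();   // one past its last byte
  return doc.substr(b, e - b);             // e.g. "1" for doc = "{ \"a\": 1 }", name = "a"
}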

/** \brief Experimental and untested: represents an element of the "path" to
@@ -652,9 +675,6 @@ public:
typedef int difference_type;
typedef ValueIteratorBase SelfType;

ValueIteratorBase();
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);

bool operator==(const SelfType& other) const { return isEqual(other); }

bool operator!=(const SelfType& other) const { return !isEqual(other); }
@@ -702,6 +722,12 @@ private:
Value::ObjectValues::iterator current_;
// Indicates that iterator is for a null value.
bool isNull_;

public:
// For some reason, BORLAND needs these at the end, rather
// than earlier. No idea why.
ValueIteratorBase();
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
};

/** \brief const iterator for object and array value.
@@ -719,6 +745,7 @@ public:
typedef ValueConstIterator SelfType;

ValueConstIterator();
ValueConstIterator(ValueIterator const& other);

private:
/*! \internal Use by Value to create an iterator.
@@ -768,7 +795,7 @@ public:
typedef ValueIterator SelfType;

ValueIterator();
ValueIterator(const ValueConstIterator& other);
explicit ValueIterator(const ValueConstIterator& other);
ValueIterator(const ValueIterator& other);

private:
@@ -1,12 +1,11 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// DO NOT EDIT. This file (and "version") is generated by CMake.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "1.6.0"
# define JSONCPP_VERSION_STRING "1.7.0"
# define JSONCPP_VERSION_MAJOR 1
# define JSONCPP_VERSION_MINOR 6
# define JSONCPP_VERSION_MINOR 7
# define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
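The packed hexadecimal value above allows compile-time version gating; a minimal sketch (assumes <json/version.h> is on the include path):

#include <json/version.h>

// Require at least jsoncpp 1.7.0: the hex value packs
// (major << 24) | (minor << 16) | (patch << 8).
#if JSONCPP_VERSION_HEXA < ((1 << 24) | (7 << 16) | (0 << 8))
#error "jsoncpp 1.7.0 or newer is required"
#endif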
@@ -99,6 +99,10 @@ public:
Strictly speaking, this is not valid JSON. But when the output is being
fed to a browser's Javascript, it makes for smaller output and the
browser can handle the output just fine.
- "useSpecialFloats": false or true
- If true, outputs non-finite floating point values in the following way:
NaN values as "NaN", positive infinity as "Infinity", and negative infinity
as "-Infinity".

You can examine 'settings_` yourself
to see the defaults. You can also write and read them just like any
@@ -108,12 +112,12 @@ public:
Json::Value settings_;

StreamWriterBuilder();
virtual ~StreamWriterBuilder();
~StreamWriterBuilder() override;

/**
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const;
StreamWriter* newStreamWriter() const override;

/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
@@ -154,7 +158,7 @@ class JSON_API FastWriter : public Writer {

public:
FastWriter();
virtual ~FastWriter() {}
~FastWriter() override {}

void enableYAMLCompatibility();

@@ -168,7 +172,7 @@ public:
void omitEndingLineFeed();

public: // overridden from Writer
virtual std::string write(const Value& root);
std::string write(const Value& root) override;

private:
void writeValue(const Value& value);
@@ -206,14 +210,14 @@ private:
class JSON_API StyledWriter : public Writer {
public:
StyledWriter();
virtual ~StyledWriter() {}
~StyledWriter() override {}

public: // overridden from Writer
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param root Value to serialize.
* \return String containing the JSON document that represents the root value.
*/
virtual std::string write(const Value& root);
std::string write(const Value& root) override;

private:
void writeValue(const Value& value);
@@ -234,8 +238,8 @@ private:
ChildValues childValues_;
std::string document_;
std::string indentString_;
int rightMargin_;
int indentSize_;
unsigned int rightMargin_;
unsigned int indentSize_;
bool addChildValues_;
};

@@ -298,7 +302,7 @@ private:
ChildValues childValues_;
std::ostream* document_;
std::string indentString_;
int rightMargin_;
unsigned int rightMargin_;
std::string indentation_;
bool addChildValues_ : 1;
bool indented_ : 1;
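A hedged sketch of driving the "useSpecialFloats" setting documented above through StreamWriterBuilder, with Json::writeString as declared in this header (illustrative only, not part of the imported sources):

#include <json/writer.h>
#include <string>

// Serialize with non-finite doubles written as NaN / Infinity / -Infinity.
std::string writeWithSpecialFloats(const Json::Value& root) {
  Json::StreamWriterBuilder builder;
  builder["useSpecialFloats"] = true;
  return Json::writeString(builder, root);
}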
|
||||
@@ -1,119 +1,119 @@
|
||||
<?xml version="1.0" encoding="Windows-1252"?>
|
||||
<VisualStudioProject
|
||||
ProjectType="Visual C++"
|
||||
Version="7.10"
|
||||
Name="jsontest"
|
||||
ProjectGUID="{25AF2DD2-D396-4668-B188-488C33B8E620}"
|
||||
Keyword="Win32Proj">
|
||||
<Platforms>
|
||||
<Platform
|
||||
Name="Win32"/>
|
||||
</Platforms>
|
||||
<Configurations>
|
||||
<Configuration
|
||||
Name="Debug|Win32"
|
||||
OutputDirectory="../../build/vs71/debug/jsontest"
|
||||
IntermediateDirectory="../../build/vs71/debug/jsontest"
|
||||
ConfigurationType="1"
|
||||
CharacterSet="2">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
|
||||
MinimalRebuild="TRUE"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
UsePrecompiledHeader="0"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="4"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLinkerTool"
|
||||
OutputFile="$(OutDir)/jsontest.exe"
|
||||
LinkIncremental="2"
|
||||
GenerateDebugInformation="TRUE"
|
||||
ProgramDatabaseFile="$(OutDir)/jsontest.pdb"
|
||||
SubSystem="1"
|
||||
TargetMachine="1"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCWebDeploymentTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
<Configuration
|
||||
Name="Release|Win32"
|
||||
OutputDirectory="../../build/vs71/release/jsontest"
|
||||
IntermediateDirectory="../../build/vs71/release/jsontest"
|
||||
ConfigurationType="1"
|
||||
CharacterSet="2">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
|
||||
RuntimeLibrary="0"
|
||||
UsePrecompiledHeader="0"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="3"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLinkerTool"
|
||||
OutputFile="$(OutDir)/jsontest.exe"
|
||||
LinkIncremental="1"
|
||||
GenerateDebugInformation="TRUE"
|
||||
SubSystem="1"
|
||||
OptimizeReferences="2"
|
||||
EnableCOMDATFolding="2"
|
||||
TargetMachine="1"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCWebDeploymentTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
</Configurations>
|
||||
<References>
|
||||
</References>
|
||||
<Files>
|
||||
<File
|
||||
RelativePath="..\..\src\jsontestrunner\main.cpp">
|
||||
</File>
|
||||
</Files>
|
||||
<Globals>
|
||||
</Globals>
|
||||
</VisualStudioProject>
|
||||
|
||||
|
||||
@@ -1,205 +1,205 @@
|
||||
<?xml version="1.0" encoding="Windows-1252"?>
|
||||
<VisualStudioProject
|
||||
ProjectType="Visual C++"
|
||||
Version="7.10"
|
||||
Name="lib_json"
|
||||
ProjectGUID="{B84F7231-16CE-41D8-8C08-7B523FF4225B}"
|
||||
Keyword="Win32Proj">
|
||||
<Platforms>
|
||||
<Platform
|
||||
Name="Win32"/>
|
||||
</Platforms>
|
||||
<Configurations>
|
||||
<Configuration
|
||||
Name="Debug|Win32"
|
||||
OutputDirectory="../../build/vs71/debug/lib_json"
|
||||
IntermediateDirectory="../../build/vs71/debug/lib_json"
|
||||
ConfigurationType="4"
|
||||
CharacterSet="2">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;_DEBUG;_LIB"
|
||||
StringPooling="TRUE"
|
||||
MinimalRebuild="TRUE"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
EnableFunctionLevelLinking="TRUE"
|
||||
DisableLanguageExtensions="TRUE"
|
||||
ForceConformanceInForLoopScope="FALSE"
|
||||
RuntimeTypeInfo="TRUE"
|
||||
UsePrecompiledHeader="0"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="4"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLibrarianTool"
|
||||
OutputFile="$(OutDir)/json_vc71_libmtd.lib"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
<Configuration
|
||||
Name="Release|Win32"
|
||||
OutputDirectory="../../build/vs71/release/lib_json"
|
||||
IntermediateDirectory="../../build/vs71/release/lib_json"
|
||||
ConfigurationType="4"
|
||||
CharacterSet="2"
|
||||
WholeProgramOptimization="TRUE">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
GlobalOptimizations="TRUE"
|
||||
EnableIntrinsicFunctions="TRUE"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_LIB"
|
||||
StringPooling="TRUE"
|
||||
RuntimeLibrary="0"
|
||||
EnableFunctionLevelLinking="TRUE"
|
||||
DisableLanguageExtensions="TRUE"
|
||||
ForceConformanceInForLoopScope="FALSE"
|
||||
RuntimeTypeInfo="TRUE"
|
||||
UsePrecompiledHeader="0"
|
||||
AssemblerOutput="4"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="3"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLibrarianTool"
|
||||
OutputFile="$(OutDir)/json_vc71_libmt.lib"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
<Configuration
|
||||
Name="dummy|Win32"
|
||||
OutputDirectory="$(ConfigurationName)"
|
||||
IntermediateDirectory="$(ConfigurationName)"
|
||||
ConfigurationType="2"
|
||||
CharacterSet="2"
|
||||
WholeProgramOptimization="TRUE">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
GlobalOptimizations="TRUE"
|
||||
EnableIntrinsicFunctions="TRUE"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_LIB"
|
||||
StringPooling="TRUE"
|
||||
RuntimeLibrary="4"
|
||||
EnableFunctionLevelLinking="TRUE"
|
||||
DisableLanguageExtensions="TRUE"
|
||||
ForceConformanceInForLoopScope="FALSE"
|
||||
RuntimeTypeInfo="TRUE"
|
||||
UsePrecompiledHeader="0"
|
||||
AssemblerOutput="4"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="3"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLinkerTool"
|
||||
GenerateDebugInformation="TRUE"
|
||||
SubSystem="2"
|
||||
OptimizeReferences="2"
|
||||
EnableCOMDATFolding="2"
|
||||
TargetMachine="1"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCWebDeploymentTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
</Configurations>
|
||||
<References>
|
||||
</References>
|
||||
<Files>
|
||||
<File
|
||||
RelativePath="..\..\include\json\autolink.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\config.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\features.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\forwards.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\json.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\lib_json\json_reader.cpp">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\lib_json\json_value.cpp">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\lib_json\json_valueiterator.inl">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\lib_json\json_writer.cpp">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\reader.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\value.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\include\json\writer.h">
|
||||
</File>
|
||||
</Files>
|
||||
<Globals>
|
||||
</Globals>
|
||||
</VisualStudioProject>
|
||||
|
||||
|
||||
@@ -1,130 +1,130 @@
|
||||
<?xml version="1.0" encoding="Windows-1252"?>
|
||||
<VisualStudioProject
|
||||
ProjectType="Visual C++"
|
||||
Version="7.10"
|
||||
Name="test_lib_json"
|
||||
ProjectGUID="{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
|
||||
RootNamespace="test_lib_json"
|
||||
Keyword="Win32Proj">
|
||||
<Platforms>
|
||||
<Platform
|
||||
Name="Win32"/>
|
||||
</Platforms>
|
||||
<Configurations>
|
||||
<Configuration
|
||||
Name="Debug|Win32"
|
||||
OutputDirectory="../../build/vs71/debug/test_lib_json"
|
||||
IntermediateDirectory="../../build/vs71/debug/test_lib_json"
|
||||
ConfigurationType="1"
|
||||
CharacterSet="2">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
Optimization="0"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
|
||||
MinimalRebuild="TRUE"
|
||||
BasicRuntimeChecks="3"
|
||||
RuntimeLibrary="1"
|
||||
UsePrecompiledHeader="0"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="4"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLinkerTool"
|
||||
OutputFile="$(OutDir)/test_lib_json.exe"
|
||||
LinkIncremental="2"
|
||||
GenerateDebugInformation="TRUE"
|
||||
ProgramDatabaseFile="$(OutDir)/test_lib_json.pdb"
|
||||
SubSystem="1"
|
||||
TargetMachine="1"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"
|
||||
Description="Running all unit tests"
|
||||
CommandLine="$(TargetPath)"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCWebDeploymentTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
<Configuration
|
||||
Name="Release|Win32"
|
||||
OutputDirectory="../../build/vs71/release/test_lib_json"
|
||||
IntermediateDirectory="../../build/vs71/release/test_lib_json"
|
||||
ConfigurationType="1"
|
||||
CharacterSet="2">
|
||||
<Tool
|
||||
Name="VCCLCompilerTool"
|
||||
AdditionalIncludeDirectories="../../include"
|
||||
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
|
||||
RuntimeLibrary="0"
|
||||
UsePrecompiledHeader="0"
|
||||
WarningLevel="3"
|
||||
Detect64BitPortabilityProblems="TRUE"
|
||||
DebugInformationFormat="3"/>
|
||||
<Tool
|
||||
Name="VCCustomBuildTool"/>
|
||||
<Tool
|
||||
Name="VCLinkerTool"
|
||||
OutputFile="$(OutDir)/test_lib_json.exe"
|
||||
LinkIncremental="1"
|
||||
GenerateDebugInformation="TRUE"
|
||||
SubSystem="1"
|
||||
OptimizeReferences="2"
|
||||
EnableCOMDATFolding="2"
|
||||
TargetMachine="1"/>
|
||||
<Tool
|
||||
Name="VCMIDLTool"/>
|
||||
<Tool
|
||||
Name="VCPostBuildEventTool"
|
||||
Description="Running all unit tests"
|
||||
CommandLine="$(TargetPath)"/>
|
||||
<Tool
|
||||
Name="VCPreBuildEventTool"/>
|
||||
<Tool
|
||||
Name="VCPreLinkEventTool"/>
|
||||
<Tool
|
||||
Name="VCResourceCompilerTool"/>
|
||||
<Tool
|
||||
Name="VCWebServiceProxyGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCXMLDataGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCWebDeploymentTool"/>
|
||||
<Tool
|
||||
Name="VCManagedWrapperGeneratorTool"/>
|
||||
<Tool
|
||||
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
|
||||
</Configuration>
|
||||
</Configurations>
|
||||
<References>
|
||||
</References>
|
||||
<Files>
|
||||
<File
|
||||
RelativePath="..\..\src\test_lib_json\jsontest.cpp">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\test_lib_json\jsontest.h">
|
||||
</File>
|
||||
<File
|
||||
RelativePath="..\..\src\test_lib_json\main.cpp">
|
||||
</File>
|
||||
</Files>
|
||||
<Globals>
|
||||
</Globals>
|
||||
</VisualStudioProject>
|
||||
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2010 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
"""Tag the sandbox for release, make source and doc tarballs.
|
||||
|
||||
Requires Python 2.6
|
||||
@@ -14,6 +19,7 @@ python makerelease.py 0.5.0 0.6.0-dev
|
||||
Note: This was for Subversion. Now that we are in GitHub, we do not
|
||||
need to build versioned tarballs anymore, so makerelease.py is defunct.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import os.path
|
||||
import subprocess
|
||||
|
||||
384
3P/jsoncpp/makerelease.py.orig
Normal file
@@ -0,0 +1,384 @@
|
||||
"""Tag the sandbox for release, make source and doc tarballs.
|
||||
|
||||
Requires Python 2.6
|
||||
|
||||
Example of invocation (use to test the script):
|
||||
python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
|
||||
|
||||
When testing this script:
|
||||
python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
|
||||
|
||||
Example of invocation when doing a release:
|
||||
python makerelease.py 0.5.0 0.6.0-dev
|
||||
|
||||
Note: This was for Subversion. Now that we are in GitHub, we do not
|
||||
need to build versioned tarballs anymore, so makerelease.py is defunct.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import os.path
|
||||
import subprocess
|
||||
import sys
|
||||
import doxybuild
|
||||
import subprocess
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
import shutil
|
||||
import urllib2
|
||||
import tempfile
|
||||
import os
|
||||
import time
|
||||
from devtools import antglob, fixeol, tarball
|
||||
import amalgamate
|
||||
|
||||
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
|
||||
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
|
||||
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
|
||||
SOURCEFORGE_PROJECT = 'jsoncpp'
|
||||
|
||||
def set_version(version):
|
||||
with open('version','wb') as f:
|
||||
f.write(version.strip())
|
||||
|
||||
def rmdir_if_exist(dir_path):
|
||||
if os.path.isdir(dir_path):
|
||||
shutil.rmtree(dir_path)
|
||||
|
||||
class SVNError(Exception):
|
||||
pass
|
||||
|
||||
def svn_command(command, *args):
|
||||
cmd = ['svn', '--non-interactive', command] + list(args)
|
||||
print('Running:', ' '.join(cmd))
|
||||
process = subprocess.Popen(cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
stdout = process.communicate()[0]
|
||||
if process.returncode:
|
||||
error = SVNError('SVN command failed:\n' + stdout)
|
||||
error.returncode = process.returncode
|
||||
raise error
|
||||
return stdout
|
||||
|
||||
def check_no_pending_commit():
|
||||
"""Checks that there is no pending commit in the sandbox."""
|
||||
stdout = svn_command('status', '--xml')
|
||||
etree = ElementTree.fromstring(stdout)
|
||||
msg = []
|
||||
for entry in etree.getiterator('entry'):
|
||||
path = entry.get('path')
|
||||
status = entry.find('wc-status').get('item')
|
||||
if status != 'unversioned' and path != 'version':
|
||||
msg.append('File "%s" has pending change (status="%s")' % (path, status))
|
||||
if msg:
|
||||
msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!')
|
||||
return '\n'.join(msg)
|
||||
|
||||
def svn_join_url(base_url, suffix):
|
||||
if not base_url.endswith('/'):
|
||||
base_url += '/'
|
||||
if suffix.startswith('/'):
|
||||
suffix = suffix[1:]
|
||||
return base_url + suffix
|
||||
|
||||
def svn_check_if_tag_exist(tag_url):
|
||||
"""Checks if a tag exist.
|
||||
Returns: True if the tag exist, False otherwise.
|
||||
"""
|
||||
try:
|
||||
list_stdout = svn_command('list', tag_url)
|
||||
except SVNError as e:
|
||||
if e.returncode != 1 or not str(e).find('tag_url'):
|
||||
raise e
|
||||
# otherwise ignore error, meaning tag does not exist
|
||||
return False
|
||||
return True
|
||||
|
||||
def svn_commit(message):
|
||||
"""Commit the sandbox, providing the specified comment.
|
||||
"""
|
||||
svn_command('ci', '-m', message)
|
||||
|
||||
def svn_tag_sandbox(tag_url, message):
|
||||
"""Makes a tag based on the sandbox revisions.
|
||||
"""
|
||||
svn_command('copy', '-m', message, '.', tag_url)
|
||||
|
||||
def svn_remove_tag(tag_url, message):
|
||||
"""Removes an existing tag.
|
||||
"""
|
||||
svn_command('delete', '-m', message, tag_url)
|
||||
|
||||
def svn_export(tag_url, export_dir):
|
||||
"""Exports the tag_url revision to export_dir.
|
||||
Target directory, including its parent is created if it does not exist.
|
||||
If the directory export_dir exist, it is deleted before export proceed.
|
||||
"""
|
||||
rmdir_if_exist(export_dir)
|
||||
svn_command('export', tag_url, export_dir)
|
||||
|
||||
def fix_sources_eol(dist_dir):
|
||||
"""Set file EOL for tarball distribution.
|
||||
"""
|
||||
print('Preparing exported source file EOL for distribution...')
|
||||
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
|
||||
win_sources = antglob.glob(dist_dir,
|
||||
includes = '**/*.sln **/*.vcproj',
|
||||
prune_dirs = prune_dirs)
|
||||
unix_sources = antglob.glob(dist_dir,
|
||||
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
|
||||
sconscript *.json *.expected AUTHORS LICENSE''',
|
||||
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
|
||||
prune_dirs = prune_dirs)
|
||||
for path in win_sources:
|
||||
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
|
||||
for path in unix_sources:
|
||||
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')
|
||||
|
||||
def download(url, target_path):
|
||||
"""Download file represented by url to target_path.
|
||||
"""
|
||||
f = urllib2.urlopen(url)
|
||||
try:
|
||||
data = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
fout = open(target_path, 'wb')
|
||||
try:
|
||||
fout.write(data)
|
||||
finally:
|
||||
fout.close()
|
||||
|
||||
def check_compile(distcheck_top_dir, platform):
|
||||
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
|
||||
print('Running:', ' '.join(cmd))
|
||||
log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
|
||||
flog = open(log_path, 'wb')
|
||||
try:
|
||||
process = subprocess.Popen(cmd,
|
||||
stdout=flog,
|
||||
stderr=subprocess.STDOUT,
|
||||
cwd=distcheck_top_dir)
|
||||
stdout = process.communicate()[0]
|
||||
status = (process.returncode == 0)
|
||||
finally:
|
||||
flog.close()
|
||||
return (status, log_path)
|
||||
|
||||
def write_tempfile(content, **kwargs):
|
||||
fd, path = tempfile.mkstemp(**kwargs)
|
||||
f = os.fdopen(fd, 'wt')
|
||||
try:
|
||||
f.write(content)
|
||||
finally:
|
||||
f.close()
|
||||
return path
|
||||
|
||||
class SFTPError(Exception):
|
||||
pass
|
||||
|
||||
def run_sftp_batch(userhost, sftp, batch, retry=0):
|
||||
path = write_tempfile(batch, suffix='.sftp', text=True)
|
||||
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
|
||||
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
|
||||
error = None
|
||||
for retry_index in range(0, max(1,retry)):
|
||||
heading = retry_index == 0 and 'Running:' or 'Retrying:'
|
||||
print(heading, ' '.join(cmd))
|
||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
stdout = process.communicate()[0]
|
||||
if process.returncode != 0:
|
||||
error = SFTPError('SFTP batch failed:\n' + stdout)
|
||||
else:
|
||||
break
|
||||
if error:
|
||||
raise error
|
||||
return stdout
|
||||
|
||||
def sourceforge_web_synchro(sourceforge_project, doc_dir,
|
||||
user=None, sftp='sftp'):
|
||||
"""Notes: does not synchronize sub-directory of doc-dir.
|
||||
"""
|
||||
userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
|
||||
stdout = run_sftp_batch(userhost, sftp, """
|
||||
cd htdocs
|
||||
dir
|
||||
exit
|
||||
""")
|
||||
existing_paths = set()
|
||||
collect = 0
|
||||
for line in stdout.split('\n'):
|
||||
line = line.strip()
|
||||
if not collect and line.endswith('> dir'):
|
||||
collect = True
|
||||
elif collect and line.endswith('> exit'):
|
||||
break
|
||||
elif collect == 1:
|
||||
collect = 2
|
||||
elif collect == 2:
|
||||
path = line.strip().split()[-1:]
|
||||
if path and path[0] not in ('.', '..'):
|
||||
existing_paths.add(path[0])
|
||||
upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
|
||||
paths_to_remove = existing_paths - upload_paths
|
||||
if paths_to_remove:
|
||||
print('Removing the following file from web:')
|
||||
print('\n'.join(paths_to_remove))
|
||||
stdout = run_sftp_batch(userhost, sftp, """cd htdocs
|
||||
rm %s
|
||||
exit""" % ' '.join(paths_to_remove))
|
||||
print('Uploading %d files:' % len(upload_paths))
|
||||
batch_size = 10
|
||||
upload_paths = list(upload_paths)
|
||||
start_time = time.time()
|
||||
for index in range(0,len(upload_paths),batch_size):
|
||||
paths = upload_paths[index:index+batch_size]
|
||||
file_per_sec = (time.time() - start_time) / (index+1)
|
||||
remaining_files = len(upload_paths) - index
|
||||
remaining_sec = file_per_sec * remaining_files
|
||||
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
|
||||
run_sftp_batch(userhost, sftp, """cd htdocs
|
||||
lcd %s
|
||||
mput %s
|
||||
exit""" % (doc_dir, ' '.join(paths)), retry=3)
|
||||
|
||||
def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
|
||||
userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
|
||||
run_sftp_batch(userhost, sftp, """
|
||||
mput %s
|
||||
exit
|
||||
""" % (' '.join(paths),))
|
||||
|
||||
|
||||
def main():
|
||||
usage = """%prog release_version next_dev_version
|
||||
Update 'version' file to release_version and commit.
|
||||
Generates the document tarball.
|
||||
Tags the sandbox revision with release_version.
|
||||
Update 'version' file to next_dev_version and commit.
|
||||
|
||||
Performs an svn export of tag release version, and build a source tarball.
|
||||
|
||||
Must be started in the project top directory.
|
||||
|
||||
Warning: --force should only be used when developping/testing the release script.
|
||||
"""
|
||||
from optparse import OptionParser
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
|
||||
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
|
||||
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
|
||||
help="""Path to Doxygen tool. [Default: %default]""")
|
||||
parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
|
||||
help="""Ignore pending commit. [Default: %default]""")
|
||||
parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
|
||||
help="""Overwrite release existing tag if it exist. [Default: %default]""")
|
||||
parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
|
||||
help="""Comma separated list of platform passed to scons for build check.""")
|
||||
parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
|
||||
help="""Skips build check.""")
|
||||
parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
|
||||
help="""Do not update web site.""")
|
||||
parser.add_option('-u', '--upload-user', dest="user", action='store',
|
||||
help="""Sourceforge user for SFTP documentation upload.""")
|
||||
parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
|
||||
help="""Path of the SFTP compatible binary used to upload the documentation.""")
|
||||
parser.enable_interspersed_args()
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if len(args) != 2:
|
||||
parser.error('release_version missing on command-line.')
|
||||
release_version = args[0]
|
||||
next_version = args[1]
|
||||
|
||||
if not options.platforms and not options.no_test:
|
||||
parser.error('You must specify either --platform or --no-test option.')
|
||||
|
||||
if options.ignore_pending_commit:
|
||||
msg = ''
|
||||
else:
|
||||
msg = check_no_pending_commit()
|
||||
if not msg:
|
||||
print('Setting version to', release_version)
|
||||
set_version(release_version)
|
||||
svn_commit('Release ' + release_version)
|
||||
tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
|
||||
if svn_check_if_tag_exist(tag_url):
|
||||
if options.retag_release:
|
||||
svn_remove_tag(tag_url, 'Overwriting previous tag')
|
||||
else:
|
||||
print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
|
||||
sys.exit(1)
|
||||
svn_tag_sandbox(tag_url, 'Release ' + release_version)
|
||||
|
||||
print('Generated doxygen document...')
|
||||
## doc_dirname = r'jsoncpp-api-html-0.5.0'
|
||||
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
|
||||
doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
|
||||
doc_distcheck_dir = 'dist/doccheck'
|
||||
tarball.decompress(doc_tarball_path, doc_distcheck_dir)
|
||||
doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)
|
||||
|
||||
export_dir = 'dist/export'
|
||||
svn_export(tag_url, export_dir)
|
||||
fix_sources_eol(export_dir)
|
||||
|
||||
source_dir = 'jsoncpp-src-' + release_version
|
||||
source_tarball_path = 'dist/%s.tar.gz' % source_dir
|
||||
print('Generating source tarball to', source_tarball_path)
|
||||
tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)
|
||||
|
||||
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
|
||||
print('Generating amalgamation source tarball to', amalgamation_tarball_path)
|
||||
amalgamation_dir = 'dist/amalgamation'
|
||||
amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
|
||||
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
|
||||
tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
|
||||
amalgamation_dir, prefix_dir=amalgamation_source_dir)
|
||||
|
||||
# Decompress source tarball, download and install scons-local
|
||||
distcheck_dir = 'dist/distcheck'
|
||||
distcheck_top_dir = distcheck_dir + '/' + source_dir
|
||||
print('Decompressing source tarball to', distcheck_dir)
|
||||
rmdir_if_exist(distcheck_dir)
|
||||
tarball.decompress(source_tarball_path, distcheck_dir)
|
||||
scons_local_path = 'dist/scons-local.tar.gz'
|
||||
print('Downloading scons-local to', scons_local_path)
|
||||
download(SCONS_LOCAL_URL, scons_local_path)
|
||||
print('Decompressing scons-local to', distcheck_top_dir)
|
||||
tarball.decompress(scons_local_path, distcheck_top_dir)
|
||||
|
||||
# Run compilation
|
||||
print('Compiling decompressed tarball')
|
||||
all_build_status = True
|
||||
for platform in options.platforms.split(','):
|
||||
print('Testing platform:', platform)
|
||||
build_status, log_path = check_compile(distcheck_top_dir, platform)
|
||||
print('see build log:', log_path)
|
||||
print(build_status and '=> ok' or '=> FAILED')
|
||||
all_build_status = all_build_status and build_status
|
||||
if not build_status:
|
||||
print('Testing failed on at least one platform, aborting...')
|
||||
svn_remove_tag(tag_url, 'Removing tag due to failed testing')
|
||||
sys.exit(1)
|
||||
if options.user:
|
||||
if not options.no_web:
|
||||
print('Uploading documentation using user', options.user)
|
||||
sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
|
||||
print('Completed documentation upload')
|
||||
print('Uploading source and documentation tarballs for release using user', options.user)
|
||||
sourceforge_release_tarball(SOURCEFORGE_PROJECT,
|
||||
[source_tarball_path, doc_tarball_path],
|
||||
user=options.user, sftp=options.sftp)
|
||||
print('Source and doc release tarballs uploaded')
|
||||
else:
|
||||
print('No upload user specified. Web site and download tarbal were not uploaded.')
|
||||
print('Tarball can be found at:', doc_tarball_path)
|
||||
|
||||
# Set next version number and commit
|
||||
set_version(next_version)
|
||||
svn_commit('Released ' + release_version)
|
||||
else:
|
||||
sys.stderr.write(msg + '\n')
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,7 +1,7 @@
|
||||
prefix=@CMAKE_INSTALL_PREFIX@
|
||||
exec_prefix=${prefix}
|
||||
libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@
|
||||
includedir=${prefix}/@INCLUDE_INSTALL_DIR@
|
||||
libdir=@LIBRARY_INSTALL_DIR@
|
||||
includedir=@INCLUDE_INSTALL_DIR@
|
||||
|
||||
Name: jsoncpp
|
||||
Description: A C++ library for interacting with JSON
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2009 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2007 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
import os
|
||||
import os.path
|
||||
from fnmatch import fnmatch
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2010 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
import re
|
||||
from SCons.Script import * # the usual scons stuff you get in a SConscript
|
||||
import collections
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright 2007 Baptiste Lepilleur
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
"""tarball
|
||||
|
||||
Tool-specific initialization for tarball.
|
||||
|
||||
@@ -2,4 +2,4 @@ ADD_SUBDIRECTORY(lib_json)
|
||||
IF(JSONCPP_WITH_TESTS)
|
||||
ADD_SUBDIRECTORY(jsontestrunner)
|
||||
ADD_SUBDIRECTORY(test_lib_json)
|
||||
ENDIF(JSONCPP_WITH_TESTS)
|
||||
ENDIF()
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
FIND_PACKAGE(PythonInterp 2.6)
|
||||
|
||||
IF(JSONCPP_LIB_BUILD_SHARED)
|
||||
ADD_DEFINITIONS( -DJSON_DLL )
|
||||
ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
|
||||
ADD_EXECUTABLE(jsontestrunner_exe
|
||||
main.cpp
|
||||
)
|
||||
|
||||
IF(JSONCPP_LIB_BUILD_SHARED)
|
||||
IF(BUILD_SHARED_LIBS)
|
||||
ADD_DEFINITIONS( -DJSON_DLL )
|
||||
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
|
||||
ELSE(JSONCPP_LIB_BUILD_SHARED)
|
||||
ELSE(BUILD_SHARED_LIBS)
|
||||
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
|
||||
ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
ENDIF()
|
||||
|
||||
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)
|
||||
|
||||
@@ -25,4 +22,4 @@ IF(PYTHONINTERP_FOUND)
|
||||
DEPENDS jsontestrunner_exe jsoncpp_test
|
||||
)
|
||||
ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
|
||||
ENDIF(PYTHONINTERP_FOUND)
|
||||
ENDIF()
|
||||
|
||||
@@ -57,12 +57,13 @@ static std::string readInputTestFile(const char* path) {
|
||||
if (!file)
|
||||
return std::string("");
|
||||
fseek(file, 0, SEEK_END);
|
||||
long size = ftell(file);
|
||||
long const size = ftell(file);
|
||||
unsigned long const usize = static_cast<unsigned long const>(size);
|
||||
fseek(file, 0, SEEK_SET);
|
||||
std::string text;
|
||||
char* buffer = new char[size + 1];
|
||||
buffer[size] = 0;
|
||||
if (fread(buffer, 1, size, file) == (unsigned long)size)
|
||||
if (fread(buffer, 1, usize, file) == usize)
|
||||
text = buffer;
|
||||
fclose(file);
|
||||
delete[] buffer;
|
||||
@@ -104,8 +105,8 @@ printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") {
|
||||
break;
|
||||
case Json::arrayValue: {
|
||||
fprintf(fout, "%s=[]\n", path.c_str());
|
||||
int size = value.size();
|
||||
for (int index = 0; index < size; ++index) {
|
||||
Json::ArrayIndex size = value.size();
|
||||
for (Json::ArrayIndex index = 0; index < size; ++index) {
|
||||
static char buffer[16];
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
|
||||
sprintf_s(buffer, sizeof(buffer), "[%d]", index);
|
||||
@@ -310,12 +311,12 @@ static int runTest(Options const& opts)
|
||||
}
|
||||
int main(int argc, const char* argv[]) {
|
||||
Options opts;
|
||||
try {
|
||||
int exitCode = parseCommandLine(argc, argv, &opts);
|
||||
if (exitCode != 0) {
|
||||
printf("Failed to parse command-line.");
|
||||
return exitCode;
|
||||
}
|
||||
try {
|
||||
return runTest(opts);
|
||||
}
|
||||
catch (const std::exception& e) {
|
||||
|
||||
@@ -1,15 +1,8 @@
OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
OPTION(JSONCPP_LIB_BUILD_STATIC "Build jsoncpp_lib static library." ON)

IF(BUILD_SHARED_LIBS)
SET(JSONCPP_LIB_BUILD_SHARED ON)
ENDIF(BUILD_SHARED_LIBS)

if( CMAKE_COMPILER_IS_GNUCXX )
#Get compiler version.
execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
OUTPUT_VARIABLE GNUCXX_VERSION )


#-Werror=* was introduced -after- GCC 4.1.2
if( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
@@ -44,13 +37,14 @@ IF(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT EXPORT jsoncpp)
ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT)
ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
ENDIF()

IF(JSONCPP_LIB_BUILD_SHARED)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )

INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
@@ -61,14 +55,15 @@ IF(JSONCPP_LIB_BUILD_SHARED)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
ENDIF()

ENDIF()

IF(JSONCPP_LIB_BUILD_STATIC)
IF(BUILD_STATIC_LIBS)
ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )

INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
@@ -80,6 +75,6 @@ IF(JSONCPP_LIB_BUILD_STATIC)
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
)
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
ENDIF()

ENDIF()

@@ -4,9 +4,9 @@
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <jsoncpp/assertions.h>
|
||||
#include <jsoncpp/reader.h>
|
||||
#include <jsoncpp/value.h>
|
||||
#include <json/assertions.h>
|
||||
#include <json/reader.h>
|
||||
#include <json/value.h>
|
||||
#include "json_tool.h"
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <utility>
|
||||
@@ -17,10 +17,27 @@
|
||||
#include <sstream>
|
||||
#include <memory>
|
||||
#include <set>
|
||||
#include <limits>
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below
|
||||
#if defined(_MSC_VER)
|
||||
#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
|
||||
#define snprintf sprintf_s
|
||||
#elif _MSC_VER >= 1900 // VC++ 14.0 and above
|
||||
#define snprintf std::snprintf
|
||||
#else
|
||||
#define snprintf _snprintf
|
||||
#endif
|
||||
#elif defined(__ANDROID__) || defined(__QNXNTO__)
|
||||
#define snprintf snprintf
|
||||
#elif __cplusplus >= 201103L
|
||||
#if !defined(__MINGW32__) && !defined(__CYGWIN__)
|
||||
#define snprintf std::snprintf
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if defined(__QNXNTO__)
|
||||
#define sscanf std::sscanf
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
|
||||
// Disable warning about strdup being deprecated.
|
||||
@@ -32,7 +49,7 @@ static int stackDepth_g = 0; // see readValue()
|
||||
|
||||
namespace Json {
|
||||
|
||||
#if __cplusplus >= 201103L
|
||||
#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
|
||||
typedef std::unique_ptr<CharReader> CharReaderPtr;
|
||||
#else
|
||||
typedef std::auto_ptr<CharReader> CharReaderPtr;
|
||||
@@ -353,7 +370,7 @@ bool Reader::readComment() {
|
||||
|
||||
static std::string normalizeEOL(Reader::Location begin, Reader::Location end) {
|
||||
std::string normalized;
|
||||
normalized.reserve(end - begin);
|
||||
normalized.reserve(static_cast<size_t>(end - begin));
|
||||
Reader::Location current = begin;
|
||||
while (current != end) {
|
||||
char c = *current++;
|
||||
@@ -555,7 +572,7 @@ bool Reader::decodeNumber(Token& token, Value& decoded) {
|
||||
++current;
|
||||
// TODO: Help the compiler do the div and mod at compile time or get rid of them.
|
||||
Value::LargestUInt maxIntegerValue =
|
||||
isNegative ? Value::LargestUInt(-Value::minLargestInt)
|
||||
isNegative ? Value::LargestUInt(Value::maxLargestInt) + 1
|
||||
: Value::maxLargestUInt;
|
||||
Value::LargestUInt threshold = maxIntegerValue / 10;
|
||||
Value::LargestUInt value = 0;
|
||||
@@ -563,7 +580,7 @@ bool Reader::decodeNumber(Token& token, Value& decoded) {
|
||||
Char c = *current++;
|
||||
if (c < '0' || c > '9')
|
||||
return decodeDouble(token, decoded);
|
||||
Value::UInt digit(c - '0');
|
||||
Value::UInt digit(static_cast<Value::UInt>(c - '0'));
|
||||
if (value >= threshold) {
|
||||
// We've hit or exceeded the max value divided by 10 (rounded down). If
|
||||
// a) we've only just touched the limit, b) this is the last digit, and
|
||||
@@ -576,7 +593,9 @@ bool Reader::decodeNumber(Token& token, Value& decoded) {
|
||||
}
|
||||
value = value * 10 + digit;
|
||||
}
|
||||
if (isNegative)
|
||||
if (isNegative && value == maxIntegerValue)
|
||||
decoded = Value::minLargestInt;
|
||||
else if (isNegative)
|
||||
decoded = -Value::LargestInt(value);
|
||||
else if (value <= Value::LargestUInt(Value::maxInt))
|
||||
decoded = Value::LargestInt(value);
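Editor's note: the hunk above fixes two overflow hazards in Reader::decodeNumber — negating Value::minLargestInt, and accepting a negative literal one past the positive maximum. The guard works by checking the accumulator against maxIntegerValue / 10 before each multiply-and-add. A standalone sketch of the same idea (illustrative names and helper, not part of this commit):

#include <cstdint>
#include <string>

// Accumulate decimal digits without ever exceeding 'maxValue'.
// Assumes 'digits' contains only '0'..'9'; returns false on overflow.
static bool parseDecimalClamped(const std::string& digits, std::uint64_t maxValue,
                                std::uint64_t& out) {
  const std::uint64_t threshold = maxValue / 10;
  std::uint64_t value = 0;
  for (char c : digits) {
    const std::uint64_t digit = static_cast<std::uint64_t>(c - '0');
    if (value > threshold || (value == threshold && digit > maxValue % 10))
      return false;                 // value * 10 + digit would exceed maxValue
    value = value * 10 + digit;
  }
  out = value;
  return true;
}

For a negative literal the reader uses maxValue = LargestUInt(maxLargestInt) + 1, so "-9223372036854775808" is accepted and mapped to Value::minLargestInt, exactly as the new branch above does.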
|
||||
@@ -597,33 +616,9 @@ bool Reader::decodeDouble(Token& token) {
|
||||
|
||||
bool Reader::decodeDouble(Token& token, Value& decoded) {
|
||||
double value = 0;
|
||||
const int bufferSize = 32;
|
||||
int count;
|
||||
int length = int(token.end_ - token.start_);
|
||||
|
||||
// Sanity check to avoid buffer overflow exploits.
|
||||
if (length < 0) {
|
||||
return addError("Unable to parse token length", token);
|
||||
}
|
||||
|
||||
// Avoid using a string constant for the format control string given to
|
||||
// sscanf, as this can cause hard to debug crashes on OS X. See here for more
|
||||
// info:
|
||||
//
|
||||
// http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
|
||||
char format[] = "%lf";
|
||||
|
||||
if (length <= bufferSize) {
|
||||
Char buffer[bufferSize + 1];
|
||||
memcpy(buffer, token.start_, length);
|
||||
buffer[length] = 0;
|
||||
count = sscanf(buffer, format, &value);
|
||||
} else {
|
||||
std::string buffer(token.start_, token.end_);
|
||||
count = sscanf(buffer.c_str(), format, &value);
|
||||
}
|
||||
|
||||
if (count != 1)
|
||||
std::string buffer(token.start_, token.end_);
|
||||
std::istringstream is(buffer);
|
||||
if (!(is >> value))
|
||||
return addError("'" + std::string(token.start_, token.end_) +
|
||||
"' is not a number.",
|
||||
token);
|
||||
@@ -643,7 +638,7 @@ bool Reader::decodeString(Token& token) {
|
||||
}
|
||||
|
||||
bool Reader::decodeString(Token& token, std::string& decoded) {
|
||||
decoded.reserve(token.end_ - token.start_ - 2);
|
||||
decoded.reserve(static_cast<size_t>(token.end_ - token.start_ - 2));
|
||||
Location current = token.start_ + 1; // skip '"'
|
||||
Location end = token.end_ - 1; // do not include '"'
|
||||
while (current != end) {
|
||||
@@ -727,13 +722,13 @@ bool Reader::decodeUnicodeCodePoint(Token& token,
|
||||
bool Reader::decodeUnicodeEscapeSequence(Token& token,
|
||||
Location& current,
|
||||
Location end,
|
||||
unsigned int& unicode) {
|
||||
unsigned int& ret_unicode) {
|
||||
if (end - current < 4)
|
||||
return addError(
|
||||
"Bad unicode escape sequence in string: four digits expected.",
|
||||
token,
|
||||
current);
|
||||
unicode = 0;
|
||||
int unicode = 0;
|
||||
for (int index = 0; index < 4; ++index) {
|
||||
Char c = *current++;
|
||||
unicode *= 16;
|
||||
@@ -749,6 +744,7 @@ bool Reader::decodeUnicodeEscapeSequence(Token& token,
|
||||
token,
|
||||
current);
|
||||
}
|
||||
ret_unicode = static_cast<unsigned int>(unicode);
|
||||
return true;
|
||||
}
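For orientation, a worked trace of the escape decoder touched above (editor's illustration, not text from the commit): given the input "\u20AC", the loop folds the four hex digits in as

// unicode = 0
// '2' -> unicode = 0 * 16 + 0x2 = 0x2
// '0' -> unicode = 0x2 * 16 + 0x0 = 0x20
// 'A' -> unicode = 0x20 * 16 + 0xA = 0x20A
// 'C' -> unicode = 0x20A * 16 + 0xC = 0x20AC

and the result now reaches the caller through the new ret_unicode output parameter.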
|
||||
|
||||
@@ -763,7 +759,7 @@ Reader::addError(const std::string& message, Token& token, Location extra) {
|
||||
}
|
||||
|
||||
bool Reader::recoverFromError(TokenType skipUntilToken) {
|
||||
int errorCount = int(errors_.size());
|
||||
size_t const errorCount = errors_.size();
|
||||
Token skip;
|
||||
for (;;) {
|
||||
if (!readToken(skip))
|
||||
@@ -817,15 +813,7 @@ std::string Reader::getLocationLineAndColumn(Location location) const {
|
||||
int line, column;
|
||||
getLocationLineAndColumn(location, line, column);
|
||||
char buffer[18 + 16 + 16 + 1];
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
|
||||
#if defined(WINCE)
|
||||
_snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#else
|
||||
sprintf_s(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#endif
|
||||
#else
|
||||
snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#endif
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@@ -866,7 +854,7 @@ std::vector<Reader::StructuredError> Reader::getStructuredErrors() const {
|
||||
}
|
||||
|
||||
bool Reader::pushError(const Value& value, const std::string& message) {
|
||||
size_t length = end_ - begin_;
|
||||
ptrdiff_t const length = end_ - begin_;
|
||||
if(value.getOffsetStart() > length
|
||||
|| value.getOffsetLimit() > length)
|
||||
return false;
|
||||
@@ -883,7 +871,7 @@ bool Reader::pushError(const Value& value, const std::string& message) {
|
||||
}
|
||||
|
||||
bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) {
|
||||
size_t length = end_ - begin_;
|
||||
ptrdiff_t const length = end_ - begin_;
|
||||
if(value.getOffsetStart() > length
|
||||
|| value.getOffsetLimit() > length
|
||||
|| extra.getOffsetLimit() > length)
|
||||
@@ -908,7 +896,6 @@ bool Reader::good() const {
|
||||
class OurFeatures {
|
||||
public:
|
||||
static OurFeatures all();
|
||||
OurFeatures();
|
||||
bool allowComments_;
|
||||
bool strictRoot_;
|
||||
bool allowDroppedNullPlaceholders_;
|
||||
@@ -916,20 +903,13 @@ public:
|
||||
bool allowSingleQuotes_;
|
||||
bool failIfExtra_;
|
||||
bool rejectDupKeys_;
|
||||
bool allowSpecialFloats_;
|
||||
int stackLimit_;
|
||||
}; // OurFeatures
|
||||
|
||||
// exact copy of Implementation of class Features
|
||||
// ////////////////////////////////
|
||||
|
||||
OurFeatures::OurFeatures()
|
||||
: allowComments_(true), strictRoot_(false)
|
||||
, allowDroppedNullPlaceholders_(false), allowNumericKeys_(false)
|
||||
, allowSingleQuotes_(false)
|
||||
, failIfExtra_(false)
|
||||
{
|
||||
}
|
||||
|
||||
OurFeatures OurFeatures::all() { return OurFeatures(); }
|
||||
|
||||
// Implementation of class Reader
|
||||
@@ -941,8 +921,8 @@ public:
|
||||
typedef char Char;
|
||||
typedef const Char* Location;
|
||||
struct StructuredError {
|
||||
size_t offset_start;
|
||||
size_t offset_limit;
|
||||
ptrdiff_t offset_start;
|
||||
ptrdiff_t offset_limit;
|
||||
std::string message;
|
||||
};
|
||||
|
||||
@@ -972,6 +952,9 @@ private:
|
||||
tokenTrue,
|
||||
tokenFalse,
|
||||
tokenNull,
|
||||
tokenNaN,
|
||||
tokenPosInf,
|
||||
tokenNegInf,
|
||||
tokenArraySeparator,
|
||||
tokenMemberSeparator,
|
||||
tokenComment,
|
||||
@@ -1002,7 +985,7 @@ private:
|
||||
bool readCppStyleComment();
|
||||
bool readString();
|
||||
bool readStringSingleQuote();
|
||||
void readNumber();
|
||||
bool readNumber(bool checkInf);
|
||||
bool readValue();
|
||||
bool readObject(Token& token);
|
||||
bool readArray(Token& token);
|
||||
@@ -1054,7 +1037,9 @@ private:
|
||||
|
||||
OurReader::OurReader(OurFeatures const& features)
|
||||
: errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
|
||||
lastValue_(), commentsBefore_(), features_(features), collectComments_() {
|
||||
lastValue_(), commentsBefore_(),
|
||||
stackDepth_(0),
|
||||
features_(features), collectComments_() {
|
||||
}
|
||||
|
||||
bool OurReader::parse(const char* beginDoc,
|
||||
@@ -1156,6 +1141,30 @@ bool OurReader::readValue() {
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenNaN:
|
||||
{
|
||||
Value v(std::numeric_limits<double>::quiet_NaN());
|
||||
currentValue().swapPayload(v);
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenPosInf:
|
||||
{
|
||||
Value v(std::numeric_limits<double>::infinity());
|
||||
currentValue().swapPayload(v);
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenNegInf:
|
||||
{
|
||||
Value v(-std::numeric_limits<double>::infinity());
|
||||
currentValue().swapPayload(v);
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenArraySeparator:
|
||||
case tokenObjectEnd:
|
||||
case tokenArrayEnd:
|
||||
@@ -1236,9 +1245,16 @@ bool OurReader::readToken(Token& token) {
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
case '-':
|
||||
token.type_ = tokenNumber;
|
||||
readNumber();
|
||||
readNumber(false);
|
||||
break;
|
||||
case '-':
|
||||
if (readNumber(true)) {
|
||||
token.type_ = tokenNumber;
|
||||
} else {
|
||||
token.type_ = tokenNegInf;
|
||||
ok = features_.allowSpecialFloats_ && match("nfinity", 7);
|
||||
}
|
||||
break;
|
||||
case 't':
|
||||
token.type_ = tokenTrue;
|
||||
@@ -1252,6 +1268,22 @@ bool OurReader::readToken(Token& token) {
|
||||
token.type_ = tokenNull;
|
||||
ok = match("ull", 3);
|
||||
break;
|
||||
case 'N':
|
||||
if (features_.allowSpecialFloats_) {
|
||||
token.type_ = tokenNaN;
|
||||
ok = match("aN", 2);
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
case 'I':
|
||||
if (features_.allowSpecialFloats_) {
|
||||
token.type_ = tokenPosInf;
|
||||
ok = match("nfinity", 7);
|
||||
} else {
|
||||
ok = false;
|
||||
}
|
||||
break;
|
||||
case ',':
|
||||
token.type_ = tokenArraySeparator;
|
||||
break;
|
||||
@@ -1352,8 +1384,12 @@ bool OurReader::readCppStyleComment() {
|
||||
return true;
|
||||
}
|
||||
|
||||
void OurReader::readNumber() {
|
||||
bool OurReader::readNumber(bool checkInf) {
|
||||
const char *p = current_;
|
||||
if (checkInf && p != end_ && *p == 'I') {
|
||||
current_ = ++p;
|
||||
return false;
|
||||
}
|
||||
char c = '0'; // stopgap for already consumed character
|
||||
// integral part
|
||||
while (c >= '0' && c <= '9')
|
||||
@@ -1372,6 +1408,7 @@ void OurReader::readNumber() {
|
||||
while (c >= '0' && c <= '9')
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool OurReader::readString() {
|
||||
Char c = 0;
|
||||
@@ -1526,7 +1563,7 @@ bool OurReader::decodeNumber(Token& token, Value& decoded) {
|
||||
Char c = *current++;
|
||||
if (c < '0' || c > '9')
|
||||
return decodeDouble(token, decoded);
|
||||
Value::UInt digit(c - '0');
|
||||
Value::UInt digit(static_cast<Value::UInt>(c - '0'));
|
||||
if (value >= threshold) {
|
||||
// We've hit or exceeded the max value divided by 10 (rounded down). If
|
||||
// a) we've only just touched the limit, b) this is the last digit, and
|
||||
@@ -1562,12 +1599,13 @@ bool OurReader::decodeDouble(Token& token, Value& decoded) {
|
||||
double value = 0;
|
||||
const int bufferSize = 32;
|
||||
int count;
|
||||
int length = int(token.end_ - token.start_);
|
||||
ptrdiff_t const length = token.end_ - token.start_;
|
||||
|
||||
// Sanity check to avoid buffer overflow exploits.
|
||||
if (length < 0) {
|
||||
return addError("Unable to parse token length", token);
|
||||
}
|
||||
size_t const ulength = static_cast<size_t>(length);
|
||||
|
||||
// Avoid using a string constant for the format control string given to
|
||||
// sscanf, as this can cause hard to debug crashes on OS X. See here for more
|
||||
@@ -1578,7 +1616,7 @@ bool OurReader::decodeDouble(Token& token, Value& decoded) {
|
||||
|
||||
if (length <= bufferSize) {
|
||||
Char buffer[bufferSize + 1];
|
||||
memcpy(buffer, token.start_, length);
|
||||
memcpy(buffer, token.start_, ulength);
|
||||
buffer[length] = 0;
|
||||
count = sscanf(buffer, format, &value);
|
||||
} else {
|
||||
@@ -1606,7 +1644,7 @@ bool OurReader::decodeString(Token& token) {
|
||||
}
|
||||
|
||||
bool OurReader::decodeString(Token& token, std::string& decoded) {
|
||||
decoded.reserve(token.end_ - token.start_ - 2);
|
||||
decoded.reserve(static_cast<size_t>(token.end_ - token.start_ - 2));
|
||||
Location current = token.start_ + 1; // skip '"'
|
||||
Location end = token.end_ - 1; // do not include '"'
|
||||
while (current != end) {
|
||||
@@ -1690,13 +1728,13 @@ bool OurReader::decodeUnicodeCodePoint(Token& token,
|
||||
bool OurReader::decodeUnicodeEscapeSequence(Token& token,
|
||||
Location& current,
|
||||
Location end,
|
||||
unsigned int& unicode) {
|
||||
unsigned int& ret_unicode) {
|
||||
if (end - current < 4)
|
||||
return addError(
|
||||
"Bad unicode escape sequence in string: four digits expected.",
|
||||
token,
|
||||
current);
|
||||
unicode = 0;
|
||||
int unicode = 0;
|
||||
for (int index = 0; index < 4; ++index) {
|
||||
Char c = *current++;
|
||||
unicode *= 16;
|
||||
@@ -1712,6 +1750,7 @@ bool OurReader::decodeUnicodeEscapeSequence(Token& token,
|
||||
token,
|
||||
current);
|
||||
}
|
||||
ret_unicode = static_cast<unsigned int>(unicode);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1726,7 +1765,7 @@ OurReader::addError(const std::string& message, Token& token, Location extra) {
|
||||
}
|
||||
|
||||
bool OurReader::recoverFromError(TokenType skipUntilToken) {
|
||||
int errorCount = int(errors_.size());
|
||||
size_t errorCount = errors_.size();
|
||||
Token skip;
|
||||
for (;;) {
|
||||
if (!readToken(skip))
|
||||
@@ -1780,15 +1819,7 @@ std::string OurReader::getLocationLineAndColumn(Location location) const {
|
||||
int line, column;
|
||||
getLocationLineAndColumn(location, line, column);
|
||||
char buffer[18 + 16 + 16 + 1];
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
|
||||
#if defined(WINCE)
|
||||
_snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#else
|
||||
sprintf_s(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#endif
|
||||
#else
|
||||
snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
|
||||
#endif
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@@ -1824,7 +1855,7 @@ std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
|
||||
}
|
||||
|
||||
bool OurReader::pushError(const Value& value, const std::string& message) {
|
||||
size_t length = end_ - begin_;
|
||||
ptrdiff_t length = end_ - begin_;
|
||||
if(value.getOffsetStart() > length
|
||||
|| value.getOffsetLimit() > length)
|
||||
return false;
|
||||
@@ -1841,7 +1872,7 @@ bool OurReader::pushError(const Value& value, const std::string& message) {
|
||||
}
|
||||
|
||||
bool OurReader::pushError(const Value& value, const std::string& message, const Value& extra) {
|
||||
size_t length = end_ - begin_;
|
||||
ptrdiff_t length = end_ - begin_;
|
||||
if(value.getOffsetStart() > length
|
||||
|| value.getOffsetLimit() > length
|
||||
|| extra.getOffsetLimit() > length)
|
||||
@@ -1873,9 +1904,9 @@ public:
|
||||
: collectComments_(collectComments)
|
||||
, reader_(features)
|
||||
{}
|
||||
virtual bool parse(
|
||||
bool parse(
|
||||
char const* beginDoc, char const* endDoc,
|
||||
Value* root, std::string* errs) {
|
||||
Value* root, std::string* errs) override {
|
||||
bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
|
||||
if (errs) {
|
||||
*errs = reader_.getFormattedErrorMessages();
|
||||
@@ -1902,6 +1933,7 @@ CharReader* CharReaderBuilder::newCharReader() const
|
||||
features.stackLimit_ = settings_["stackLimit"].asInt();
|
||||
features.failIfExtra_ = settings_["failIfExtra"].asBool();
|
||||
features.rejectDupKeys_ = settings_["rejectDupKeys"].asBool();
|
||||
features.allowSpecialFloats_ = settings_["allowSpecialFloats"].asBool();
|
||||
return new OurCharReader(collectComments, features);
|
||||
}
|
||||
static void getValidReaderKeys(std::set<std::string>* valid_keys)
|
||||
@@ -1916,6 +1948,7 @@ static void getValidReaderKeys(std::set<std::string>* valid_keys)
|
||||
valid_keys->insert("stackLimit");
|
||||
valid_keys->insert("failIfExtra");
|
||||
valid_keys->insert("rejectDupKeys");
|
||||
valid_keys->insert("allowSpecialFloats");
|
||||
}
|
||||
bool CharReaderBuilder::validate(Json::Value* invalid) const
|
||||
{
|
||||
@@ -1947,8 +1980,10 @@ void CharReaderBuilder::strictMode(Json::Value* settings)
|
||||
(*settings)["allowDroppedNullPlaceholders"] = false;
|
||||
(*settings)["allowNumericKeys"] = false;
|
||||
(*settings)["allowSingleQuotes"] = false;
|
||||
(*settings)["stackLimit"] = 1000;
|
||||
(*settings)["failIfExtra"] = true;
|
||||
(*settings)["rejectDupKeys"] = true;
|
||||
(*settings)["allowSpecialFloats"] = false;
|
||||
//! [CharReaderBuilderStrictMode]
|
||||
}
|
||||
// static
|
||||
@@ -1964,6 +1999,7 @@ void CharReaderBuilder::setDefaults(Json::Value* settings)
|
||||
(*settings)["stackLimit"] = 1000;
|
||||
(*settings)["failIfExtra"] = false;
|
||||
(*settings)["rejectDupKeys"] = false;
|
||||
(*settings)["allowSpecialFloats"] = false;
|
||||
//! [CharReaderBuilderDefaults]
|
||||
}
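The two CharReaderBuilder hunks above register the new "allowSpecialFloats" setting alongside the existing ones. A minimal usage sketch (an editor's illustration assuming a C++11 toolchain, not code from this commit):

#include <json/json.h>
#include <memory>
#include <string>

// Parse a document containing the IEEE special values enabled by the new setting.
static bool parseWithSpecialFloats(const std::string& doc, Json::Value* root, std::string* errs) {
  Json::CharReaderBuilder builder;
  builder.settings_["allowSpecialFloats"] = true;
  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  return reader->parse(doc.data(), doc.data() + doc.size(), root, errs);
}

With the setting left at its default of false (see setDefaults above), a document such as {"a": NaN, "b": -Infinity} is rejected and errs describes the failure.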
|
||||
|
||||
@@ -1993,7 +2029,7 @@ std::istream& operator>>(std::istream& sin, Value& root) {
|
||||
"Error from reader: %s",
|
||||
errs.c_str());
|
||||
|
||||
throwRuntimeError("reader error");
|
||||
throwRuntimeError(errs);
|
||||
}
|
||||
return sin;
|
||||
}
|
||||
|
||||
@@ -30,8 +30,8 @@ static inline std::string codePointToUTF8(unsigned int cp) {
} else if (cp <= 0xFFFF) {
result.resize(3);
result[2] = static_cast<char>(0x80 | (0x3f & cp));
result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12)));
} else if (cp <= 0x10FFFF) {
result.resize(4);
result[3] = static_cast<char>(0x80 | (0x3f & cp));
@@ -43,7 +43,7 @@ static inline std::string codePointToUTF8(unsigned int cp) {
return result;
}
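A quick sanity check on the recast three-byte branch above (illustrative numbers, not part of the diff): for cp = 0x20AC, the euro sign,

// result[0] = static_cast<char>(0xE0 | (0xf  & (0x20AC >> 12))) == '\xE2'
// result[1] = static_cast<char>(0x80 | (0x3f & (0x20AC >> 6)))  == '\x82'
// result[2] = static_cast<char>(0x80 | (0x3f & 0x20AC))         == '\xAC'

so codePointToUTF8(0x20AC) still yields the UTF-8 sequence E2 82 AC; only the placement of the casts changes, which silences conversion warnings without altering the encoded bytes.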
/// Returns true if ch is a control character (in range [0,32[).
/// Returns true if ch is a control character (in range [1,31]).
static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }

enum {
@@ -63,7 +63,7 @@ typedef char UIntToStringBuffer[uintToStringBufferSize];
static inline void uintToString(LargestUInt value, char*& current) {
*--current = 0;
do {
*--current = char(value % 10) + '0';
*--current = static_cast<signed char>(value % 10U + static_cast<unsigned>('0'));
value /= 10;
} while (value != 0);
}
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <jsoncpp/assertions.h>
|
||||
#include <jsoncpp/value.h>
|
||||
#include <jsoncpp/writer.h>
|
||||
#include <json/assertions.h>
|
||||
#include <json/value.h>
|
||||
#include <json/writer.h>
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <math.h>
|
||||
#include <sstream>
|
||||
@@ -55,6 +55,9 @@ const LargestUInt Value::maxLargestUInt = LargestUInt(-1);
|
||||
#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
template <typename T, typename U>
|
||||
static inline bool InRange(double d, T min, U max) {
|
||||
// The casts can lose precision, but we are looking only for
|
||||
// an approximate range. Might fail on edge cases though. ~cdunn
|
||||
//return d >= static_cast<double>(min) && d <= static_cast<double>(max);
|
||||
return d >= min && d <= max;
|
||||
}
|
||||
#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
@@ -108,7 +111,7 @@ static inline char* duplicateAndPrefixStringValue(
|
||||
JSON_ASSERT_MESSAGE(length <= (unsigned)Value::maxInt - sizeof(unsigned) - 1U,
|
||||
"in Json::Value::duplicateAndPrefixStringValue(): "
|
||||
"length too big for prefixing");
|
||||
unsigned actualLength = length + sizeof(unsigned) + 1U;
|
||||
unsigned actualLength = length + static_cast<unsigned>(sizeof(unsigned)) + 1U;
|
||||
char* newString = static_cast<char*>(malloc(actualLength));
|
||||
if (newString == 0) {
|
||||
throwRuntimeError(
|
||||
@@ -125,7 +128,7 @@ inline static void decodePrefixedString(
|
||||
unsigned* length, char const** value)
|
||||
{
|
||||
if (!isPrefixed) {
|
||||
*length = strlen(prefixed);
|
||||
*length = static_cast<unsigned>(strlen(prefixed));
|
||||
*value = prefixed;
|
||||
} else {
|
||||
*length = *reinterpret_cast<unsigned const*>(prefixed);
|
||||
@@ -152,23 +155,6 @@ static inline void releaseStringValue(char* value) { free(value); }
|
||||
|
||||
namespace Json {
|
||||
|
||||
class JSON_API Exception : public std::exception {
|
||||
public:
|
||||
Exception(std::string const& msg);
|
||||
virtual ~Exception() throw();
|
||||
virtual char const* what() const throw();
|
||||
protected:
|
||||
std::string const msg_;
|
||||
};
|
||||
class JSON_API RuntimeError : public Exception {
|
||||
public:
|
||||
RuntimeError(std::string const& msg);
|
||||
};
|
||||
class JSON_API LogicError : public Exception {
|
||||
public:
|
||||
LogicError(std::string const& msg);
|
||||
};
|
||||
|
||||
Exception::Exception(std::string const& msg)
|
||||
: msg_(msg)
|
||||
{}
|
||||
@@ -232,28 +218,33 @@ void Value::CommentInfo::setComment(const char* text, size_t len) {
|
||||
// Notes: policy_ indicates if the string was allocated when
|
||||
// a string is stored.
|
||||
|
||||
Value::CZString::CZString(ArrayIndex index) : cstr_(0), index_(index) {}
|
||||
Value::CZString::CZString(ArrayIndex aindex) : cstr_(0), index_(aindex) {}
|
||||
|
||||
Value::CZString::CZString(char const* str, unsigned length, DuplicationPolicy allocate)
|
||||
: cstr_(str)
|
||||
{
|
||||
Value::CZString::CZString(char const* str, unsigned ulength, DuplicationPolicy allocate)
|
||||
: cstr_(str) {
|
||||
// allocate != duplicate
|
||||
storage_.policy_ = allocate;
|
||||
storage_.length_ = length;
|
||||
storage_.policy_ = allocate & 0x3;
|
||||
storage_.length_ = ulength & 0x3FFFFFFF;
|
||||
}
|
||||
|
||||
Value::CZString::CZString(const CZString& other)
|
||||
: cstr_(other.storage_.policy_ != noDuplication && other.cstr_ != 0
|
||||
? duplicateStringValue(other.cstr_, other.storage_.length_)
|
||||
: other.cstr_)
|
||||
{
|
||||
storage_.policy_ = (other.cstr_
|
||||
? (other.storage_.policy_ == noDuplication
|
||||
: other.cstr_) {
|
||||
storage_.policy_ = static_cast<unsigned>(other.cstr_
|
||||
? (static_cast<DuplicationPolicy>(other.storage_.policy_) == noDuplication
|
||||
? noDuplication : duplicate)
|
||||
: other.storage_.policy_);
|
||||
: static_cast<DuplicationPolicy>(other.storage_.policy_));
|
||||
storage_.length_ = other.storage_.length_;
|
||||
}
|
||||
|
||||
#if JSON_HAS_RVALUE_REFERENCES
|
||||
Value::CZString::CZString(CZString&& other)
|
||||
: cstr_(other.cstr_), index_(other.index_) {
|
||||
other.cstr_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
Value::CZString::~CZString() {
|
||||
if (cstr_ && storage_.policy_ == duplicate)
|
||||
releaseStringValue(const_cast<char*>(cstr_));
|
||||
@@ -312,9 +303,9 @@ bool Value::CZString::isStaticString() const { return storage_.policy_ == noDupl
|
||||
* memset( this, 0, sizeof(Value) )
|
||||
* This optimization is used in ValueInternalMap fast allocator.
|
||||
*/
|
||||
Value::Value(ValueType type) {
|
||||
initBasic(type);
|
||||
switch (type) {
|
||||
Value::Value(ValueType vtype) {
|
||||
initBasic(vtype);
|
||||
switch (vtype) {
|
||||
case nullValue:
|
||||
break;
|
||||
case intValue:
|
||||
@@ -442,6 +433,14 @@ Value::Value(Value const& other)
|
||||
}
|
||||
}
|
||||
|
||||
#if JSON_HAS_RVALUE_REFERENCES
|
||||
// Move constructor
|
||||
Value::Value(Value&& other) {
|
||||
initBasic(nullValue);
|
||||
swap(other);
|
||||
}
|
||||
#endif
|
||||
|
||||
Value::~Value() {
|
||||
switch (type_) {
|
||||
case nullValue:
|
||||
@@ -478,7 +477,7 @@ void Value::swapPayload(Value& other) {
|
||||
std::swap(value_, other.value_);
|
||||
int temp2 = allocated_;
|
||||
allocated_ = other.allocated_;
|
||||
other.allocated_ = temp2;
|
||||
other.allocated_ = temp2 & 0x1;
|
||||
}
|
||||
|
||||
void Value::swap(Value& other) {
|
||||
@@ -606,12 +605,12 @@ const char* Value::asCString() const {
|
||||
return this_str;
|
||||
}
|
||||
|
||||
bool Value::getString(char const** str, char const** end) const {
|
||||
bool Value::getString(char const** str, char const** cend) const {
|
||||
if (type_ != stringValue) return false;
|
||||
if (value_.string_ == 0) return false;
|
||||
unsigned length;
|
||||
decodePrefixedString(this->allocated_, this->value_.string_, &length, str);
|
||||
*end = *str + length;
|
||||
*cend = *str + length;
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -785,7 +784,8 @@ float Value::asFloat() const {
|
||||
#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
return static_cast<float>(value_.uint_);
|
||||
#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
return integerToDouble(value_.uint_);
|
||||
// This can fail (silently?) if the value is bigger than MAX_FLOAT.
|
||||
return static_cast<float>(integerToDouble(value_.uint_));
|
||||
#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
case realValue:
|
||||
return static_cast<float>(value_.real_);
|
||||
@@ -810,7 +810,8 @@ bool Value::asBool() const {
|
||||
case uintValue:
|
||||
return value_.uint_ ? true : false;
|
||||
case realValue:
|
||||
return value_.real_ ? true : false;
|
||||
// This is kind of strange. Not recommended.
|
||||
return (value_.real_ != 0.0) ? true : false;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
@@ -960,8 +961,8 @@ const Value& Value::operator[](int index) const {
|
||||
return (*this)[ArrayIndex(index)];
|
||||
}
|
||||
|
||||
void Value::initBasic(ValueType type, bool allocated) {
|
||||
type_ = type;
|
||||
void Value::initBasic(ValueType vtype, bool allocated) {
|
||||
type_ = vtype;
|
||||
allocated_ = allocated;
|
||||
comments_ = 0;
|
||||
start_ = 0;
|
||||
@@ -990,7 +991,7 @@ Value& Value::resolveReference(const char* key) {
|
||||
}
|
||||
|
||||
// @param key is not null-terminated.
|
||||
Value& Value::resolveReference(char const* key, char const* end)
|
||||
Value& Value::resolveReference(char const* key, char const* cend)
|
||||
{
|
||||
JSON_ASSERT_MESSAGE(
|
||||
type_ == nullValue || type_ == objectValue,
|
||||
@@ -998,7 +999,7 @@ Value& Value::resolveReference(char const* key, char const* end)
|
||||
if (type_ == nullValue)
|
||||
*this = Value(objectValue);
|
||||
CZString actualKey(
|
||||
key, static_cast<unsigned>(end-key), CZString::duplicateOnCopy);
|
||||
key, static_cast<unsigned>(cend-key), CZString::duplicateOnCopy);
|
||||
ObjectValues::iterator it = value_.map_->lower_bound(actualKey);
|
||||
if (it != value_.map_->end() && (*it).first == actualKey)
|
||||
return (*it).second;
|
||||
@@ -1016,13 +1017,13 @@ Value Value::get(ArrayIndex index, const Value& defaultValue) const {
|
||||
|
||||
bool Value::isValidIndex(ArrayIndex index) const { return index < size(); }
|
||||
|
||||
Value const* Value::find(char const* key, char const* end) const
|
||||
Value const* Value::find(char const* key, char const* cend) const
|
||||
{
|
||||
JSON_ASSERT_MESSAGE(
|
||||
type_ == nullValue || type_ == objectValue,
|
||||
"in Json::Value::find(key, end, found): requires objectValue or nullValue");
|
||||
if (type_ == nullValue) return NULL;
|
||||
CZString actualKey(key, static_cast<unsigned>(end-key), CZString::noDuplication);
|
||||
CZString actualKey(key, static_cast<unsigned>(cend-key), CZString::noDuplication);
|
||||
ObjectValues::const_iterator it = value_.map_->find(actualKey);
|
||||
if (it == value_.map_->end()) return NULL;
|
||||
return &(*it).second;
|
||||
@@ -1066,9 +1067,9 @@ Value const& Value::operator[](CppTL::ConstString const& key) const
|
||||
|
||||
Value& Value::append(const Value& value) { return (*this)[size()] = value; }
|
||||
|
||||
Value Value::get(char const* key, char const* end, Value const& defaultValue) const
|
||||
Value Value::get(char const* key, char const* cend, Value const& defaultValue) const
|
||||
{
|
||||
Value const* found = find(key, end);
|
||||
Value const* found = find(key, cend);
|
||||
return !found ? defaultValue : *found;
|
||||
}
|
||||
Value Value::get(char const* key, Value const& defaultValue) const
|
||||
@@ -1081,12 +1082,12 @@ Value Value::get(std::string const& key, Value const& defaultValue) const
|
||||
}
|
||||
|
||||
|
||||
bool Value::removeMember(const char* key, const char* end, Value* removed)
|
||||
bool Value::removeMember(const char* key, const char* cend, Value* removed)
|
||||
{
|
||||
if (type_ != objectValue) {
|
||||
return false;
|
||||
}
|
||||
CZString actualKey(key, static_cast<unsigned>(end-key), CZString::noDuplication);
|
||||
CZString actualKey(key, static_cast<unsigned>(cend-key), CZString::noDuplication);
|
||||
ObjectValues::iterator it = value_.map_->find(actualKey);
|
||||
if (it == value_.map_->end())
|
||||
return false;
|
||||
@@ -1131,8 +1132,8 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) {
|
||||
ArrayIndex oldSize = size();
|
||||
// shift left all items left, into the place of the "removed"
|
||||
for (ArrayIndex i = index; i < (oldSize - 1); ++i){
|
||||
CZString key(i);
|
||||
(*value_.map_)[key] = (*this)[i + 1];
|
||||
CZString keey(i);
|
||||
(*value_.map_)[keey] = (*this)[i + 1];
|
||||
}
|
||||
// erase the last one ("leftover")
|
||||
CZString keyLast(oldSize - 1);
|
||||
@@ -1148,9 +1149,9 @@ Value Value::get(const CppTL::ConstString& key,
|
||||
}
|
||||
#endif
|
||||
|
||||
bool Value::isMember(char const* key, char const* end) const
|
||||
bool Value::isMember(char const* key, char const* cend) const
|
||||
{
|
||||
Value const* value = find(key, end);
|
||||
Value const* value = find(key, cend);
|
||||
return NULL != value;
|
||||
}
|
||||
bool Value::isMember(char const* key) const
|
||||
@@ -1335,13 +1336,13 @@ std::string Value::getComment(CommentPlacement placement) const {
|
||||
return "";
|
||||
}
|
||||
|
||||
void Value::setOffsetStart(size_t start) { start_ = start; }
|
||||
void Value::setOffsetStart(ptrdiff_t start) { start_ = start; }
|
||||
|
||||
void Value::setOffsetLimit(size_t limit) { limit_ = limit; }
|
||||
void Value::setOffsetLimit(ptrdiff_t limit) { limit_ = limit; }
|
||||
|
||||
size_t Value::getOffsetStart() const { return start_; }
|
||||
ptrdiff_t Value::getOffsetStart() const { return start_; }
|
||||
|
||||
size_t Value::getOffsetLimit() const { return limit_; }
|
||||
ptrdiff_t Value::getOffsetLimit() const { return limit_; }
|
||||
|
||||
std::string Value::toStyledString() const {
|
||||
StyledWriter writer;
|
||||
|
||||
@@ -93,26 +93,26 @@ UInt ValueIteratorBase::index() const {
|
||||
}
|
||||
|
||||
std::string ValueIteratorBase::name() const {
|
||||
char const* key;
|
||||
char const* keey;
|
||||
char const* end;
|
||||
key = memberName(&end);
|
||||
if (!key) return std::string();
|
||||
return std::string(key, end);
|
||||
keey = memberName(&end);
|
||||
if (!keey) return std::string();
|
||||
return std::string(keey, end);
|
||||
}
|
||||
|
||||
char const* ValueIteratorBase::memberName() const {
|
||||
const char* name = (*current_).first.data();
|
||||
return name ? name : "";
|
||||
const char* cname = (*current_).first.data();
|
||||
return cname ? cname : "";
|
||||
}
|
||||
|
||||
char const* ValueIteratorBase::memberName(char const** end) const {
|
||||
const char* name = (*current_).first.data();
|
||||
if (!name) {
|
||||
const char* cname = (*current_).first.data();
|
||||
if (!cname) {
|
||||
*end = NULL;
|
||||
return NULL;
|
||||
}
|
||||
*end = name + (*current_).first.length();
|
||||
return name;
|
||||
*end = cname + (*current_).first.length();
|
||||
return cname;
|
||||
}
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
@@ -129,6 +129,9 @@ ValueConstIterator::ValueConstIterator(
|
||||
const Value::ObjectValues::iterator& current)
|
||||
: ValueIteratorBase(current) {}
|
||||
|
||||
ValueConstIterator::ValueConstIterator(ValueIterator const& other)
|
||||
: ValueIteratorBase(other) {}
|
||||
|
||||
ValueConstIterator& ValueConstIterator::
|
||||
operator=(const ValueIteratorBase& other) {
|
||||
copy(other);
|
||||
@@ -149,7 +152,9 @@ ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
|
||||
: ValueIteratorBase(current) {}
|
||||
|
||||
ValueIterator::ValueIterator(const ValueConstIterator& other)
|
||||
: ValueIteratorBase(other) {}
|
||||
: ValueIteratorBase(other) {
|
||||
throwRuntimeError("ConstIterator to Iterator should never be allowed.");
|
||||
}
|
||||
|
||||
ValueIterator::ValueIterator(const ValueIterator& other)
|
||||
: ValueIteratorBase(other) {}
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <jsoncpp/writer.h>
|
||||
#include <json/writer.h>
|
||||
#include "json_tool.h"
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <iomanip>
|
||||
@@ -20,17 +20,52 @@
|
||||
#include <float.h>
|
||||
#define isfinite _finite
|
||||
#elif defined(__sun) && defined(__SVR4) //Solaris
|
||||
#if !defined(isfinite)
|
||||
#include <ieeefp.h>
|
||||
#define isfinite finite
|
||||
#endif
|
||||
#elif defined(_AIX)
|
||||
#if !defined(isfinite)
|
||||
#include <math.h>
|
||||
#define isfinite finite
|
||||
#endif
|
||||
#elif defined(__hpux)
|
||||
#if !defined(isfinite)
|
||||
#if defined(__ia64) && !defined(finite)
|
||||
#define isfinite(x) ((sizeof(x) == sizeof(float) ? \
|
||||
_Isfinitef(x) : _IsFinite(x)))
|
||||
#else
|
||||
#include <math.h>
|
||||
#define isfinite finite
|
||||
#endif
|
||||
#endif
|
||||
#else
|
||||
#include <cmath>
|
||||
#if !(defined(__QNXNTO__)) // QNX already defines isfinite
|
||||
#define isfinite std::isfinite
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below
|
||||
#define snprintf _snprintf
|
||||
#else
|
||||
#if defined(_MSC_VER)
|
||||
#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
|
||||
#define snprintf sprintf_s
|
||||
#elif _MSC_VER >= 1900 // VC++ 14.0 and above
|
||||
#define snprintf std::snprintf
|
||||
#else
|
||||
#define snprintf _snprintf
|
||||
#endif
|
||||
#elif defined(__ANDROID__) || defined(__QNXNTO__)
|
||||
#define snprintf snprintf
|
||||
#elif __cplusplus >= 201103L
|
||||
#if !defined(__MINGW32__) && !defined(__CYGWIN__)
|
||||
#define snprintf std::snprintf
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if defined(__BORLANDC__)
|
||||
#include <float.h>
|
||||
#define isfinite _finite
|
||||
#define snprintf _snprintf
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
|
||||
@@ -40,7 +75,7 @@
|
||||
|
||||
namespace Json {
|
||||
|
||||
#if __cplusplus >= 201103L
|
||||
#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
|
||||
typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
|
||||
#else
|
||||
typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
|
||||
@@ -67,12 +102,15 @@ static bool containsControlCharacter0(const char* str, unsigned len) {
|
||||
std::string valueToString(LargestInt value) {
|
||||
UIntToStringBuffer buffer;
|
||||
char* current = buffer + sizeof(buffer);
|
||||
bool isNegative = value < 0;
|
||||
if (isNegative)
|
||||
value = -value;
|
||||
uintToString(LargestUInt(value), current);
|
||||
if (isNegative)
|
||||
if (value == Value::minLargestInt) {
|
||||
uintToString(LargestUInt(Value::maxLargestInt) + 1, current);
|
||||
*--current = '-';
|
||||
} else if (value < 0) {
|
||||
uintToString(LargestUInt(-value), current);
|
||||
*--current = '-';
|
||||
} else {
|
||||
uintToString(LargestUInt(value), current);
|
||||
}
|
||||
assert(current >= buffer);
|
||||
return current;
|
||||
}
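The new branch above exists because negating the most negative integer overflows. A worked edge case (editor's note, assuming 64-bit LargestInt as configured by JSON_HAS_INT64; not text from the commit):

// Value::minLargestInt == -9223372036854775808
// -value does not fit in LargestInt, so the old code invoked signed overflow.
// The new branch instead prints LargestUInt(Value::maxLargestInt) + 1, i.e.
// 9223372036854775808, and then prepends '-':
// valueToString(Value::minLargestInt) == "-9223372036854775808"

For every other negative value the behaviour is unchanged: negate, print, prepend '-'.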
|
||||
@@ -97,43 +135,38 @@ std::string valueToString(UInt value) {
|
||||
|
||||
#endif // # if defined(JSON_HAS_INT64)
|
||||
|
||||
std::string valueToString(double value) {
|
||||
std::string valueToString(double value, bool useSpecialFloats, unsigned int precision) {
|
||||
// Allocate a buffer that is more than large enough to store the 16 digits of
|
||||
// precision requested below.
|
||||
char buffer[32];
|
||||
int len = -1;
|
||||
|
||||
// Print into the buffer. We need not request the alternative representation
|
||||
// that always has a decimal point because JSON doesn't distingish the
|
||||
// concepts of reals and integers.
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with
|
||||
// visual studio 2005 to
|
||||
// avoid warning.
|
||||
#if defined(WINCE)
|
||||
len = _snprintf(buffer, sizeof(buffer), "%.17g", value);
|
||||
#else
|
||||
len = sprintf_s(buffer, sizeof(buffer), "%.17g", value);
|
||||
#endif
|
||||
#else
|
||||
char formatString[6];
|
||||
sprintf(formatString, "%%.%dg", precision);
|
||||
|
||||
// Print into the buffer. We need not request the alternative representation
|
||||
// that always has a decimal point because JSON doesn't distingish the
|
||||
// concepts of reals and integers.
|
||||
if (isfinite(value)) {
|
||||
len = snprintf(buffer, sizeof(buffer), "%.17g", value);
|
||||
len = snprintf(buffer, sizeof(buffer), formatString, value);
|
||||
} else {
|
||||
// IEEE standard states that NaN values will not compare to themselves
|
||||
if (value != value) {
|
||||
len = snprintf(buffer, sizeof(buffer), "null");
|
||||
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "NaN" : "null");
|
||||
} else if (value < 0) {
|
||||
len = snprintf(buffer, sizeof(buffer), "-1e+9999");
|
||||
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "-Infinity" : "-1e+9999");
|
||||
} else {
|
||||
len = snprintf(buffer, sizeof(buffer), "1e+9999");
|
||||
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "Infinity" : "1e+9999");
|
||||
}
|
||||
// For those, we do not need to call fixNumLoc, but it is fast.
|
||||
}
|
||||
#endif
|
||||
assert(len >= 0);
|
||||
fixNumericLocale(buffer, buffer + len);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
std::string valueToString(double value) { return valueToString(value, false, 17); }
|
||||
|
||||
std::string valueToString(bool value) { return value ? "true" : "false"; }
|
||||
|
||||
std::string valueToQuotedString(const char* value) {
|
||||
@@ -317,15 +350,21 @@ void FastWriter::writeValue(const Value& value) {
|
||||
document_ += valueToString(value.asDouble());
|
||||
break;
|
||||
case stringValue:
|
||||
document_ += valueToQuotedString(value.asCString());
|
||||
{
|
||||
// Is NULL possible for value.string_?
|
||||
char const* str;
|
||||
char const* end;
|
||||
bool ok = value.getString(&str, &end);
|
||||
if (ok) document_ += valueToQuotedStringN(str, static_cast<unsigned>(end-str));
|
||||
break;
|
||||
}
|
||||
case booleanValue:
|
||||
document_ += valueToString(value.asBool());
|
||||
break;
|
||||
case arrayValue: {
|
||||
document_ += '[';
|
||||
int size = value.size();
|
||||
for (int index = 0; index < size; ++index) {
|
||||
ArrayIndex size = value.size();
|
||||
for (ArrayIndex index = 0; index < size; ++index) {
|
||||
if (index > 0)
|
||||
document_ += ',';
|
||||
writeValue(value[index]);
|
||||
@@ -340,7 +379,7 @@ void FastWriter::writeValue(const Value& value) {
|
||||
const std::string& name = *it;
|
||||
if (it != members.begin())
|
||||
document_ += ',';
|
||||
document_ += valueToQuotedStringN(name.data(), name.length());
|
||||
document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
|
||||
document_ += yamlCompatiblityEnabled_ ? ": " : ":";
|
||||
writeValue(value[name]);
|
||||
}
|
||||
@@ -382,7 +421,7 @@ void StyledWriter::writeValue(const Value& value) {
|
||||
break;
|
||||
case stringValue:
|
||||
{
|
||||
// Is NULL is possible for value.string_?
|
||||
// Is NULL possible for value.string_?
|
||||
char const* str;
|
||||
char const* end;
|
||||
bool ok = value.getString(&str, &end);
|
||||
@@ -469,26 +508,25 @@ void StyledWriter::writeArrayValue(const Value& value) {
|
||||
}
|
||||
|
||||
bool StyledWriter::isMultineArray(const Value& value) {
|
||||
int size = value.size();
|
||||
ArrayIndex const size = value.size();
|
||||
bool isMultiLine = size * 3 >= rightMargin_;
|
||||
childValues_.clear();
|
||||
for (int index = 0; index < size && !isMultiLine; ++index) {
|
||||
for (ArrayIndex index = 0; index < size && !isMultiLine; ++index) {
|
||||
const Value& childValue = value[index];
|
||||
isMultiLine =
|
||||
isMultiLine || ((childValue.isArray() || childValue.isObject()) &&
|
||||
isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
|
||||
childValue.size() > 0);
|
||||
}
|
||||
if (!isMultiLine) // check if line length > max line length
|
||||
{
|
||||
childValues_.reserve(size);
|
||||
addChildValues_ = true;
|
||||
int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (int index = 0; index < size; ++index) {
|
||||
ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (ArrayIndex index = 0; index < size; ++index) {
|
||||
if (hasCommentForValue(value[index])) {
|
||||
isMultiLine = true;
|
||||
}
|
||||
writeValue(value[index]);
|
||||
lineLength += int(childValues_[index].length());
|
||||
lineLength += static_cast<ArrayIndex>(childValues_[index].length());
|
||||
}
|
||||
addChildValues_ = false;
|
||||
isMultiLine = isMultiLine || lineLength >= rightMargin_;
|
||||
@@ -522,7 +560,7 @@ void StyledWriter::writeWithIndent(const std::string& value) {
|
||||
void StyledWriter::indent() { indentString_ += std::string(indentSize_, ' '); }
|
||||
|
||||
void StyledWriter::unindent() {
|
||||
assert(int(indentString_.size()) >= indentSize_);
|
||||
assert(indentString_.size() >= indentSize_);
|
||||
indentString_.resize(indentString_.size() - indentSize_);
|
||||
}
|
||||
|
||||
@@ -599,8 +637,15 @@ void StyledStreamWriter::writeValue(const Value& value) {
|
||||
pushValue(valueToString(value.asDouble()));
|
||||
break;
|
||||
case stringValue:
|
||||
pushValue(valueToQuotedString(value.asCString()));
|
||||
{
|
||||
// Is NULL possible for value.string_?
|
||||
char const* str;
|
||||
char const* end;
|
||||
bool ok = value.getString(&str, &end);
|
||||
if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
|
||||
else pushValue("");
|
||||
break;
|
||||
}
|
||||
case booleanValue:
|
||||
pushValue(valueToString(value.asBool()));
|
||||
break;
|
||||
@@ -682,26 +727,25 @@ void StyledStreamWriter::writeArrayValue(const Value& value) {
|
||||
}
|
||||
|
||||
bool StyledStreamWriter::isMultineArray(const Value& value) {
|
||||
int size = value.size();
|
||||
ArrayIndex const size = value.size();
|
||||
bool isMultiLine = size * 3 >= rightMargin_;
|
||||
childValues_.clear();
|
||||
for (int index = 0; index < size && !isMultiLine; ++index) {
|
||||
for (ArrayIndex index = 0; index < size && !isMultiLine; ++index) {
|
||||
const Value& childValue = value[index];
|
||||
isMultiLine =
|
||||
isMultiLine || ((childValue.isArray() || childValue.isObject()) &&
|
||||
isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
|
||||
childValue.size() > 0);
|
||||
}
|
||||
if (!isMultiLine) // check if line length > max line length
|
||||
{
|
||||
childValues_.reserve(size);
|
||||
addChildValues_ = true;
|
||||
int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (int index = 0; index < size; ++index) {
|
||||
ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (ArrayIndex index = 0; index < size; ++index) {
|
||||
if (hasCommentForValue(value[index])) {
|
||||
isMultiLine = true;
|
||||
}
|
||||
writeValue(value[index]);
|
||||
lineLength += int(childValues_[index].length());
|
||||
lineLength += static_cast<ArrayIndex>(childValues_[index].length());
|
||||
}
|
||||
addChildValues_ = false;
|
||||
isMultiLine = isMultiLine || lineLength >= rightMargin_;
|
||||
@@ -792,8 +836,10 @@ struct BuiltStyledStreamWriter : public StreamWriter
|
||||
CommentStyle::Enum cs,
|
||||
std::string const& colonSymbol,
|
||||
std::string const& nullSymbol,
|
||||
std::string const& endingLineFeedSymbol);
|
||||
virtual int write(Value const& root, std::ostream* sout);
|
||||
std::string const& endingLineFeedSymbol,
|
||||
bool useSpecialFloats,
|
||||
unsigned int precision);
|
||||
int write(Value const& root, std::ostream* sout) override;
|
||||
private:
|
||||
void writeValue(Value const& value);
|
||||
void writeArrayValue(Value const& value);
|
||||
@@ -811,7 +857,7 @@ private:
|
||||
|
||||
ChildValues childValues_;
|
||||
std::string indentString_;
|
||||
int rightMargin_;
|
||||
unsigned int rightMargin_;
|
||||
std::string indentation_;
|
||||
CommentStyle::Enum cs_;
|
||||
std::string colonSymbol_;
|
||||
@@ -819,13 +865,17 @@ private:
|
||||
std::string endingLineFeedSymbol_;
|
||||
bool addChildValues_ : 1;
|
||||
bool indented_ : 1;
|
||||
bool useSpecialFloats_ : 1;
|
||||
unsigned int precision_;
|
||||
};
|
||||
BuiltStyledStreamWriter::BuiltStyledStreamWriter(
|
||||
std::string const& indentation,
|
||||
CommentStyle::Enum cs,
|
||||
std::string const& colonSymbol,
|
||||
std::string const& nullSymbol,
|
||||
std::string const& endingLineFeedSymbol)
|
||||
std::string const& endingLineFeedSymbol,
|
||||
bool useSpecialFloats,
|
||||
unsigned int precision)
|
||||
: rightMargin_(74)
|
||||
, indentation_(indentation)
|
||||
, cs_(cs)
|
||||
@@ -834,6 +884,8 @@ BuiltStyledStreamWriter::BuiltStyledStreamWriter(
|
||||
, endingLineFeedSymbol_(endingLineFeedSymbol)
|
||||
, addChildValues_(false)
|
||||
, indented_(false)
|
||||
, useSpecialFloats_(useSpecialFloats)
|
||||
, precision_(precision)
|
||||
{
|
||||
}
|
||||
int BuiltStyledStreamWriter::write(Value const& root, std::ostream* sout)
|
||||
@@ -863,7 +915,7 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) {
|
||||
pushValue(valueToString(value.asLargestUInt()));
|
||||
break;
|
||||
case realValue:
|
||||
pushValue(valueToString(value.asDouble()));
|
||||
pushValue(valueToString(value.asDouble(), useSpecialFloats_, precision_));
|
||||
break;
|
||||
case stringValue:
|
||||
{
|
||||
@@ -893,7 +945,7 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) {
|
||||
std::string const& name = *it;
|
||||
Value const& childValue = value[name];
|
||||
writeCommentBeforeValue(childValue);
|
||||
writeWithIndent(valueToQuotedStringN(name.data(), name.length()));
|
||||
writeWithIndent(valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length())));
|
||||
*sout_ << colonSymbol_;
|
||||
writeValue(childValue);
|
||||
if (++it == members.end()) {
|
||||
@@ -958,26 +1010,25 @@ void BuiltStyledStreamWriter::writeArrayValue(Value const& value) {
|
||||
}
|
||||
|
||||
bool BuiltStyledStreamWriter::isMultineArray(Value const& value) {
|
||||
int size = value.size();
|
||||
ArrayIndex const size = value.size();
|
||||
bool isMultiLine = size * 3 >= rightMargin_;
|
||||
childValues_.clear();
|
||||
for (int index = 0; index < size && !isMultiLine; ++index) {
|
||||
for (ArrayIndex index = 0; index < size && !isMultiLine; ++index) {
|
||||
Value const& childValue = value[index];
|
||||
isMultiLine =
|
||||
isMultiLine || ((childValue.isArray() || childValue.isObject()) &&
|
||||
isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
|
||||
childValue.size() > 0);
|
||||
}
|
||||
if (!isMultiLine) // check if line length > max line length
|
||||
{
|
||||
childValues_.reserve(size);
|
||||
addChildValues_ = true;
|
||||
int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (int index = 0; index < size; ++index) {
|
||||
ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
|
||||
for (ArrayIndex index = 0; index < size; ++index) {
|
||||
if (hasCommentForValue(value[index])) {
|
||||
isMultiLine = true;
|
||||
}
|
||||
writeValue(value[index]);
|
||||
lineLength += int(childValues_[index].length());
|
||||
lineLength += static_cast<ArrayIndex>(childValues_[index].length());
|
||||
}
|
||||
addChildValues_ = false;
|
||||
isMultiLine = isMultiLine || lineLength >= rightMargin_;
|
||||
@@ -1078,6 +1129,8 @@ StreamWriter* StreamWriterBuilder::newStreamWriter() const
|
||||
std::string cs_str = settings_["commentStyle"].asString();
|
||||
bool eyc = settings_["enableYAMLCompatibility"].asBool();
|
||||
bool dnp = settings_["dropNullPlaceholders"].asBool();
|
||||
bool usf = settings_["useSpecialFloats"].asBool();
|
||||
unsigned int pre = settings_["precision"].asUInt();
|
||||
CommentStyle::Enum cs = CommentStyle::All;
|
||||
if (cs_str == "All") {
|
||||
cs = CommentStyle::All;
|
||||
@@ -1096,10 +1149,11 @@ StreamWriter* StreamWriterBuilder::newStreamWriter() const
|
||||
if (dnp) {
|
||||
nullSymbol = "";
|
||||
}
|
||||
if (pre > 17) pre = 17;
|
||||
std::string endingLineFeedSymbol = "";
|
||||
return new BuiltStyledStreamWriter(
|
||||
indentation, cs,
|
||||
colonSymbol, nullSymbol, endingLineFeedSymbol);
|
||||
colonSymbol, nullSymbol, endingLineFeedSymbol, usf, pre);
|
||||
}
|
||||
static void getValidWriterKeys(std::set<std::string>* valid_keys)
|
||||
{
|
||||
@@ -1108,6 +1162,8 @@ static void getValidWriterKeys(std::set<std::string>* valid_keys)
|
||||
valid_keys->insert("commentStyle");
|
||||
valid_keys->insert("enableYAMLCompatibility");
|
||||
valid_keys->insert("dropNullPlaceholders");
|
||||
valid_keys->insert("useSpecialFloats");
|
||||
valid_keys->insert("precision");
|
||||
}
|
||||
bool StreamWriterBuilder::validate(Json::Value* invalid) const
|
||||
{
|
||||
@@ -1138,6 +1194,8 @@ void StreamWriterBuilder::setDefaults(Json::Value* settings)
|
||||
(*settings)["indentation"] = "\t";
|
||||
(*settings)["enableYAMLCompatibility"] = false;
|
||||
(*settings)["dropNullPlaceholders"] = false;
|
||||
(*settings)["useSpecialFloats"] = false;
|
||||
(*settings)["precision"] = 17;
|
||||
//! [StreamWriterBuilderDefaults]
|
||||
}
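The writer side mirrors the reader: "useSpecialFloats" and "precision" become ordinary StreamWriterBuilder settings with the defaults shown above. A minimal usage sketch (editor's illustration, not part of the commit), matching the expectations in the new test fixtures further below:

#include <json/json.h>
#include <string>

static std::string writeCompactDouble(double d) {
  Json::StreamWriterBuilder wbuilder;
  wbuilder.settings_["useSpecialFloats"] = true; // emit NaN/Infinity instead of null/1e+9999
  wbuilder.settings_["precision"] = 5;           // values above 17 are clamped in newStreamWriter
  return Json::writeString(wbuilder, Json::Value(d));
}

// writeCompactDouble(100.0 / 3.0)                              -> "33.333"
// writeCompactDouble(std::numeric_limits<double>::quiet_NaN()) -> "NaN"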
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
// DO NOT EDIT. This file is generated by CMake from "version"
|
||||
// and "version.h.in" files.
|
||||
// DO NOT EDIT. This file (and "version") is generated by CMake.
|
||||
// Run CMake configure step to update it.
|
||||
#ifndef JSON_VERSION_H_INCLUDED
|
||||
# define JSON_VERSION_H_INCLUDED
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
# vim: et ts=4 sts=4 sw=4 tw=0
|
||||
|
||||
IF(JSONCPP_LIB_BUILD_SHARED)
|
||||
ADD_DEFINITIONS( -DJSON_DLL )
|
||||
ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
|
||||
ADD_EXECUTABLE( jsoncpp_test
|
||||
jsontest.cpp
|
||||
jsontest.h
|
||||
@@ -11,11 +7,12 @@ ADD_EXECUTABLE( jsoncpp_test
|
||||
)
|
||||
|
||||
|
||||
IF(JSONCPP_LIB_BUILD_SHARED)
|
||||
IF(BUILD_SHARED_LIBS)
|
||||
ADD_DEFINITIONS( -DJSON_DLL )
|
||||
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
|
||||
ELSE(JSONCPP_LIB_BUILD_SHARED)
|
||||
ELSE(BUILD_SHARED_LIBS)
|
||||
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
|
||||
ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
ENDIF()
|
||||
|
||||
# another way to solve issue #90
|
||||
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
|
||||
@@ -23,19 +20,19 @@ ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
# Run unit tests in post-build
|
||||
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
|
||||
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
|
||||
IF(JSONCPP_LIB_BUILD_SHARED)
|
||||
IF(BUILD_SHARED_LIBS)
|
||||
# First, copy the shared lib, for Microsoft.
|
||||
# Then, run the test executable.
|
||||
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
|
||||
POST_BUILD
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
|
||||
COMMAND $<TARGET_FILE:jsoncpp_test>)
|
||||
ELSE(JSONCPP_LIB_BUILD_SHARED)
|
||||
ELSE(BUILD_SHARED_LIBS)
|
||||
# Just run the test executable.
|
||||
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
|
||||
POST_BUILD
|
||||
COMMAND $<TARGET_FILE:jsoncpp_test>)
|
||||
ENDIF(JSONCPP_LIB_BUILD_SHARED)
|
||||
ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)
|
||||
ENDIF()
|
||||
ENDIF()
|
||||
|
||||
SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
|
||||
|
||||
@@ -265,8 +265,8 @@ TestResult& checkStringEqual(TestResult& result,
} \
\
public: /* overidden from TestCase */ \
virtual const char* testName() const { return #FixtureType "/" #name; } \
virtual void runTestCase(); \
const char* testName() const override { return #FixtureType "/" #name; } \
void runTestCase() override; \
}; \
\
void Test##FixtureType##name::runTestCase()

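The macro change above swaps plain virtual re-declarations for C++11 override, so a signature mismatch against the base TestCase fails at compile time instead of silently declaring a new virtual. A generic illustration of the difference (not jsoncpp code):

    struct Base {
        virtual const char* testName() const { return "base"; }
        virtual ~Base() {}
    };

    struct Derived : Base {
        // Without override, dropping the trailing const would silently create an
        // unrelated virtual function; with override the compiler rejects it:
        //   const char* testName() override;   // error: does not override
        const char* testName() const override { return "derived"; }
    };
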
@@ -7,6 +7,10 @@
#include <json/config.h>
#include <json/json.h>
#include <cstring>
#include <limits>
#include <sstream>
#include <string>
#include <iomanip>

// Make numeric limits more convenient to talk about.
// Assumes int type in 32 bits.
@@ -257,6 +261,20 @@ JSONTEST_FIXTURE(ValueTest, arrays) {
JSONTEST_ASSERT_EQUAL(Json::Value(17), got);
JSONTEST_ASSERT_EQUAL(false, array1_.removeIndex(2, &got)); // gone now
}
JSONTEST_FIXTURE(ValueTest, arrayIssue252)
{
int count = 5;
Json::Value root;
Json::Value item;
root["array"] = Json::Value::nullRef;
for (int i = 0; i < count; i++)
{
item["a"] = i;
item["b"] = i;
root["array"][i] = item;
}
//JSONTEST_ASSERT_EQUAL(5, root["array"].size());
}

JSONTEST_FIXTURE(ValueTest, null) {
JSONTEST_ASSERT_EQUAL(Json::nullValue, null_.type());
@@ -1198,7 +1216,7 @@ JSONTEST_FIXTURE(ValueTest, nonIntegers) {
JSONTEST_ASSERT_EQUAL(-2147483648.5, val.asDouble());
JSONTEST_ASSERT_EQUAL(float(-2147483648.5), val.asFloat());
#ifdef JSON_HAS_INT64
JSONTEST_ASSERT_EQUAL(-Json::Int64(1) << 31, val.asLargestInt());
JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 31), val.asLargestInt());
#endif
JSONTEST_ASSERT_EQUAL(true, val.asBool());
JSONTEST_ASSERT_EQUAL("-2147483648.5",
@@ -1240,7 +1258,7 @@ JSONTEST_FIXTURE(ValueTest, nonIntegers) {

// A 16-digit floating point number.
val = Json::Value(2199023255552000.0f);
JSONTEST_ASSERT_EQUAL(float(2199023255552000), val.asFloat());
JSONTEST_ASSERT_EQUAL(float(2199023255552000.0f), val.asFloat());
JSONTEST_ASSERT_STRING_EQUAL("2199023255552000",
normalizeFloatingPointStr(val.asString()));

@@ -1542,7 +1560,7 @@ JSONTEST_FIXTURE(ValueTest, StaticString) {

JSONTEST_FIXTURE(ValueTest, CommentBefore) {
Json::Value val; // fill val
val.setComment("// this comment should appear before", Json::commentBefore);
val.setComment(std::string("// this comment should appear before"), Json::commentBefore);
Json::StreamWriterBuilder wbuilder;
wbuilder.settings_["commentStyle"] = "All";
{
@@ -1637,6 +1655,63 @@ JSONTEST_FIXTURE(ValueTest, zeroesInKeys) {
}
}

JSONTEST_FIXTURE(ValueTest, specialFloats) {
Json::StreamWriterBuilder b;
b.settings_["useSpecialFloats"] = true;

Json::Value v = std::numeric_limits<double>::quiet_NaN();
std::string expected = "NaN";
std::string result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

v = std::numeric_limits<double>::infinity();
expected = "Infinity";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

v = -std::numeric_limits<double>::infinity();
expected = "-Infinity";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);
}

JSONTEST_FIXTURE(ValueTest, precision) {
Json::StreamWriterBuilder b;
b.settings_["precision"] = 5;

Json::Value v = 100.0/3;
std::string expected = "33.333";
std::string result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

v = 0.25000000;
expected = "0.25";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

v = 0.2563456;
expected = "0.25635";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

b.settings_["precision"] = 1;
expected = "0.3";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

b.settings_["precision"] = 17;
v = 1234857476305.256345694873740545068;
expected = "1234857476305.2563";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);

b.settings_["precision"] = 24;
v = 0.256345694873740545068;
expected = "0.25634569487374054";
result = Json::writeString(b, v);
JSONTEST_ASSERT_STRING_EQUAL(expected, result);
}

struct WriterTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(WriterTest, dropNullPlaceholders) {
@@ -1910,7 +1985,7 @@ JSONTEST_FIXTURE(CharReaderStrictModeTest, dupKeys) {
struct CharReaderFailIfExtraTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderFailIfExtraTest, issue164) {
// This is interpretted as a string value followed by a colon.
// This is interpreted as a string value followed by a colon.
Json::CharReaderBuilder b;
Json::Value root;
char const doc[] =
@@ -2209,6 +2284,7 @@ JSONTEST_FIXTURE(CharReaderAllowSingleQuotesTest, issue182) {
JSONTEST_ASSERT_STRING_EQUAL("x", root["a"].asString());
JSONTEST_ASSERT_STRING_EQUAL("y", root["b"].asString());
}
delete reader;
}

struct CharReaderAllowZeroesTest : JsonTest::TestCase {};
@@ -2241,6 +2317,82 @@ JSONTEST_FIXTURE(CharReaderAllowZeroesTest, issue176) {
JSONTEST_ASSERT_STRING_EQUAL("x", root["a"].asString());
JSONTEST_ASSERT_STRING_EQUAL("y", root["b"].asString());
}
delete reader;
}

struct CharReaderAllowSpecialFloatsTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(CharReaderAllowSpecialFloatsTest, issue209) {
Json::CharReaderBuilder b;
b.settings_["allowSpecialFloats"] = true;
Json::Value root;
std::string errs;
Json::CharReader* reader(b.newCharReader());
{
char const doc[] = "{\"a\":NaN,\"b\":Infinity,\"c\":-Infinity}";
bool ok = reader->parse(
doc, doc + std::strlen(doc),
&root, &errs);
JSONTEST_ASSERT(ok);
JSONTEST_ASSERT_STRING_EQUAL("", errs);
JSONTEST_ASSERT_EQUAL(3u, root.size());
double n = root["a"].asDouble();
JSONTEST_ASSERT(n != n);
JSONTEST_ASSERT_EQUAL(std::numeric_limits<double>::infinity(), root.get("b", 0.0));
JSONTEST_ASSERT_EQUAL(-std::numeric_limits<double>::infinity(), root.get("c", 0.0));
}

struct TestData {
int line;
bool ok;
std::string in;
};
const TestData test_data[] = {
{__LINE__, 1, "{\"a\":9}"},
{__LINE__, 0, "{\"a\":0Infinity}"},
{__LINE__, 0, "{\"a\":1Infinity}"},
{__LINE__, 0, "{\"a\":9Infinity}"},
{__LINE__, 0, "{\"a\":0nfinity}"},
{__LINE__, 0, "{\"a\":1nfinity}"},
{__LINE__, 0, "{\"a\":9nfinity}"},
{__LINE__, 0, "{\"a\":nfinity}"},
{__LINE__, 0, "{\"a\":.nfinity}"},
{__LINE__, 0, "{\"a\":9nfinity}"},
{__LINE__, 0, "{\"a\":-nfinity}"},
{__LINE__, 1, "{\"a\":Infinity}"},
{__LINE__, 0, "{\"a\":.Infinity}"},
{__LINE__, 0, "{\"a\":_Infinity}"},
{__LINE__, 0, "{\"a\":_nfinity}"},
{__LINE__, 1, "{\"a\":-Infinity}"}
};
for (size_t tdi = 0; tdi < sizeof(test_data) / sizeof(*test_data); ++tdi) {
const TestData& td = test_data[tdi];
bool ok = reader->parse(&*td.in.begin(),
&*td.in.begin() + td.in.size(),
&root, &errs);
JSONTEST_ASSERT(td.ok == ok)
<< "line:" << td.line << "\n"
<< "  expected: {"
<< "ok:" << td.ok
<< ", in:\'" << td.in << "\'"
<< "}\n"
<< "  actual: {"
<< "ok:" << ok
<< "}\n";
}

{
char const doc[] = "{\"posInf\": Infinity, \"NegInf\": -Infinity}";
bool ok = reader->parse(
doc, doc + std::strlen(doc),
&root, &errs);
JSONTEST_ASSERT(ok);
JSONTEST_ASSERT_STRING_EQUAL("", errs);
JSONTEST_ASSERT_EQUAL(2u, root.size());
JSONTEST_ASSERT_EQUAL(std::numeric_limits<double>::infinity(), root["posInf"].asDouble());
JSONTEST_ASSERT_EQUAL(-std::numeric_limits<double>::infinity(), root["NegInf"].asDouble());
}
delete reader;
}

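The allowSpecialFloats reader setting exercised in the fixture above is the parsing counterpart of the writer's useSpecialFloats, so the two together allow NaN and infinities to survive a round trip. A short sketch combining them (illustrative only, based on the APIs these tests use):

    #include <json/json.h>
    #include <cassert>
    #include <limits>
    #include <string>

    int main() {
        // Write NaN/Infinity as literals instead of rejecting them.
        Json::StreamWriterBuilder wb;
        wb.settings_["useSpecialFloats"] = true;
        Json::Value out;
        out["nan"] = std::numeric_limits<double>::quiet_NaN();
        out["inf"] = std::numeric_limits<double>::infinity();
        std::string doc = Json::writeString(wb, out);

        // Parse them back; allowSpecialFloats must be enabled explicitly.
        Json::CharReaderBuilder rb;
        rb.settings_["allowSpecialFloats"] = true;
        Json::CharReader* reader = rb.newCharReader();
        Json::Value in;
        std::string errs;
        bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &in, &errs);
        delete reader;

        assert(ok);
        double n = in["nan"].asDouble();
        assert(n != n); // NaN compares unequal to itself
        assert(in["inf"].asDouble() == std::numeric_limits<double>::infinity());
        return 0;
    }
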
struct BuilderTest : JsonTest::TestCase {};
@@ -2318,12 +2470,53 @@ JSONTEST_FIXTURE(IteratorTest, indexes) {
JSONTEST_ASSERT(it == json.end());
}

JSONTEST_FIXTURE(IteratorTest, const) {
Json::Value const v;
JSONTEST_ASSERT_THROWS(
Json::Value::iterator it(v.begin()) // Compile, but throw.
);

Json::Value value;

for(int i = 9; i < 12; ++i)
{
std::ostringstream out;
out << std::setw(2) << i;
std::string str = out.str();
value[str] = str;
}

std::ostringstream out;
//in old code, this will get a compile error
Json::Value::const_iterator iter = value.begin();
for(; iter != value.end(); ++iter)
{
out << *iter << ',';
}
std::string expected = "\" 9\",\"10\",\"11\",";
JSONTEST_ASSERT_STRING_EQUAL(expected, out.str());
}

struct RValueTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(RValueTest, moveConstruction) {
#if JSON_HAS_RVALUE_REFERENCES
Json::Value json;
json["key"] = "value";
Json::Value moved = std::move(json);
JSONTEST_ASSERT(moved != json); // Possibly not nullValue; definitely not equal.
JSONTEST_ASSERT_EQUAL(Json::objectValue, moved.type());
JSONTEST_ASSERT_EQUAL(Json::stringValue, moved["key"].type());
#endif
}

int main(int argc, const char* argv[]) {
JsonTest::Runner runner;
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, checkNormalizeFloatingPointStr);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, memberCount);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, objects);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, arrays);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, arrayIssue252);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, null);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, strings);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, bools);
@@ -2345,6 +2538,8 @@ int main(int argc, const char* argv[]) {
//JSONTEST_REGISTER_FIXTURE(runner, ValueTest, nulls);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, zeroes);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, zeroesInKeys);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, specialFloats);
JSONTEST_REGISTER_FIXTURE(runner, ValueTest, precision);

JSONTEST_REGISTER_FIXTURE(runner, WriterTest, dropNullPlaceholders);
JSONTEST_REGISTER_FIXTURE(runner, StreamWriterTest, dropNullPlaceholders);
@@ -2379,11 +2574,16 @@ int main(int argc, const char* argv[]) {

JSONTEST_REGISTER_FIXTURE(runner, CharReaderAllowZeroesTest, issue176);

JSONTEST_REGISTER_FIXTURE(runner, CharReaderAllowSpecialFloatsTest, issue209);

JSONTEST_REGISTER_FIXTURE(runner, BuilderTest, settings);

JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, distance);
JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, names);
JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, indexes);
JSONTEST_REGISTER_FIXTURE(runner, IteratorTest, const);

JSONTEST_REGISTER_FIXTURE(runner, RValueTest, moveConstruction);

return runner.runCommandLine(argc, argv);
}

@@ -1,4 +1,10 @@
# removes all files created during testing
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""Removes all files created during testing."""

import glob
import os

@@ -1,3 +1,8 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
import glob
import os.path

@@ -1,4 +1,11 @@
# Simple implementation of a json test runner to run the test against json-py.
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

"""Simple implementation of a json test runner to run the test against
json-py."""

from __future__ import print_function
import sys
import os.path

@@ -1,3 +1,8 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
from __future__ import unicode_literals
from io import open

@@ -1,3 +1,8 @@
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

from __future__ import print_function
from __future__ import unicode_literals
from io import open

29
3P/jsoncpp/travis.sh
Normal file
29
3P/jsoncpp/travis.sh
Normal file
@@ -0,0 +1,29 @@
#!/usr/bin/env sh
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:
# - SHARED_LIB=ON/OFF
# - STATIC_LIB=ON/OFF
# - CMAKE_PKG=ON/OFF
# - BUILD_TYPE=release/debug
# - VERBOSE_MAKE=false/true
# - VERBOSE (set or not)

# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex

env | sort

cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DBUILD_SHARED_LIBS=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE .
make

# Python is not available in Travis for osx.
# https://github.com/travis-ci/travis-ci/issues/2320
if [ "$TRAVIS_OS_NAME" != "osx" ]
then
make jsoncpp_check
valgrind --error-exitcode=42 --leak-check=full ./src/test_lib_json/jsoncpp_test
fi
@@ -1 +1 @@
1.6.0
1.7.0