From e99e6d9cc694f79e6fd5a497131b755e2614d0df Mon Sep 17 00:00:00 2001 From: Magnus Bjerke Vik Date: Wed, 3 Dec 2014 15:42:41 +0100 Subject: [PATCH 001/885] CMake: Add include directory to jsoncpp_lib target so that it can be easier used with other projects. --- src/lib_json/CMakeLists.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index 418044deb..ccef9aeb0 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -39,6 +39,10 @@ ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} ) +TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib + PUBLIC ${JSONCPP_INCLUDE_DIR} + ) + # Install instructions for this target IF(JSONCPP_WITH_CMAKE_PACKAGE) TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib From 3fd7f8b47092d69b4655e3e836d68444f7c2e27a Mon Sep 17 00:00:00 2001 From: Magnus Bjerke Vik Date: Tue, 16 Dec 2014 08:58:52 +0100 Subject: [PATCH 002/885] CMake: Only add include directory to jsoncpp_lib target if CMake version supports the command. --- src/lib_json/CMakeLists.txt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index ccef9aeb0..d09ad8145 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -39,9 +39,11 @@ ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} ) -TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib - PUBLIC ${JSONCPP_INCLUDE_DIR} - ) +IF(CMAKE_VERSION VERSION_EQUAL 2.8.11 OR CMAKE_VERSION VERSION_GREATER 2.8.11) + TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib + PUBLIC ${JSONCPP_INCLUDE_DIR} + ) +ENDIF(CMAKE_VERSION VERSION_EQUAL 2.8.11 OR CMAKE_VERSION VERSION_GREATER 2.8.11) # Install instructions for this target IF(JSONCPP_WITH_CMAKE_PACKAGE) From 54764dd85b6be0b866ac780f45eb0cc4d2202222 Mon Sep 17 00:00:00 2001 From: dominicpezzuto Date: Sat, 27 Dec 2014 16:44:26 -0500 Subject: [PATCH 003/885] Fix build issues related to Solaris and older GCC MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed two build issues: - JsonCPP currently doesn’t compile for Solaris due to platform differences with ‘isfinite’ function. Fixed by adding proper include and define for Solaris. - JsonCPP currently doesn’t compile for GCC version 4.1.2 and earlier due to use of ‘-Werror=*’ compile flag, which was introduced in a later version. Fixed by adding version check to only add this flag on supported versions of GCC. 
--- src/lib_json/CMakeLists.txt | 1 - src/lib_json/json_writer.cpp | 5 +++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index e395c64ab..3bf5e1aea 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -10,7 +10,6 @@ ELSE(JSONCPP_LIB_BUILD_SHARED) SET(JSONCPP_LIB_TYPE STATIC) ENDIF(JSONCPP_LIB_BUILD_SHARED) - if( CMAKE_COMPILER_IS_GNUCXX ) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing") endif( CMAKE_COMPILER_IS_GNUCXX ) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 89964ea32..280f09ed5 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -26,6 +26,11 @@ #pragma warning(disable : 4996) #endif +#if defined(__sun) && defined(__SVR4) //Solaris +#include <ieeefp.h> +#define isfinite finite +#endif + namespace Json { static bool containsControlCharacter(const char* str) { From d2b6992f3e88cd58ab106ccaca29069dd26796af Mon Sep 17 00:00:00 2001 From: dominicpezzuto Date: Sat, 27 Dec 2014 16:45:40 -0500 Subject: [PATCH 004/885] Fix build issues related to Solaris and older GCC MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed two build issues: - JsonCPP currently doesn’t compile for Solaris due to platform differences with ‘isfinite’ function. Fixed by adding proper include and define for Solaris. - JsonCPP currently doesn’t compile for GCC version 4.1.2 and earlier due to use of ‘-Werror=*’ compile flag, which was introduced in a later version. Fixed by adding version check to only add this flag on supported versions of GCC. --- src/lib_json/CMakeLists.txt | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index 3bf5e1aea..f21069b7b 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -11,7 +11,14 @@ ELSE(JSONCPP_LIB_BUILD_SHARED) ENDIF(JSONCPP_LIB_BUILD_SHARED) if( CMAKE_COMPILER_IS_GNUCXX ) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing") + #Get compiler version. + execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion + OUTPUT_VARIABLE GNUCXX_VERSION ) + + #-Werror=* was introduced -after- GCC 4.1.2 + if( GNUCXX_VERSION VERSION_GREATER 4.1.2 ) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing") + endif() endif( CMAKE_COMPILER_IS_GNUCXX ) SET( JSONCPP_INCLUDE_DIR ../../include ) From 748328a0d128b403fe5ff05a7664cbca2d606ecd Mon Sep 17 00:00:00 2001 From: Magnus Bjerke Vik Date: Tue, 6 Jan 2015 09:51:44 +0100 Subject: [PATCH 005/885] Fix cmake_package install being broken because of wrong include path. The TARGET_INCLUDE_DIRECTORIES from inside the IF(JSONCPP_WITH_CMAKE_PACKAGE) block was removed, since it only needs to be set once. In addition the CMAKE_VERSION check was simplified.
--- src/lib_json/CMakeLists.txt | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index f21069b7b..d0f6a5ea5 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -49,18 +49,15 @@ ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} ) -IF(CMAKE_VERSION VERSION_EQUAL 2.8.11 OR CMAKE_VERSION VERSION_GREATER 2.8.11) - TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib - PUBLIC ${JSONCPP_INCLUDE_DIR} +IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC + $ + $ ) -ENDIF(CMAKE_VERSION VERSION_EQUAL 2.8.11 OR CMAKE_VERSION VERSION_GREATER 2.8.11) +ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) # Install instructions for this target IF(JSONCPP_WITH_CMAKE_PACKAGE) - TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib - PUBLIC $ - $ - ) SET(INSTALL_EXPORT EXPORT jsoncpp) ELSE(JSONCPP_WITH_CMAKE_PACKAGE) SET(INSTALL_EXPORT) From 1a4dc3a888c7e6e179503250ccb391820a586b1b Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 6 Jan 2015 12:09:48 -0600 Subject: [PATCH 006/885] quote cmdline arg See #99. --- test/runjsontests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index a1f6082b5..2defb53ae 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -1,8 +1,8 @@ from __future__ import print_function +from glob import glob import sys import os -import os.path -from glob import glob +import pipes import optparse VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' @@ -56,7 +56,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, options = is_json_checker_test and '--json-checker' or '' pipe = os.popen( "%s%s %s %s" % ( valgrind_path, jsontest_executable_path, options, - input_path) ) + pipes.quote(input_path))) process_output = pipe.read() status = pipe.close() if is_json_checker_test: From 65cee6ea16dc159ce8d974b729bc6e7855af8646 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 6 Jan 2015 12:40:36 -0600 Subject: [PATCH 007/885] fix double->string->double round-trip (bump minor ver.) See #98. http://stackoverflow.com/questions/747470/what-is-the-meaning-of-numeric-limitsdoubledigits10/16941784#16941784 --- src/lib_json/json_writer.cpp | 6 +++--- src/test_lib_json/main.cpp | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 280f09ed5..5113c38be 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -87,13 +87,13 @@ std::string valueToString(double value) { // visual studio 2005 to // avoid warning. 
#if defined(WINCE) - len = _snprintf(buffer, sizeof(buffer), "%.16g", value); + len = _snprintf(buffer, sizeof(buffer), "%.17g", value); #else - len = sprintf_s(buffer, sizeof(buffer), "%.16g", value); + len = sprintf_s(buffer, sizeof(buffer), "%.17g", value); #endif #else if (isfinite(value)) { - len = snprintf(buffer, sizeof(buffer), "%.16g", value); + len = snprintf(buffer, sizeof(buffer), "%.17g", value); } else { // IEEE standard states that NaN values will not compare to themselves if (value != value) { diff --git a/src/test_lib_json/main.cpp b/src/test_lib_json/main.cpp index 13fc21df5..51c5e7442 100644 --- a/src/test_lib_json/main.cpp +++ b/src/test_lib_json/main.cpp @@ -926,7 +926,7 @@ JSONTEST_FIXTURE(ValueTest, integers) { JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(Json::UInt64(1) << 63)), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("9.223372036854776e+18", + JSONTEST_ASSERT_STRING_EQUAL("9.2233720368547758e+18", normalizeFloatingPointStr(val.asString())); // int64 min @@ -974,7 +974,7 @@ JSONTEST_FIXTURE(ValueTest, integers) { JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("-9.223372036854776e+18", + JSONTEST_ASSERT_STRING_EQUAL("-9.2233720368547758e+18", normalizeFloatingPointStr(val.asString())); // 10^19 @@ -1065,7 +1065,7 @@ JSONTEST_FIXTURE(ValueTest, integers) { JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.844674407370955e+19", + JSONTEST_ASSERT_STRING_EQUAL("1.8446744073709552e+19", normalizeFloatingPointStr(val.asString())); #endif } @@ -1217,7 +1217,7 @@ JSONTEST_FIXTURE(ValueTest, nonIntegers) { normalizeFloatingPointStr(val.asString())); val = Json::Value(1.2345678901234); - JSONTEST_ASSERT_STRING_EQUAL("1.2345678901234", + JSONTEST_ASSERT_STRING_EQUAL("1.2345678901234001", normalizeFloatingPointStr(val.asString())); // A 16-digit floating point number. 
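The jump from "%.16g" to "%.17g" above is what makes the double -> string -> double round-trip lossless: an IEEE-754 double can need up to 17 significant decimal digits (std::numeric_limits<double>::max_digits10) to be reproduced exactly, and 16 digits can map two distinct doubles onto the same string. The short standalone C++ sketch below is illustrative only, not part of the patch series; it shows the effect on 1.0 + DBL_EPSILON, which prints as "1" with 16 digits but as "1.0000000000000002" with 17.

// Illustrative sketch (not part of the patches above): 17 significant digits
// are needed to round-trip an arbitrary IEEE-754 double through text.
#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <limits>

int main() {
  const double original = 1.0 + std::numeric_limits<double>::epsilon();
  char buf[32];

  std::snprintf(buf, sizeof(buf), "%.16g", original); // prints "1"
  const double back16 = std::strtod(buf, NULL);        // parses back to 1.0

  std::snprintf(buf, sizeof(buf), "%.17g", original);  // "1.0000000000000002"
  const double back17 = std::strtod(buf, NULL);         // recovers the exact value

  std::printf("16 digits round-trips: %s\n", back16 == original ? "yes" : "no");
  std::printf("17 digits round-trips: %s\n", back17 == original ? "yes" : "no");
  assert(back17 == original);
  return 0;
}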
From 8b489f891ae96d00237292f2446fb1a9645953b7 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 6 Jan 2015 12:46:17 -0600 Subject: [PATCH 008/885] 1.1.0 <- 1.0.0 --- include/json/version.h | 4 ++-- version | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/json/version.h b/include/json/version.h index 6fe06824d..58fca8a98 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,9 +4,9 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.0.0" +# define JSONCPP_VERSION_STRING "1.1.0" # define JSONCPP_VERSION_MAJOR 1 -# define JSONCPP_VERSION_MINOR 0 +# define JSONCPP_VERSION_MINOR 1 # define JSONCPP_VERSION_PATCH 0 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index 566d024c7..6314daca8 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.0.0 \ No newline at end of file +1.1.0 \ No newline at end of file From 4ca9d25ccceaa198c4e92b86d8ad8b8d01fb87ff Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 9 Jan 2015 22:28:20 -0600 Subject: [PATCH 009/885] Revert "Merge pull request #108 from open-source-parsers/quote-spaces" This reverts commit dfc5f879c168d11fe2f4ef686ab33bbecdc5679f, reversing changes made to 0f6884f771eae221873b345af2c6369ccf7dcb5b. --- test/runjsontests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index 2defb53ae..a1f6082b5 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -1,8 +1,8 @@ from __future__ import print_function -from glob import glob import sys import os -import pipes +import os.path +from glob import glob import optparse VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' @@ -56,7 +56,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, options = is_json_checker_test and '--json-checker' or '' pipe = os.popen( "%s%s %s %s" % ( valgrind_path, jsontest_executable_path, options, - pipes.quote(input_path))) + input_path) ) process_output = pipe.read() status = pipe.close() if is_json_checker_test: From d98b5f4230bf84d6282ca635eedebc2e1e0822dc Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 9 Jan 2015 22:32:10 -0600 Subject: [PATCH 010/885] quote spaces in commands for Windows See comments at: https://github.com/open-source-parsers/jsoncpp/commit/1a4dc3a888c7e6e179503250ccb391820a586b1b --- test/runjsontests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index a1f6082b5..9422d57d9 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -54,7 +54,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print('TESTING:', input_path, end=' ') options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( + pipe = os.popen( '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, input_path) ) process_output = pipe.read() From 8371a4337c61571eca60a744601befeaa024dc37 Mon Sep 17 00:00:00 2001 From: Magnus Bjerke Vik Date: Thu, 15 Jan 2015 19:16:51 +0100 Subject: [PATCH 011/885] CMake: Remove set(CMAKE_*_OUTPUT_DIRECTORY) With set(CMAKE_*_OUTPUT_DIRECTORY) when using jsoncpp as a sub project, the parent project's executables and libraries will also be outputed to jsoncpp's directory. 
By removing this, it is up to the parent projects to decide where to put their and jsoncpp's executables and libraries. --- CMakeLists.txt | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3e8f96ecb..31f73cff0 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,16 +31,6 @@ SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake CACHE PATH "Install dir for cmake package config files") MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR ) -# This ensures shared DLL are in the same dir as executable on Windows. -# Put all executables / libraries are in a project global directory. -SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib - CACHE PATH "Single directory for all static libraries.") -SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib - CACHE PATH "Single directory for all dynamic libraries on Unix.") -SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin - CACHE PATH "Single directory for all executable and dynamic libraries on Windows.") -MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY ) - # Set variable named ${VAR_NAME} to value ${VALUE} FUNCTION(set_using_dynamic_name VAR_NAME VALUE) SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE) From 01aee4a0dc481aa40ed5d8d0532eea7daec43441 Mon Sep 17 00:00:00 2001 From: datadiode Date: Sun, 11 Jan 2015 10:39:24 +0100 Subject: [PATCH 012/885] Fix Python test scripts for Python 3 and Windows --- test/runjsontests.py | 6 +++--- test/rununittests.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index 9422d57d9..62e560076 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -34,7 +34,7 @@ def safeGetLine( lines, index ): def safeReadFile( path ): try: - return file( path, 'rt' ).read() + return open( path, 'rt', encoding = 'utf-8' ).read() except IOError as e: return '' % (path,e) @@ -77,13 +77,13 @@ def runAllTests( jsontest_executable_path, input_dir = None, base_path = os.path.splitext(input_path)[0] actual_output = safeReadFile( base_path + '.actual' ) actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) + open(base_path + '.process-output', 'wt', encoding = 'utf-8').write( process_output ) if status: print('parsing failed') failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) else: expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() + expected_output = open( expected_output_path, 'rt', encoding = 'utf-8' ).read() detail = ( compareOutputs( expected_output, actual_output, 'input' ) or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) if detail: diff --git a/test/rununittests.py b/test/rununittests.py index 6279f80e3..90b7bb9ec 100644 --- a/test/rununittests.py +++ b/test/rununittests.py @@ -31,7 +31,7 @@ def runAllTests( exe_path, use_valgrind=False ): if not status: print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr) return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] + test_names = [name.strip() for name in test_names.decode('utf-8').strip().split('\n')] failures = [] for name in test_names: print('TESTING %s:' % name, end=' ') From cd140b5141c457818c86bf84a8ced696374896af Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 16 Jan 2015 
13:44:27 -0600 Subject: [PATCH 013/885] py2 and py3 --- test/runjsontests.py | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index 62e560076..724306334 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -1,12 +1,30 @@ from __future__ import print_function +from __future__ import unicode_literals +from io import open +from glob import glob import sys import os import os.path -from glob import glob import optparse VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' +def getStatusOutput(cmd): + """ + Return int, unicode (for both Python 2 and 3). + Note: os.popen().close() would return None for 0. + """ + pipe = os.popen(cmd) + process_output = pipe.read() + try: + # We have been using os.popen(). When we read() the result + # we get 'str' (bytes) in py2, and 'str' (unicode) in py3. + # Ugh! There must be a better way to handle this. + process_output = process_output.decode('utf-8') + except AttributeError: + pass # python3 + status = pipe.close() + return status, process_output def compareOutputs( expected, actual, message ): expected = expected.strip().replace('\r','').split('\n') actual = actual.strip().replace('\r','').split('\n') @@ -54,21 +72,20 @@ def runAllTests( jsontest_executable_path, input_dir = None, is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print('TESTING:', input_path, end=' ') options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( '%s%s %s "%s"' % ( + cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() + input_path) + status, process_output = getStatusOutput(cmd) if is_json_checker_test: if expect_failure: - if status is None: + if not status: print('FAILED') failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % safeReadFile(input_path)) ) else: print('OK') else: - if status is not None: + if status: print('FAILED') failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) else: From 4bc311503cad5596c8d129db13f1c72acb47b623 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 16 Jan 2015 14:48:06 -0600 Subject: [PATCH 014/885] just in case --- test/rununittests.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/test/rununittests.py b/test/rununittests.py index 90b7bb9ec..54c4da42c 100644 --- a/test/rununittests.py +++ b/test/rununittests.py @@ -1,4 +1,6 @@ from __future__ import print_function +from __future__ import unicode_literals +from io import open from glob import glob import sys import os @@ -19,7 +21,11 @@ def run( self, options ): else: cmd = [] cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + try: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + except: + print(cmd) + raise stdout = process.communicate()[0] if process.returncode: return False, stdout From c1441ef5e04c6b2e8db15cf043fd43eab2910ca2 Mon Sep 17 00:00:00 2001 From: datadiode Date: Thu, 15 Jan 2015 08:32:13 +0100 Subject: [PATCH 015/885] stricter float parsing fixes `test/jsonchecker/fail31.json` (issue #113) --- src/lib_json/json_reader.cpp | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index c5111f8d7..dfae3fb73 100644 
--- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -381,11 +381,24 @@ bool Reader::readCppStyleComment() { } void Reader::readNumber() { - while (current_ != end_) { - if (!(*current_ >= '0' && *current_ <= '9') && - !in(*current_, '.', 'e', 'E', '+', '-')) - break; - ++current_; + const char *p = current_; + char c = '0'; // stopgap for already consumed character + // integral part + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + // fractional part + if (c == '.') { + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + } + // exponential part + if (c == 'e' || c == 'E') { + c = (current_ = p) < end_ ? *p++ : 0; + if (c == '+' || c == '-') + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; } } From 4997dfb8afe181b043fc22e2663f9c9702d4255c Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 16 Jan 2015 15:09:54 -0600 Subject: [PATCH 016/885] 1.1.1 <- 1.1.0 slight change to fail on a bad float --- .gitignore | 2 +- include/json/version.h | 4 ++-- version | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 60c4a0b9a..88a0b0dbe 100644 --- a/.gitignore +++ b/.gitignore @@ -10,4 +10,4 @@ /libs/ /doc/doxyfile /dist/ -/include/json/version.h +#/include/json/version.h diff --git a/include/json/version.h b/include/json/version.h index 58fca8a98..baf5daa7b 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,10 +4,10 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.1.0" +# define JSONCPP_VERSION_STRING "1.1.1" # define JSONCPP_VERSION_MAJOR 1 # define JSONCPP_VERSION_MINOR 1 -# define JSONCPP_VERSION_PATCH 0 +# define JSONCPP_VERSION_PATCH 1 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index 6314daca8..cd075f270 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.1.0 \ No newline at end of file +1.1.1 \ No newline at end of file From 8f3aa220dbab931bfd5a32bdb7d7e77c785c1796 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 16 Jan 2015 16:27:06 -0600 Subject: [PATCH 017/885] consider these as binary, so git will not alter line-endings issue #116 --- test/data/.gitattributes | 1 + 1 file changed, 1 insertion(+) create mode 100644 test/data/.gitattributes diff --git a/test/data/.gitattributes b/test/data/.gitattributes new file mode 100644 index 000000000..87e56e777 --- /dev/null +++ b/test/data/.gitattributes @@ -0,0 +1 @@ +/* binary From 50753bb8082bb7b486a90753c1d450d0f69817a6 Mon Sep 17 00:00:00 2001 From: datadiode Date: Sat, 17 Jan 2015 13:21:42 +0100 Subject: [PATCH 018/885] Simplify Reader::decodeNumber() --- src/lib_json/json_reader.cpp | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index dfae3fb73..73dea1b04 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -517,13 +517,6 @@ bool Reader::decodeNumber(Token& token) { } bool Reader::decodeNumber(Token& token, Value& decoded) { - bool isDouble = false; - for (Location inspect = token.start_; inspect != token.end_; ++inspect) { - isDouble = isDouble || in(*inspect, '.', 'e', 'E', '+') || - (*inspect == '-' && inspect != token.start_); - } - if (isDouble) - return decodeDouble(token, 
decoded); // Attempts to parse the number as an integer. If the number is // larger than the maximum supported value of an integer then // we decode the number as a double. @@ -531,6 +524,7 @@ bool Reader::decodeNumber(Token& token, Value& decoded) { bool isNegative = *current == '-'; if (isNegative) ++current; + // TODO: Help the compiler do the div and mod at compile time or get rid of them. Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) : Value::maxLargestUInt; @@ -539,9 +533,7 @@ bool Reader::decodeNumber(Token& token, Value& decoded) { while (current < token.end_) { Char c = *current++; if (c < '0' || c > '9') - return addError("'" + std::string(token.start_, token.end_) + - "' is not a number.", - token); + return decodeDouble(token, decoded); Value::UInt digit(c - '0'); if (value >= threshold) { // We've hit or exceeded the max value divided by 10 (rounded down). If From 09d352ac13d3d78bf081972a5e27620c26b4815a Mon Sep 17 00:00:00 2001 From: datadiode Date: Sat, 17 Jan 2015 13:26:23 +0100 Subject: [PATCH 019/885] Remove unused functions --- include/json/reader.h | 1 - src/lib_json/json_reader.cpp | 24 ------------------------ 2 files changed, 25 deletions(-) diff --git a/include/json/reader.h b/include/json/reader.h index 98814d50e..bd2204be4 100644 --- a/include/json/reader.h +++ b/include/json/reader.h @@ -187,7 +187,6 @@ class JSON_API Reader { typedef std::deque Errors; - bool expectToken(TokenType type, Token& token, const char* message); bool readToken(Token& token); void skipSpaces(); bool match(Location pattern, int patternLength); diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index 73dea1b04..2e587ab4a 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -47,23 +47,6 @@ Features Features::strictMode() { // Implementation of class Reader // //////////////////////////////// -static inline bool in(Reader::Char c, - Reader::Char c1, - Reader::Char c2, - Reader::Char c3, - Reader::Char c4) { - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool in(Reader::Char c, - Reader::Char c1, - Reader::Char c2, - Reader::Char c3, - Reader::Char c4, - Reader::Char c5) { - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - static bool containsNewLine(Reader::Location begin, Reader::Location end) { for (; begin < end; ++begin) if (*begin == '\n' || *begin == '\r') @@ -229,13 +212,6 @@ void Reader::skipCommentTokens(Token& token) { } } -bool Reader::expectToken(TokenType type, Token& token, const char* message) { - readToken(token); - if (token.type_ != type) - return addError(message, token); - return true; -} - bool Reader::readToken(Token& token) { skipSpaces(); token.start_ = current_; From c07ef37904d7a829f28aa2f2908d8cd2b56f17dd Mon Sep 17 00:00:00 2001 From: datadiode Date: Sun, 18 Jan 2015 10:05:25 +0100 Subject: [PATCH 020/885] https://github.com/open-source-parsers/jsoncpp/issues/47 --- include/json/value.h | 5 +++-- src/lib_json/json_value.cpp | 9 +++++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index 197a85614..3473c7e33 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -240,8 +240,6 @@ Json::Value obj_value(Json::objectValue); // {} Value& operator=(Value other); /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
void swap(Value& other); ValueType type() const; @@ -444,6 +442,9 @@ Json::Value obj_value(Json::objectValue); // {} Value& resolveReference(const char* key, bool isStatic); + /// Swap values but leave comments and source offsets in place. + void swapPayload(Value& other); + #ifdef JSON_VALUE_USE_INTERNAL_MAP inline bool isItemAvailable() const { return itemIsUsed_ == 0; } diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index b73deac17..85b26ce79 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -406,11 +406,11 @@ Value::~Value() { } Value& Value::operator=(Value other) { - swap(other); + swapPayload(other); return *this; } -void Value::swap(Value& other) { +void Value::swapPayload(Value& other) { ValueType temp = type_; type_ = other.type_; other.type_ = temp; @@ -418,6 +418,11 @@ void Value::swap(Value& other) { int temp2 = allocated_; allocated_ = other.allocated_; other.allocated_ = temp2; +} + +void Value::swap(Value& other) { + swapPayload(other); + std::swap(comments_, other.comments_); std::swap(start_, other.start_); std::swap(limit_, other.limit_); } From 94b0297dc53d5402ff8ea5b56fef47ed17c6adf5 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 12:06:12 -0600 Subject: [PATCH 021/885] Revert "consider these as binary, so git will not alter line-endings" This reverts commit 8f3aa220dbab931bfd5a32bdb7d7e77c785c1796. We will find a better fix for #116. In the meantime, we want to see diffs for changes to test-data. --- test/data/.gitattributes | 1 - 1 file changed, 1 deletion(-) delete mode 100644 test/data/.gitattributes diff --git a/test/data/.gitattributes b/test/data/.gitattributes deleted file mode 100644 index 87e56e777..000000000 --- a/test/data/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -/* binary From 66eb72f121e689c17186f050093e6c6eff3688d8 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 11:02:22 -0600 Subject: [PATCH 022/885] use SwapPayload() to retain comments All tests pass, but we might be missing coverage. issue #47 --- include/json/value.h | 12 ++++++------ src/lib_json/json_reader.cpp | 10 +++++++--- src/lib_json/json_value.cpp | 2 +- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index 3473c7e33..93112d125 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -235,23 +235,26 @@ Json::Value obj_value(Json::objectValue); // {} Value(const CppTL::ConstString& value); #endif Value(bool value); + /// Deep copy. Value(const Value& other); ~Value(); + // Deep copy, then swap(other). Value& operator=(Value other); - /// Swap values. + /// Swap everything. void swap(Value& other); + /// Swap values but leave comments and source offsets in place. + void swapPayload(Value& other); ValueType type() const; + /// Compare payload only, not comments etc. bool operator<(const Value& other) const; bool operator<=(const Value& other) const; bool operator>=(const Value& other) const; bool operator>(const Value& other) const; - bool operator==(const Value& other) const; bool operator!=(const Value& other) const; - int compare(const Value& other) const; const char* asCString() const; @@ -442,9 +445,6 @@ Json::Value obj_value(Json::objectValue); // {} Value& resolveReference(const char* key, bool isStatic); - /// Swap values but leave comments and source offsets in place. 
- void swapPayload(Value& other); - #ifdef JSON_VALUE_USE_INTERNAL_MAP inline bool isItemAvailable() const { return itemIsUsed_ == 0; } diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index 2e587ab4a..be2234306 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -173,9 +173,12 @@ bool Reader::readValue() { currentValue().setOffsetLimit(token.end_ - begin_); break; case tokenNull: - currentValue() = Value(); + { + Value v; + currentValue().swapPayload(v); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); + } break; case tokenArraySeparator: if (features_.allowDroppedNullPlaceholders_) { @@ -393,7 +396,8 @@ bool Reader::readString() { bool Reader::readObject(Token& tokenStart) { Token tokenName; std::string name; - currentValue() = Value(objectValue); + Value init(objectValue); + currentValue().swapPayload(init); currentValue().setOffsetStart(tokenStart.start_ - begin_); while (readToken(tokenName)) { bool initialTokenOk = true; @@ -486,7 +490,7 @@ bool Reader::decodeNumber(Token& token) { Value decoded; if (!decodeNumber(token, decoded)) return false; - currentValue() = decoded; + currentValue().swapPayload(decoded); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); return true; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 85b26ce79..0a7fb858e 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -406,7 +406,7 @@ Value::~Value() { } Value& Value::operator=(Value other) { - swapPayload(other); + swap(other); return *this; } From 37644abd7742dbc3a803d9795b5c54d55ab74c19 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 11:11:13 -0600 Subject: [PATCH 023/885] test comment before several types * array * double * string * true * false * null --- test/data/test_comment_01.expected | 1 + test/data/test_comment_01.json | 1 + test/data/test_comment_02.expected | 11 ++++++++++- test/data/test_comment_02.json | 11 ++++++++++- 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/test/data/test_comment_01.expected b/test/data/test_comment_01.expected index 2a7f00c45..d8548d1e4 100644 --- a/test/data/test_comment_01.expected +++ b/test/data/test_comment_01.expected @@ -1,4 +1,5 @@ .={} +// Comment for array .test=[] .test[0]={} .test[0].a="aaa" diff --git a/test/data/test_comment_01.json b/test/data/test_comment_01.json index 7363490a9..db7c6a45c 100644 --- a/test/data/test_comment_01.json +++ b/test/data/test_comment_01.json @@ -1,5 +1,6 @@ { "test": + // Comment for array [ { "a" : "aaa" }, // Comment for a { "b" : "bbb" }, // Comment for b diff --git a/test/data/test_comment_02.expected b/test/data/test_comment_02.expected index 88d2bd0dc..8986dbac0 100644 --- a/test/data/test_comment_02.expected +++ b/test/data/test_comment_02.expected @@ -11,4 +11,13 @@ // Multiline comment cpp-style // Second line .cpp-test.c=3 -.cpp-test.d=4 +// Comment before double +.cpp-test.d=4.1 +// Comment before string +.cpp-test.e="e-string" +// Comment before true +.cpp-test.f=true +// Comment before false +.cpp-test.g=false +// Comment before null +.cpp-test.h=null diff --git a/test/data/test_comment_02.json b/test/data/test_comment_02.json index 297d88903..f5042e0dd 100644 --- a/test/data/test_comment_02.json +++ b/test/data/test_comment_02.json @@ -12,6 +12,15 @@ // Multiline comment cpp-style // Second line "c" : 3, - "d" : 4 + // Comment before double + "d" : 4.1, + // 
Comment before string + "e" : "e-string", + // Comment before true + "f" : true, + // Comment before false + "g" : false, + // Comment before null + "h" : null } } From 836f0fb863311bb4c36a2783cd28cfe40f08925e Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 11:09:36 -0600 Subject: [PATCH 024/885] fix comments before several types tests pass --- src/lib_json/json_reader.cpp | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index be2234306..b61f1b121 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -163,14 +163,20 @@ bool Reader::readValue() { successful = decodeString(token); break; case tokenTrue: - currentValue() = true; + { + Value v(true); + currentValue().swapPayload(v); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); + } break; case tokenFalse: - currentValue() = false; + { + Value v(false); + currentValue().swapPayload(v); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); + } break; case tokenNull: { @@ -185,7 +191,8 @@ bool Reader::readValue() { // "Un-read" the current token and mark the current value as a null // token. current_--; - currentValue() = Value(); + Value v; + currentValue().swapPayload(v); currentValue().setOffsetStart(current_ - begin_ - 1); currentValue().setOffsetLimit(current_ - begin_); break; @@ -450,7 +457,8 @@ bool Reader::readObject(Token& tokenStart) { } bool Reader::readArray(Token& tokenStart) { - currentValue() = Value(arrayValue); + Value init(arrayValue); + currentValue().swapPayload(init); currentValue().setOffsetStart(tokenStart.start_ - begin_); skipSpaces(); if (*current_ == ']') // empty array @@ -540,7 +548,7 @@ bool Reader::decodeDouble(Token& token) { Value decoded; if (!decodeDouble(token, decoded)) return false; - currentValue() = decoded; + currentValue().swapPayload(decoded); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); return true; @@ -583,10 +591,11 @@ bool Reader::decodeDouble(Token& token, Value& decoded) { } bool Reader::decodeString(Token& token) { - std::string decoded; - if (!decodeString(token, decoded)) + std::string decoded_string; + if (!decodeString(token, decoded_string)) return false; - currentValue() = decoded; + Value decoded(decoded_string); + currentValue().swapPayload(decoded); currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetLimit(token.end_ - begin_); return true; From 2cd0f4ec214f5980648d86014c17b7cb5026f5fa Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 12:44:49 -0600 Subject: [PATCH 025/885] 1.2.0 <- 1.1.1 `operator=()` (which already performed a deep-copy) now includes comments. The change is probably harmless in all practical cases. 
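Before the version bump below, it helps to make the new comment semantics from the preceding commits concrete: swap() now exchanges comments and source offsets together with the value, swapPayload() exchanges only the value, and operator= (still a deep copy) now carries comments across as well. The C++ sketch below is illustrative only and not part of the patch series; it assumes a jsoncpp build at or after this 1.2.0 change and uses only the public API shown in the diffs (setComment, hasComment, swapPayload).

// Illustrative sketch (assumes jsoncpp >= 1.2.0, i.e. the behavior introduced above).
#include <cassert>
#include <json/json.h>

int main() {
  Json::Value a(1);
  a.setComment("// describes a", Json::commentBefore);
  Json::Value b(2);                        // no comment attached

  Json::Value c;
  c = a;                                   // deep copy; the comment comes along
  assert(c.hasComment(Json::commentBefore));

  a.swapPayload(b);                        // values swap, comments stay in place
  assert(a.asInt() == 2 && a.hasComment(Json::commentBefore));
  assert(b.asInt() == 1 && !b.hasComment(Json::commentBefore));
  return 0;
}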
--- include/json/version.h | 6 +++--- version | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/include/json/version.h b/include/json/version.h index baf5daa7b..4565dc114 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,10 +4,10 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.1.1" +# define JSONCPP_VERSION_STRING "1.2.0" # define JSONCPP_VERSION_MAJOR 1 -# define JSONCPP_VERSION_MINOR 1 -# define JSONCPP_VERSION_PATCH 1 +# define JSONCPP_VERSION_MINOR 2 +# define JSONCPP_VERSION_PATCH 0 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index cd075f270..de23bf317 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.1.1 \ No newline at end of file +1.2.0 \ No newline at end of file From ec727e2f6b51fb4c02185a1ceed466e0fc6930b1 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 13:32:38 -0600 Subject: [PATCH 026/885] -Wall for Clang/GCC --- CMakeLists.txt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 31f73cff0..f5c40d52f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -83,6 +83,14 @@ if ( MSVC ) set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ") endif( MSVC ) +if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") + # using regular Clang or AppleClang + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall") +elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + # using GCC + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall") +endif() + IF(JSONCPP_WITH_WARNING_AS_ERROR) UseCompilationWarningAsError() ENDIF(JSONCPP_WITH_WARNING_AS_ERROR) From e39fb0083c2b013f684031f5cef8182634e121f5 Mon Sep 17 00:00:00 2001 From: Mark Zeren Date: Tue, 20 Jan 2015 02:02:33 +0000 Subject: [PATCH 027/885] Normalize comment EOLs while reading instead of while writing Tests are currently failing when git cloning on Windows with autocrlf = true. In that setup multiline comments contain \r\n EOLs. The test code assumes that comments contain \n EOLs and opens the .actual files (etc.) with "wt" which converts \n to \r\n. Thus we end up with \r\r\n EOLs in the output, which triggers a test failure. Instead we should cannonicalize comments while reading so that they contain only \n EOLs. This approach simplifies other parts of the reader and writer logic, and requires no changes to the test. It is a breaking change, but probably the Right Thing going forward. This change also fixes dereferencing past the end of the comment string in StyledWriter::writeCommentBeforeValue. Tests should be added with appropriate .gitattributes for the input files to ensure that we run tests for DOS, Mac, and Unix EOL files on all platforms. For now this change is enough to unblock Windows builds. 
issue #116 --- src/lib_json/json_reader.cpp | 44 ++++++++++++++++++++------- src/lib_json/json_writer.cpp | 59 +++++++----------------------------- 2 files changed, 44 insertions(+), 59 deletions(-) diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index b61f1b121..9e6d1616a 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -135,14 +135,9 @@ bool Reader::readValue() { bool successful = true; if (collectComments_ && !commentsBefore_.empty()) { - // Remove newline characters at the end of the comments - size_t lastNonNewline = commentsBefore_.find_last_not_of("\r\n"); - if (lastNonNewline != std::string::npos) { - commentsBefore_.erase(lastNonNewline + 1); - } else { - commentsBefore_.clear(); - } - + // Remove newline at the end of the comment + if (commentsBefore_[commentsBefore_.size() - 1] == '\n') + commentsBefore_.resize(commentsBefore_.size() - 1); currentValue().setComment(commentsBefore_, commentBefore); commentsBefore_ = ""; } @@ -337,14 +332,34 @@ bool Reader::readComment() { return true; } +static std::string normalizeEOL(Reader::Location begin, Reader::Location end) { + std::string normalized; + normalized.reserve(end - begin); + Reader::Location current = begin; + while (current != end) { + char c = *current++; + if (c == '\r') { + if (current != end && *current == '\n') + // convert dos EOL + ++current; + // convert Mac EOL + normalized += '\n'; + } else { + normalized += c; + } + } + return normalized; +} + void Reader::addComment(Location begin, Location end, CommentPlacement placement) { assert(collectComments_); + const std::string& normalized = normalizeEOL(begin, end); if (placement == commentAfterOnSameLine) { assert(lastValue_ != 0); - lastValue_->setComment(std::string(begin, end), placement); + lastValue_->setComment(normalized, placement); } else { - commentsBefore_ += std::string(begin, end); + commentsBefore_ += normalized; } } @@ -360,8 +375,15 @@ bool Reader::readCStyleComment() { bool Reader::readCppStyleComment() { while (current_ != end_) { Char c = getNextChar(); - if (c == '\r' || c == '\n') + if (c == '\n') break; + if (c == '\r') { + // Consume DOS EOL. It will be normalized in addComment. + if (current_ != end_ && *current_ == '\n') + getNextChar(); + // Break on Moc OS 9 EOL. 
+ break; + } } return true; } diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 5113c38be..778661c30 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -421,26 +421,27 @@ void StyledWriter::writeCommentBeforeValue(const Value& root) { document_ += "\n"; writeIndent(); - std::string normalizedComment = normalizeEOL(root.getComment(commentBefore)); - std::string::const_iterator iter = normalizedComment.begin(); - while (iter != normalizedComment.end()) { + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { document_ += *iter; - if (*iter == '\n' && *(iter + 1) == '/') + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) writeIndent(); ++iter; } - // Comments are stripped of newlines, so add one here + // Comments are stripped of trailing newlines, so add one here document_ += "\n"; } void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) { if (root.hasComment(commentAfterOnSameLine)) - document_ += " " + normalizeEOL(root.getComment(commentAfterOnSameLine)); + document_ += " " + root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { document_ += "\n"; - document_ += normalizeEOL(root.getComment(commentAfter)); + document_ += root.getComment(commentAfter); document_ += "\n"; } } @@ -451,25 +452,6 @@ bool StyledWriter::hasCommentForValue(const Value& value) { value.hasComment(commentAfter); } -std::string StyledWriter::normalizeEOL(const std::string& text) { - std::string normalized; - normalized.reserve(text.length()); - const char* begin = text.c_str(); - const char* end = begin + text.length(); - const char* current = begin; - while (current != end) { - char c = *current++; - if (c == '\r') // mac or dos EOL - { - if (*current == '\n') // convert dos EOL - ++current; - normalized += '\n'; - } else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - // Class StyledStreamWriter // ////////////////////////////////////////////////////////////////// @@ -646,17 +628,17 @@ void StyledStreamWriter::unindent() { void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { if (!root.hasComment(commentBefore)) return; - *document_ << normalizeEOL(root.getComment(commentBefore)); + *document_ << root.getComment(commentBefore); *document_ << "\n"; } void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { if (root.hasComment(commentAfterOnSameLine)) - *document_ << " " + normalizeEOL(root.getComment(commentAfterOnSameLine)); + *document_ << " " + root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { *document_ << "\n"; - *document_ << normalizeEOL(root.getComment(commentAfter)); + *document_ << root.getComment(commentAfter); *document_ << "\n"; } } @@ -667,25 +649,6 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) { value.hasComment(commentAfter); } -std::string StyledStreamWriter::normalizeEOL(const std::string& text) { - std::string normalized; - normalized.reserve(text.length()); - const char* begin = text.c_str(); - const char* end = begin + text.length(); - const char* current = begin; - while (current != end) { - char c = *current++; - if (c == '\r') // mac or dos EOL - { - if (*current == '\n') // convert dos EOL - ++current; - normalized += '\n'; - } else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - std::ostream& operator<<(std::ostream& sout, const 
Value& root) { Json::StyledStreamWriter writer; writer.write(sout, root); From 51c0afab22268424a28b3b7dc063e44f035b12bd Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 15:02:47 -0600 Subject: [PATCH 028/885] 1.2.1 <- 1.2.0 This can affect existing round-trip tests, but we never made any guarantees about whitespace constancy. --- include/json/version.h | 4 ++-- version | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/json/version.h b/include/json/version.h index 4565dc114..a46c7040b 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,10 +4,10 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.2.0" +# define JSONCPP_VERSION_STRING "1.2.1" # define JSONCPP_VERSION_MAJOR 1 # define JSONCPP_VERSION_MINOR 2 -# define JSONCPP_VERSION_PATCH 0 +# define JSONCPP_VERSION_PATCH 1 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index de23bf317..998dccf7d 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.2.0 \ No newline at end of file +1.2.1 \ No newline at end of file From c407f1407f349191dbdeb1973bd5b4858aba2238 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 15:16:44 -0600 Subject: [PATCH 029/885] test-data for #103 passes --- test/data/test_comment_01.expected | 1 + test/data/test_comment_01.json | 1 + 2 files changed, 2 insertions(+) diff --git a/test/data/test_comment_01.expected b/test/data/test_comment_01.expected index d8548d1e4..1ed01ba10 100644 --- a/test/data/test_comment_01.expected +++ b/test/data/test_comment_01.expected @@ -1,6 +1,7 @@ .={} // Comment for array .test=[] +// Comment within array .test[0]={} .test[0].a="aaa" .test[1]={} diff --git a/test/data/test_comment_01.json b/test/data/test_comment_01.json index db7c6a45c..6defe4004 100644 --- a/test/data/test_comment_01.json +++ b/test/data/test_comment_01.json @@ -2,6 +2,7 @@ "test": // Comment for array [ + // Comment within array { "a" : "aaa" }, // Comment for a { "b" : "bbb" }, // Comment for b { "c" : "ccc" } // Comment for c From 46a925ba4a3ddbbcfcfee5fb82a202ed9be60c01 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 15:19:22 -0600 Subject: [PATCH 030/885] fix compiler warning for a test --- src/test_lib_json/main.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test_lib_json/main.cpp b/src/test_lib_json/main.cpp index 51c5e7442..08ef66abe 100644 --- a/src/test_lib_json/main.cpp +++ b/src/test_lib_json/main.cpp @@ -17,8 +17,8 @@ #define kint64min Json::Value::minInt64 #define kuint64max Json::Value::maxUInt64 -static const double kdint64max = double(kint64max); -static const float kfint64max = float(kint64max); +//static const double kdint64max = double(kint64max); +//static const float kfint64max = float(kint64max); static const float kfint32max = float(kint32max); static const float kfuint32max = float(kuint32max); From 9454e687a32bb812092a990e3bc9162077e48e77 Mon Sep 17 00:00:00 2001 From: datadiode Date: Tue, 20 Jan 2015 15:25:04 -0600 Subject: [PATCH 031/885] Specialize std::swap() for Json::Value in a C++ standard compliant way originally from pull #119 --- include/json/value.h | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/include/json/value.h b/include/json/value.h index 93112d125..78e75467d 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -1082,6 +1082,14 
@@ class JSON_API ValueIterator : public ValueIteratorBase { } // namespace Json + +namespace std { +/// Specialize std::swap() for Json::Value. +template<> +inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); } +} + + #if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) #pragma warning(pop) #endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) From 7956ccd61ecefd5c24af78d8d7baad85106151cf Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 16:18:15 -0600 Subject: [PATCH 032/885] allow stream ops for JSON_FAIL_MESSAGE http://www.iar.com/Global/Resources/Developers_Toolbox/C_Cplusplus_Programming/Tips%20and%20tricks%20using%20the%20preprocessor%20%28part%20two%29.pdf --- include/json/assertions.h | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/include/json/assertions.h b/include/json/assertions.h index 5ef7e7bb7..21d8907ca 100644 --- a/include/json/assertions.h +++ b/include/json/assertions.h @@ -7,6 +7,7 @@ #define CPPTL_JSON_ASSERTIONS_H_INCLUDED #include <stdlib.h> +#include <sstream> #if !defined(JSON_IS_AMALGAMATION) #include "config.h" @@ -16,26 +17,26 @@ #include <stdexcept> #define JSON_ASSERT(condition) \ assert(condition); // @todo <= change this into an exception throw -#define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message); +#define JSON_FAIL_MESSAGE(message) do{std::ostringstream oss; oss << message; throw std::runtime_error(oss.str());}while(0) +//#define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message) #else // JSON_USE_EXCEPTION #define JSON_ASSERT(condition) assert(condition); // The call to assert() will show the failure message in debug builds. In -// release bugs we write to invalid memory in order to crash hard, so that a -// debugger or crash reporter gets the chance to take over. We still call exit() -// afterward in order to tell the compiler that this macro doesn't return. +// release bugs we abort, for a core-dump or debugger. #define JSON_FAIL_MESSAGE(message) \ { \ - assert(false&& message); \ - strcpy(reinterpret_cast<char*>(666), message); \ - exit(123); \ + std::ostringstream oss; oss << message; \ + assert(false && oss.str().c_str()); \ + abort(); \ } + #endif #define JSON_ASSERT_MESSAGE(condition, message) \ if (!(condition)) { \ - JSON_FAIL_MESSAGE(message) \ + JSON_FAIL_MESSAGE(message); \ } #endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED From 9de2c2d84d3655c76db876cd38c1faf23126422d Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 16:15:40 -0600 Subject: [PATCH 033/885] partial --- include/json/value.h | 9 +++++++++ src/lib_json/json_value.cpp | 8 ++++++++ 2 files changed, 17 insertions(+) diff --git a/include/json/value.h b/include/json/value.h index 78e75467d..18355d054 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -395,6 +395,15 @@ Json::Value obj_value(Json::objectValue); // {} Value removeMember(const char* key); /// Same as removeMember(const char*) Value removeMember(const std::string& key); + /** \brief Remove the indexed array element. + + O(n) expensive operations. + Update 'removed' iff removed. + (This is a better pattern than removeMember().) + JSON_FAIL if !isValidIndex(i) or if not arrayObject + \return true iff removed + */ + bool removeIndex(ArrayIndex i, Value* removed); /// Return true if the object has a member named key.
bool isMember(const char* key) const; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 0a7fb858e..aa9bfb4a3 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -1018,6 +1018,14 @@ Value Value::removeMember(const std::string& key) { return removeMember(key.c_str()); } +bool Value::removeIndex(ArrayIndex i, Value* removed) { + JSON_ASSERT_MESSAGE(this->type_ == arrayValue, + "in Json::Value::removeIndex(): requires arrayValue"); + JSON_ASSERT_MESSAGE(this->isValidIndex(i), + "invalid index i=" << i << " for array of size " << this->size()); + return true; +} + #ifdef JSON_USE_CPPTL Value Value::get(const CppTL::ConstString& key, const Value& defaultValue) const { From e87e41cdb0b26aceecdda451755b1237a4307979 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 16:24:11 -0600 Subject: [PATCH 034/885] from Itzik S; see issue #28 with minor corrections --- include/json/value.h | 3 +-- src/lib_json/json_value.cpp | 30 +++++++++++++++++++++++++----- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index 18355d054..c4169b440 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -400,8 +400,7 @@ Json::Value obj_value(Json::objectValue); // {} O(n) expensive operations. Update 'removed' iff removed. (This is a better pattern than removeMember().) - JSON_FAIL if !isValidIndex(i) or if not arrayObject - \return true iff removed + \return true iff removed (no exceptions) */ bool removeIndex(ArrayIndex i, Value* removed); diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index aa9bfb4a3..f9e733fa5 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -1018,12 +1018,32 @@ Value Value::removeMember(const std::string& key) { return removeMember(key.c_str()); } -bool Value::removeIndex(ArrayIndex i, Value* removed) { - JSON_ASSERT_MESSAGE(this->type_ == arrayValue, - "in Json::Value::removeIndex(): requires arrayValue"); - JSON_ASSERT_MESSAGE(this->isValidIndex(i), - "invalid index i=" << i << " for array of size " << this->size()); +bool Value::removeIndex(ArrayIndex index, Value* removed) { + if (this->type_ != arrayValue) { + return false; + } +#ifdef JSON_VALUE_USE_INTERNAL_MAP + JSON_FAIL_MESSAGE("removeIndex is not implemented for ValueInternalArray."); + return false; +#else + CZString key(index); + ObjectValues::iterator it = this->value_.map_->find(key); + if (it == this->value_.map_->end()) { + return false; + } + *removed = it->second; + ArrayIndex oldSize = this->size(); + // shift left all items left, into the place of the "removed" + for (ArrayIndex i=index; i<(oldSize-1); ++i){ + CZString key(i); + (*this->value_.map_)[key] = (*this)[i+1]; + } + // erase the last one ("leftover") + CZString keyLast(oldSize-1); + ObjectValues::iterator itLast = this->value_.map_->find(keyLast); + this->value_.map_->erase(itLast); return true; +#endif } #ifdef JSON_USE_CPPTL Value Value::get(const CppTL::ConstString& key, const Value& defaultValue) const { From e893625e88f02539b9cd03e591df3ee8970552c3 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 16:54:27 -0600 Subject: [PATCH 035/885] test removeIndex/Member() --- src/test_lib_json/main.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/test_lib_json/main.cpp b/src/test_lib_json/main.cpp index 08ef66abe..98fb8d00a 100644 --- a/src/test_lib_json/main.cpp +++ b/src/test_lib_json/main.cpp @@ -198,6 +198,14 @@ JSONTEST_FIXTURE(ValueTest, objects) { object1_["some other id"] = "foo"; JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]); +
JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]); + + // Remove. + Json::Value got; + got = object1_.removeMember("some other id"); + JSONTEST_ASSERT_EQUAL(Json::Value("foo"), got); + got = object1_.removeMember("some other id"); + JSONTEST_ASSERT_EQUAL(Json::nullValue, got); } JSONTEST_FIXTURE(ValueTest, arrays) { @@ -240,6 +248,10 @@ JSONTEST_FIXTURE(ValueTest, arrays) { array1_[2] = Json::Value(17); JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]); JSONTEST_ASSERT_EQUAL(Json::Value(17), array1_[2]); + Json::Value got; + JSONTEST_ASSERT_EQUAL(true, array1_.removeIndex(2, &got)); + JSONTEST_ASSERT_EQUAL(Json::Value(17), got); + JSONTEST_ASSERT_EQUAL(false, array1_.removeIndex(2, &got)); // gone now } JSONTEST_FIXTURE(ValueTest, null) { From 05c1b8344d8149f1b9f5b6ec7ca59af952c18da2 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Wed, 21 Jan 2015 15:43:48 -0600 Subject: [PATCH 036/885] drop this-> (team preference) --- src/lib_json/json_value.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index f9e733fa5..da5e35aa4 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -1019,7 +1019,7 @@ Value Value::removeMember(const std::string& key) { } bool Value::removeIndex(ArrayIndex index, Value* removed) { - if (this->type_ != arrayValue) { + if (type_ != arrayValue) { return false; } #ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -1027,21 +1027,21 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) { return false; #else CZString key(index); - ObjectValues::iterator it = this->value_.map_->find(key); - if (it == this->value_.map_->end()) { + ObjectValues::iterator it = value_.map_->find(key); + if (it == value_.map_->end()) { return false; } *removed = it->second; - ArrayIndex oldSize = this->size(); + ArrayIndex oldSize = size(); // shift left all items left, into the place of the "removed" for (ArrayIndex i=index; ivalue_.map_)[key] = (*this)[i+1]; + (*value_.map_)[key] = (*this)[i+1]; } // erase the last one ("leftover") CZString keyLast(oldSize-1); - ObjectValues::iterator itLast = this->value_.map_->find(keyLast); - this->value_.map_->erase(itLast); + ObjectValues::iterator itLast = value_.map_->find(keyLast); + value_.map_->erase(itLast); return true; #endif } From 59167d86271437ddd24e017c2d2d66893568a057 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Wed, 21 Jan 2015 16:04:46 -0600 Subject: [PATCH 037/885] more changes per cr --- src/lib_json/json_value.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index da5e35aa4..7cc65bd76 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -1034,12 +1034,12 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) { *removed = it->second; ArrayIndex oldSize = size(); // shift left all items left, into the place of the "removed" - for (ArrayIndex i=index; ifind(keyLast); value_.map_->erase(itLast); return true; From 00b8ce81dbcf16a8f37779b1dbf14673693a1312 Mon Sep 17 00:00:00 2001 From: Connor Manning Date: Thu, 22 Jan 2015 10:48:45 -0600 Subject: [PATCH 038/885] Build without warnings with -pedantic enabled. 
--- src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 7cc65bd76..9ca09201f 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -340,7 +340,7 @@ Value::Value(const Value& other) case stringValue: if (other.value_.string_) { value_.string_ = duplicateStringValue(other.value_.string_); - allocated_ = true; + allocated_ |= true; } else { value_.string_ = 0; allocated_ = false; From 26842530f2fab8cda2ff1630c38438e561b180e2 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 15:24:26 -0600 Subject: [PATCH 039/885] upgrade -std=c++ version Travis CI does not yet support gcc-4.8, needed for c++11, so we will try c++0x for now. --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f5c40d52f..a8558a302 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -85,10 +85,10 @@ endif( MSVC ) if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") # using regular Clang or AppleClang - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++11") elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") # using GCC - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++0x") endif() IF(JSONCPP_WITH_WARNING_AS_ERROR) From 76746b09fcd44ffee7f0f9114c73771b667061b6 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Wed, 21 Jan 2015 16:01:30 -0600 Subject: [PATCH 040/885] deprecate old removeMember() --- include/json/value.h | 9 ++++++++- src/lib_json/json_value.cpp | 33 ++++++++++++++++++++++----------- src/test_lib_json/main.cpp | 10 +++++++--- 3 files changed, 37 insertions(+), 15 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index c4169b440..b7ac2ad21 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -392,14 +392,21 @@ Json::Value obj_value(Json::objectValue); // {} /// \return the removed Value, or null. /// \pre type() is objectValue or nullValue /// \post type() is unchanged + /// \deprecated Value removeMember(const char* key); /// Same as removeMember(const char*) + /// \deprecated Value removeMember(const std::string& key); + /** \brief Remove the named map member. + + Update 'removed' iff removed. + \return true iff removed (no exceptions) + */ + bool removeMember(const char* key, Value* removed); /** \brief Remove the indexed array element. O(n) expensive operations. Update 'removed' iff removed. - (This is a better pattern than removeMember().) 
\return true iff removed (no exceptions) */ bool removeIndex(ArrayIndex i, Value* removed); diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 9ca09201f..d64605588 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -989,31 +989,42 @@ Value Value::get(const std::string& key, const Value& defaultValue) const { return get(key.c_str(), defaultValue); } -Value Value::removeMember(const char* key) { - JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue, - "in Json::Value::removeMember(): requires objectValue"); - if (type_ == nullValue) - return null; + +bool Value::removeMember(const char* key, Value* removed) { + if (type_ != objectValue) { + return false; + } #ifndef JSON_VALUE_USE_INTERNAL_MAP CZString actualKey(key, CZString::noDuplication); ObjectValues::iterator it = value_.map_->find(actualKey); if (it == value_.map_->end()) - return null; - Value old(it->second); + return false; + *removed = it->second; value_.map_->erase(it); - return old; + return true; #else Value* value = value_.map_->find(key); if (value) { - Value old(*value); + *removed = *value; value_.map_.remove(key); - return old; + return true; } else { - return null; + return false; } #endif } +Value Value::removeMember(const char* key) { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue, + "in Json::Value::removeMember(): requires objectValue"); + if (type_ == nullValue) + return null; + + Value removed; // null + removeMember(key, &removed); + return removed; // still null if removeMember() did nothing +} + Value Value::removeMember(const std::string& key) { return removeMember(key.c_str()); } diff --git a/src/test_lib_json/main.cpp b/src/test_lib_json/main.cpp index 98fb8d00a..8af3e19ef 100644 --- a/src/test_lib_json/main.cpp +++ b/src/test_lib_json/main.cpp @@ -202,10 +202,14 @@ JSONTEST_FIXTURE(ValueTest, objects) { // Remove. 
Json::Value got; - got = object1_.removeMember("some other id"); + bool did; + did = object1_.removeMember("some other id", &got); JSONTEST_ASSERT_EQUAL(Json::Value("foo"), got); - got = object1_.removeMember("some other id"); - JSONTEST_ASSERT_EQUAL(Json::nullValue, got); + JSONTEST_ASSERT_EQUAL(true, did); + got = Json::Value("bar"); + did = object1_.removeMember("some other id", &got); + JSONTEST_ASSERT_EQUAL(Json::Value("bar"), got); + JSONTEST_ASSERT_EQUAL(false, did); } JSONTEST_FIXTURE(ValueTest, arrays) { From 9132aa94b1230636477a2088dd375f20ef076d53 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 13:45:14 -0600 Subject: [PATCH 041/885] 1.3.0 http://apr.apache.org/versioning.html#binary --- include/json/version.h | 6 +++--- version | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/include/json/version.h b/include/json/version.h index a46c7040b..37e082b6c 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,10 +4,10 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.2.1" +# define JSONCPP_VERSION_STRING "1.3.0" # define JSONCPP_VERSION_MAJOR 1 -# define JSONCPP_VERSION_MINOR 2 -# define JSONCPP_VERSION_PATCH 1 +# define JSONCPP_VERSION_MINOR 3 +# define JSONCPP_VERSION_PATCH 0 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index 998dccf7d..589268e6f 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.2.1 \ No newline at end of file +1.3.0 \ No newline at end of file From 2160c9a042069385b81308b6e264da06e60ce9e2 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 09:02:44 -0600 Subject: [PATCH 042/885] switch from StyledWriter to StyledStream writer in tests --- src/jsontestrunner/main.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index ba985877d..46eb872bd 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -8,6 +8,7 @@ #include #include // sort +#include #include #if defined(_MSC_VER) && _MSC_VER >= 1310 @@ -158,8 +159,10 @@ static int rewriteValueTree(const std::string& rewritePath, std::string& rewrite) { // Json::FastWriter writer; // writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write(root); + Json::StyledStreamWriter writer; + std::ostringstream sout; + writer.write(sout, root); + rewrite = sout.str(); FILE* fout = fopen(rewritePath.c_str(), "wt"); if (!fout) { printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); From 942e2c999ad021b8cbd5737a1b70c85871cabbe8 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:03:55 -0600 Subject: [PATCH 043/885] unindent test-code --- src/jsontestrunner/main.cpp | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 46eb872bd..989160035 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -257,19 +257,21 @@ int main(int argc, const char* argv[]) { Json::Value root; exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly); - if (exitCode == 0 && !parseOnly) { - std::string rewrite; - exitCode = rewriteValueTree(rewritePath, root, rewrite); - if (exitCode == 0) { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree(rewrite, - 
rewriteActualPath, - "rewrite", - rewriteRoot, - features, - parseOnly); - } + if (exitCode != 0 || parseOnly) { + return exitCode; } + std::string rewrite; + exitCode = rewriteValueTree(rewritePath, root, rewrite); + if (exitCode =! 0) { + return exitCode; + } + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree(rewrite, + rewriteActualPath, + "rewrite", + rewriteRoot, + features, + parseOnly); } catch (const std::exception& e) { printf("Unhandled exception:\n%s\n", e.what()); From 05810a760761a3e341395d8d0a7078db5b142725 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:06:27 -0600 Subject: [PATCH 044/885] cleaner --- src/jsontestrunner/main.cpp | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 989160035..0b2f8e24a 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -257,21 +257,20 @@ int main(int argc, const char* argv[]) { Json::Value root; exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly); - if (exitCode != 0 || parseOnly) { + if (exitCode || parseOnly) { return exitCode; } std::string rewrite; exitCode = rewriteValueTree(rewritePath, root, rewrite); - if (exitCode =! 0) { + if (exitCode) { return exitCode; } Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree(rewrite, - rewriteActualPath, - "rewrite", - rewriteRoot, - features, - parseOnly); + exitCode = parseAndSaveValueTree( + rewrite, rewriteActualPath, "rewrite", rewriteRoot, features, parseOnly); + if (exitCode) { + return exitCode; + } } catch (const std::exception& e) { printf("Unhandled exception:\n%s\n", e.what()); From 632c9b5032e84167e20e6d36728ca7c8bf6dddc2 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:09:04 -0600 Subject: [PATCH 045/885] cleaner --- src/jsontestrunner/main.cpp | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 0b2f8e24a..3ded3bd93 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -130,11 +130,11 @@ printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") { static int parseAndSaveValueTree(const std::string& input, const std::string& actual, const std::string& kind, - Json::Value& root, const Json::Features& features, - bool parseOnly) { + bool parseOnly, + Json::Value* root) { Json::Reader reader(features); - bool parsingSuccessful = reader.parse(input, root); + bool parsingSuccessful = reader.parse(input, *root); if (!parsingSuccessful) { printf("Failed to parse %s file: \n%s\n", kind.c_str(), @@ -148,7 +148,7 @@ static int parseAndSaveValueTree(const std::string& input, printf("Failed to create %s actual file.\n", kind.c_str()); return 2; } - printValueTree(factual, root); + printValueTree(factual, *root); fclose(factual); } return 0; @@ -156,19 +156,19 @@ static int parseAndSaveValueTree(const std::string& input, static int rewriteValueTree(const std::string& rewritePath, const Json::Value& root, - std::string& rewrite) { + std::string* rewrite) { // Json::FastWriter writer; // writer.enableYAMLCompatibility(); Json::StyledStreamWriter writer; std::ostringstream sout; writer.write(sout, root); - rewrite = sout.str(); + *rewrite = sout.str(); FILE* fout = fopen(rewritePath.c_str(), "wt"); if (!fout) { printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); return 2; } - fprintf(fout, "%s\n", rewrite.c_str()); + 
fprintf(fout, "%s\n", rewrite->c_str()); fclose(fout); return 0; } @@ -250,24 +250,24 @@ int main(int argc, const char* argv[]) { return 3; } - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; + std::string const actualPath = basePath + ".actual"; + std::string const rewritePath = basePath + ".rewrite"; + std::string const rewriteActualPath = basePath + ".actual-rewrite"; Json::Value root; exitCode = parseAndSaveValueTree( - input, actualPath, "input", root, features, parseOnly); + input, actualPath, "input", features, parseOnly, &root); if (exitCode || parseOnly) { return exitCode; } std::string rewrite; - exitCode = rewriteValueTree(rewritePath, root, rewrite); + exitCode = rewriteValueTree(rewritePath, root, &rewrite); if (exitCode) { return exitCode; } Json::Value rewriteRoot; exitCode = parseAndSaveValueTree( - rewrite, rewriteActualPath, "rewrite", rewriteRoot, features, parseOnly); + rewrite, rewriteActualPath, "rewrite", features, parseOnly, &rewriteRoot); if (exitCode) { return exitCode; } From 79211e1aebdcbb8c4734a24fc40a06cb5c5ffad6 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:27:19 -0600 Subject: [PATCH 046/885] Options class for test --- src/jsontestrunner/main.cpp | 84 ++++++++++++++++++++++--------------- 1 file changed, 51 insertions(+), 33 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 3ded3bd93..5db8115d2 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -132,7 +132,8 @@ static int parseAndSaveValueTree(const std::string& input, const std::string& kind, const Json::Features& features, bool parseOnly, - Json::Value* root) { + Json::Value* root) +{ Json::Reader reader(features); bool parsingSuccessful = reader.parse(input, *root); if (!parsingSuccessful) { @@ -153,16 +154,31 @@ static int parseAndSaveValueTree(const std::string& input, } return 0; } - -static int rewriteValueTree(const std::string& rewritePath, - const Json::Value& root, - std::string* rewrite) { - // Json::FastWriter writer; - // writer.enableYAMLCompatibility(); +// static std::string useFastWriter(Json::Value const& root) { +// Json::FastWriter writer; +// writer.enableYAMLCompatibility(); +// return writer.write(root); +// } +static std::string useStyledWriter( + Json::Value const& root) +{ + Json::StyledWriter writer; + return writer.write(root); +} +static std::string useStyledStreamWriter( + Json::Value const& root) +{ Json::StyledStreamWriter writer; std::ostringstream sout; writer.write(sout, root); - *rewrite = sout.str(); + return sout.str(); +} +static int rewriteValueTree( + const std::string& rewritePath, + const Json::Value& root, + std::string* rewrite) +{ + *rewrite = useStyledWriter(root); FILE* fout = fopen(rewritePath.c_str(), "wt"); if (!fout) { printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); @@ -197,56 +213,56 @@ static int printUsage(const char* argv[]) { return 3; } -int parseCommandLine(int argc, - const char* argv[], - Json::Features& features, - std::string& path, - bool& parseOnly) { - parseOnly = false; +struct Options +{ + std::string path; + Json::Features features; + bool parseOnly; +}; +static int parseCommandLine( + int argc, const char* argv[], Options* opts) +{ + opts->parseOnly = false; if (argc < 2) { return printUsage(argv); } - int index = 1; if (std::string(argv[1]) == "--json-checker") { - features = Json::Features::strictMode(); - 
parseOnly = true; + opts->features = Json::Features::strictMode(); + opts->parseOnly = true; ++index; } - if (std::string(argv[1]) == "--json-config") { printConfig(); return 3; } - if (index == argc || index + 1 < argc) { return printUsage(argv); } - - path = argv[index]; + opts->path = argv[index]; return 0; } - +static void tryTest(Options const& opts) +{ +} int main(int argc, const char* argv[]) { - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine(argc, argv, features, path, parseOnly); + Options opts; + int exitCode = parseCommandLine(argc, argv, &opts); if (exitCode != 0) { return exitCode; } try { - std::string input = readInputTestFile(path.c_str()); + std::string input = readInputTestFile(opts.path.c_str()); if (input.empty()) { - printf("Failed to read input or empty input: %s\n", path.c_str()); + printf("Failed to read input or empty input: %s\n", opts.path.c_str()); return 3; } std::string basePath = removeSuffix(argv[1], ".json"); - if (!parseOnly && basePath.empty()) { + if (!opts.parseOnly && basePath.empty()) { printf("Bad input path. Path does not end with '.expected':\n%s\n", - path.c_str()); + opts.path.c_str()); return 3; } @@ -256,8 +272,9 @@ int main(int argc, const char* argv[]) { Json::Value root; exitCode = parseAndSaveValueTree( - input, actualPath, "input", features, parseOnly, &root); - if (exitCode || parseOnly) { + input, actualPath, "input", + opts.features, opts.parseOnly, &root); + if (exitCode || opts.parseOnly) { return exitCode; } std::string rewrite; @@ -267,7 +284,8 @@ int main(int argc, const char* argv[]) { } Json::Value rewriteRoot; exitCode = parseAndSaveValueTree( - rewrite, rewriteActualPath, "rewrite", features, parseOnly, &rewriteRoot); + rewrite, rewriteActualPath, "rewrite", + opts.features, opts.parseOnly, &rewriteRoot); if (exitCode) { return exitCode; } From 08cfd02d8cab9a275419d1441db291fc203ea366 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:33:47 -0600 Subject: [PATCH 047/885] fix minor bugs in test-runner --- src/jsontestrunner/main.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 5db8115d2..a44a08188 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -232,7 +232,7 @@ static int parseCommandLine( opts->parseOnly = true; ++index; } - if (std::string(argv[1]) == "--json-config") { + if (std::string(argv[index]) == "--json-config") { printConfig(); return 3; } @@ -259,7 +259,7 @@ int main(int argc, const char* argv[]) { return 3; } - std::string basePath = removeSuffix(argv[1], ".json"); + std::string basePath = removeSuffix(opts.path, ".json"); if (!opts.parseOnly && basePath.empty()) { printf("Bad input path. 
Path does not end with '.expected':\n%s\n", opts.path.c_str()); From 58c31ac55056b3ef3394b6d0b8f37ae18b3e8455 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:36:55 -0600 Subject: [PATCH 048/885] mv try-block --- src/jsontestrunner/main.cpp | 84 +++++++++++++++++++------------------ 1 file changed, 43 insertions(+), 41 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index a44a08188..deb06875c 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -242,58 +242,60 @@ static int parseCommandLine( opts->path = argv[index]; return 0; } -static void tryTest(Options const& opts) +static int runTest(Options const& opts) { + int exitCode = 0; + + std::string input = readInputTestFile(opts.path.c_str()); + if (input.empty()) { + printf("Failed to read input or empty input: %s\n", opts.path.c_str()); + return 3; + } + + std::string basePath = removeSuffix(opts.path, ".json"); + if (!opts.parseOnly && basePath.empty()) { + printf("Bad input path. Path does not end with '.expected':\n%s\n", + opts.path.c_str()); + return 3; + } + + std::string const actualPath = basePath + ".actual"; + std::string const rewritePath = basePath + ".rewrite"; + std::string const rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( + input, actualPath, "input", + opts.features, opts.parseOnly, &root); + if (exitCode || opts.parseOnly) { + return exitCode; + } + std::string rewrite; + exitCode = rewriteValueTree(rewritePath, root, &rewrite); + if (exitCode) { + return exitCode; + } + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( + rewrite, rewriteActualPath, "rewrite", + opts.features, opts.parseOnly, &rewriteRoot); + if (exitCode) { + return exitCode; + } + return 0; } int main(int argc, const char* argv[]) { Options opts; int exitCode = parseCommandLine(argc, argv, &opts); if (exitCode != 0) { + printf("Failed to parse command-line."); return exitCode; } - try { - std::string input = readInputTestFile(opts.path.c_str()); - if (input.empty()) { - printf("Failed to read input or empty input: %s\n", opts.path.c_str()); - return 3; - } - - std::string basePath = removeSuffix(opts.path, ".json"); - if (!opts.parseOnly && basePath.empty()) { - printf("Bad input path. 
Path does not end with '.expected':\n%s\n", - opts.path.c_str()); - return 3; - } - - std::string const actualPath = basePath + ".actual"; - std::string const rewritePath = basePath + ".rewrite"; - std::string const rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( - input, actualPath, "input", - opts.features, opts.parseOnly, &root); - if (exitCode || opts.parseOnly) { - return exitCode; - } - std::string rewrite; - exitCode = rewriteValueTree(rewritePath, root, &rewrite); - if (exitCode) { - return exitCode; - } - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( - rewrite, rewriteActualPath, "rewrite", - opts.features, opts.parseOnly, &rewriteRoot); - if (exitCode) { - return exitCode; - } + return runTest(opts); } catch (const std::exception& e) { printf("Unhandled exception:\n%s\n", e.what()); - exitCode = 1; + return 1; } - - return exitCode; } From 3682f60927cf5029eb6299b63ef45082d946a70e Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:46:05 -0600 Subject: [PATCH 049/885] --json-writer arg --- src/jsontestrunner/main.cpp | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index deb06875c..f6f12b8a7 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -15,6 +15,15 @@ #pragma warning(disable : 4996) // disable fopen deprecation warning #endif +struct Options +{ + std::string path; + Json::Features features; + bool parseOnly; + typedef std::string (*writeFuncType)(Json::Value const&); + writeFuncType write; +}; + static std::string normalizeFloatingPointStr(double value) { char buffer[32]; #if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) @@ -176,9 +185,10 @@ static std::string useStyledStreamWriter( static int rewriteValueTree( const std::string& rewritePath, const Json::Value& root, + Options::writeFuncType write, std::string* rewrite) { - *rewrite = useStyledWriter(root); + *rewrite = write(root); FILE* fout = fopen(rewritePath.c_str(), "wt"); if (!fout) { printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); @@ -213,21 +223,16 @@ static int printUsage(const char* argv[]) { return 3; } -struct Options -{ - std::string path; - Json::Features features; - bool parseOnly; -}; static int parseCommandLine( int argc, const char* argv[], Options* opts) { opts->parseOnly = false; + opts->write = &useStyledWriter; if (argc < 2) { return printUsage(argv); } int index = 1; - if (std::string(argv[1]) == "--json-checker") { + if (std::string(argv[index]) == "--json-checker") { opts->features = Json::Features::strictMode(); opts->parseOnly = true; ++index; @@ -236,6 +241,18 @@ static int parseCommandLine( printConfig(); return 3; } + if (std::string(argv[index]) == "--json-writer") { + ++index; + std::string const writerName(argv[index++]); + if (writerName == "StyledWriter") { + opts->write = &useStyledWriter; + } else if (writerName == "StyledStreamWriter") { + opts->write = &useStyledStreamWriter; + } else { + printf("Unknown '--json-writer %s'\n", writerName.c_str()); + return 4; + } + } if (index == argc || index + 1 < argc) { return printUsage(argv); } @@ -271,7 +288,7 @@ static int runTest(Options const& opts) return exitCode; } std::string rewrite; - exitCode = rewriteValueTree(rewritePath, root, &rewrite); + exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite); if (exitCode) { return exitCode; } From 
26c52861b9e5d73eb20c9db2e0952c0e7cdd57b7 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 11:53:16 -0600 Subject: [PATCH 050/885] pass --json-writer StyledWriter --- test/runjsontests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/runjsontests.py b/test/runjsontests.py index 724306334..ffe033f48 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -72,6 +72,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print('TESTING:', input_path, end=' ') options = is_json_checker_test and '--json-checker' or '' + options += ' --json-writer StyledWriter' cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, input_path) From ac6bbbc7392293fec3721b1cec1df2f846b9aa11 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Tue, 20 Jan 2015 11:36:05 -0600 Subject: [PATCH 051/885] show cmd in runjsontests.py --- test/runjsontests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/runjsontests.py b/test/runjsontests.py index ffe033f48..5230965c7 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -14,6 +14,7 @@ def getStatusOutput(cmd): Return int, unicode (for both Python 2 and 3). Note: os.popen().close() would return None for 0. """ + print(cmd, file=sys.stderr) pipe = os.popen(cmd) process_output = pipe.read() try: From 70704b9a707b83d66660425783e81f704bc2ce0c Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 12:04:14 -0600 Subject: [PATCH 052/885] test both StyledWriter and StyledStreamWriter --- test/runjsontests.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index 5230965c7..19903b96b 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -58,7 +58,8 @@ def safeReadFile( path ): return '' % (path,e) def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): + use_valgrind=False, with_json_checker=False, + writerClass='StyledWriter'): if not input_dir: input_dir = os.path.join( os.getcwd(), 'data' ) tests = glob( os.path.join( input_dir, '*.json' ) ) @@ -73,7 +74,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print('TESTING:', input_path, end=' ') options = is_json_checker_test and '--json-checker' or '' - options += ' --json-writer StyledWriter' + options += ' --json-writer %s'%writerClass cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, input_path) @@ -147,7 +148,15 @@ def main(): else: input_path = None status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + use_valgrind=options.valgrind, + with_json_checker=options.with_json_checker, + writerClass='StyledWriter') + if status: + sys.exit( status ) + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, + with_json_checker=options.with_json_checker, + writerClass='StyledStreamWriter') sys.exit( status ) if __name__ == '__main__': From 3efc587fbabd4a4e6114a82a0acd80b844e64a0e Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 09:03:00 -0600 Subject: [PATCH 053/885] make StyledStreamWriter work more like StyledWriter tests pass --- src/lib_json/json_writer.cpp | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git 
a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 778661c30..099cc29d5 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -628,7 +628,20 @@ void StyledStreamWriter::unindent() { void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { if (!root.hasComment(commentBefore)) return; - *document_ << root.getComment(commentBefore); + + *document_ << "\n"; + writeIndent(); + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { + *document_ << *iter; + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) + writeIndent(); + ++iter; + } + + // Comments are stripped of trailing newlines, so add one here *document_ << "\n"; } From d383056fbbb97dc5bd6d8c559c25e8b75a34ecf1 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 13:09:43 -0600 Subject: [PATCH 054/885] avoid extra newlines in StyledStreamWriter Add indented_ as a bitfield. (Verified that sizeof(StyledStreamWriter) remains 96 for binary compatibility. But the new symbol requires a minor version-bump.) --- include/json/writer.h | 3 ++- src/lib_json/json_writer.cpp | 25 +++++++++++-------------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index dc9e46f4b..cacb10e29 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -187,7 +187,8 @@ class JSON_API StyledStreamWriter { std::string indentString_; int rightMargin_; std::string indentation_; - bool addChildValues_; + bool addChildValues_ : 1; + bool indented_ : 1; }; #if defined(JSON_HAS_INT64) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 099cc29d5..467ab82da 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -463,6 +463,7 @@ void StyledStreamWriter::write(std::ostream& out, const Value& root) { document_ = &out; addChildValues_ = false; indentString_ = ""; + indented_ = false; writeCommentBeforeValue(root); writeValue(root); writeCommentAfterValueOnSameLine(root); @@ -539,8 +540,10 @@ void StyledStreamWriter::writeArrayValue(const Value& value) { if (hasChildValue) writeWithIndent(childValues_[index]); else { - writeIndent(); + if (!indented_) writeIndent(); + indented_ = true; writeValue(childValue); + indented_ = false; } if (++index == size) { writeCommentAfterValueOnSameLine(childValue); @@ -598,24 +601,17 @@ void StyledStreamWriter::pushValue(const std::string& value) { } void StyledStreamWriter::writeIndent() { - /* - Some comments in this method would have been nice. ;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ + // blep intended this to look at the so-far-written string + // to determine whether we are already indented, but + // with a stream we cannot do that. So we rely on some saved state. + // The caller checks indented_. 
*document_ << '\n' << indentString_; } void StyledStreamWriter::writeWithIndent(const std::string& value) { - writeIndent(); + if (!indented_) writeIndent(); *document_ << value; + indented_ = false; } void StyledStreamWriter::indent() { indentString_ += indentation_; } @@ -643,6 +639,7 @@ void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { // Comments are stripped of trailing newlines, so add one here *document_ << "\n"; + indented_ = false; } void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { From f8ca6cbb25806af6af937623096597214ae87839 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 14:22:41 -0600 Subject: [PATCH 055/885] 1.4.0 <- 1.3.0 Minor version bump, but we will wait for a few more commits this time before tagging the release. --- include/json/version.h | 4 ++-- version | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/json/version.h b/include/json/version.h index 37e082b6c..9b338b818 100644 --- a/include/json/version.h +++ b/include/json/version.h @@ -4,9 +4,9 @@ #ifndef JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED -# define JSONCPP_VERSION_STRING "1.3.0" +# define JSONCPP_VERSION_STRING "1.4.0" # define JSONCPP_VERSION_MAJOR 1 -# define JSONCPP_VERSION_MINOR 3 +# define JSONCPP_VERSION_MINOR 4 # define JSONCPP_VERSION_PATCH 0 # define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) diff --git a/version b/version index 589268e6f..4c0cef3b6 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.3.0 \ No newline at end of file +1.4.0 \ No newline at end of file From 8d15e51228f736a2a969dbc802e5e485518c9f7f Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 12:03:41 -0600 Subject: [PATCH 056/885] add test_comment_00 one-element array with comment, for issue #103 --- test/data/test_comment_00.expected | 4 ++++ test/data/test_comment_00.json | 5 +++++ 2 files changed, 9 insertions(+) create mode 100644 test/data/test_comment_00.expected create mode 100644 test/data/test_comment_00.json diff --git a/test/data/test_comment_00.expected b/test/data/test_comment_00.expected new file mode 100644 index 000000000..284a797d7 --- /dev/null +++ b/test/data/test_comment_00.expected @@ -0,0 +1,4 @@ +// Comment for array +.=[] +// Comment within array +.[0]="one-element" diff --git a/test/data/test_comment_00.json b/test/data/test_comment_00.json new file mode 100644 index 000000000..4df577a8a --- /dev/null +++ b/test/data/test_comment_00.json @@ -0,0 +1,5 @@ +// Comment for array +[ + // Comment within array + "one-element" +] From 216ecd30851d5cebc2a4f3786196d981a44d26e0 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 12:46:10 -0600 Subject: [PATCH 057/885] fix test_comment_00 for #103 --- src/lib_json/json_writer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 467ab82da..d8a304632 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -376,6 +376,9 @@ bool StyledWriter::isMultineArray(const Value& value) { addChildValues_ = true; int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + isMultiLine = true; + } writeValue(value[index]); lineLength += int(childValues_[index].length()); } @@ -584,6 +587,9 @@ bool StyledStreamWriter::isMultineArray(const Value& 
value) { addChildValues_ = true; int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + isMultiLine = true; + } writeValue(value[index]); lineLength += int(childValues_[index].length()); } From 2bc6137ada4195be3a737c3d9410eaa60ec13a10 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 13:42:37 -0600 Subject: [PATCH 058/885] fix gcc warnings --- CMakeLists.txt | 4 ++-- include/json/value.h | 4 ++-- src/lib_json/json_value.cpp | 2 +- src/test_lib_json/jsontest.cpp | 2 +- src/test_lib_json/jsontest.h | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a8558a302..96219e27e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -85,10 +85,10 @@ endif( MSVC ) if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") # using regular Clang or AppleClang - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++11") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall") elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") # using GCC - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++0x") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -Wpedantic") endif() IF(JSONCPP_WITH_WARNING_AS_ERROR) diff --git a/include/json/value.h b/include/json/value.h index b7ac2ad21..b2746889c 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -505,10 +505,10 @@ Json::Value obj_value(Json::objectValue); // {} #endif } value_; ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. + unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. #ifdef JSON_VALUE_USE_INTERNAL_MAP unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. + unsigned int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. 
#endif CommentInfo* comments_; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index d64605588..1a1c76335 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -340,7 +340,7 @@ Value::Value(const Value& other) case stringValue: if (other.value_.string_) { value_.string_ = duplicateStringValue(other.value_.string_); - allocated_ |= true; + allocated_ = true; } else { value_.string_ = 0; allocated_ = false; diff --git a/src/test_lib_json/jsontest.cpp b/src/test_lib_json/jsontest.cpp index ef9c543fa..bd9463fa5 100644 --- a/src/test_lib_json/jsontest.cpp +++ b/src/test_lib_json/jsontest.cpp @@ -323,7 +323,7 @@ void Runner::listTests() const { } int Runner::runCommandLine(int argc, const char* argv[]) const { - typedef std::deque TestNames; + // typedef std::deque TestNames; Runner subrunner; for (int index = 1; index < argc; ++index) { std::string opt = argv[index]; diff --git a/src/test_lib_json/jsontest.h b/src/test_lib_json/jsontest.h index 5c56a40b0..cf1ef6bd2 100644 --- a/src/test_lib_json/jsontest.h +++ b/src/test_lib_json/jsontest.h @@ -214,7 +214,7 @@ TestResult& checkStringEqual(TestResult& result, #define JSONTEST_ASSERT_PRED(expr) \ { \ JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr \ + result_->predicateId_, __FILE__, __LINE__, #expr, nullptr, nullptr \ }; \ result_->predicateStackTail_->next_ = &_minitest_Context; \ result_->predicateId_ += 1; \ From 7d82b1472674c5ed3907d65789811ca04ac25d5b Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 14:34:54 -0600 Subject: [PATCH 059/885] fix issue #90 We are static-casting to U, so we really have no reason to use references. However, if this comes up again, try applying -ffloat-store to the target executable, per https://github.com/open-source-parsers/jsoncpp/issues/90 --- src/test_lib_json/CMakeLists.txt | 3 +++ src/test_lib_json/jsontest.h | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/test_lib_json/CMakeLists.txt b/src/test_lib_json/CMakeLists.txt index 420d65996..24dc56ff2 100644 --- a/src/test_lib_json/CMakeLists.txt +++ b/src/test_lib_json/CMakeLists.txt @@ -11,6 +11,9 @@ ADD_EXECUTABLE( jsoncpp_test TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib) +# another way to solve issue #90 +#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store) + # Run unit tests in post-build # (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?) 
IF(JSONCPP_WITH_POST_BUILD_UNITTEST) diff --git a/src/test_lib_json/jsontest.h b/src/test_lib_json/jsontest.h index cf1ef6bd2..127b7c221 100644 --- a/src/test_lib_json/jsontest.h +++ b/src/test_lib_json/jsontest.h @@ -178,8 +178,8 @@ class Runner { template TestResult& checkEqual(TestResult& result, - const T& expected, - const U& actual, + T expected, + U actual, const char* file, unsigned int line, const char* expr) { From 494950a63d11b0952b187deb38dff20f6f1c269b Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 15:29:52 -0600 Subject: [PATCH 060/885] rm extra whitespace in python, per PEP8 --- amalgamate.py | 172 ++++++++++++++++---------------- devtools/antglob.py | 106 ++++++++++---------- devtools/batchbuild.py | 173 ++++++++++++++++---------------- devtools/fixeol.py | 24 ++--- devtools/licenseupdater.py | 26 ++--- devtools/tarball.py | 24 ++--- doxybuild.py | 56 +++++------ makerelease.py | 196 ++++++++++++++++++------------------- scons-tools/globtool.py | 28 +++--- scons-tools/srcdist.py | 15 ++- scons-tools/substinfile.py | 2 +- scons-tools/targz.py | 16 +-- test/cleantests.py | 4 +- test/generate_expected.py | 4 +- test/pyjsontestrunner.py | 48 ++++----- test/runjsontests.py | 67 +++++++------ test/rununittests.py | 33 +++---- 17 files changed, 494 insertions(+), 500 deletions(-) diff --git a/amalgamate.py b/amalgamate.py index 550f6a676..22b825bf9 100644 --- a/amalgamate.py +++ b/amalgamate.py @@ -10,46 +10,46 @@ import sys class AmalgamationFile: - def __init__( self, top_dir ): + def __init__(self, top_dir): self.top_dir = top_dir self.blocks = [] - def add_text( self, text ): - if not text.endswith( "\n" ): + def add_text(self, text): + if not text.endswith("\n"): text += "\n" - self.blocks.append( text ) - - def add_file( self, relative_input_path, wrap_in_comment=False ): - def add_marker( prefix ): - self.add_text( "" ) - self.add_text( "// " + "/"*70 ) - self.add_text( "// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")) ) - self.add_text( "// " + "/"*70 ) - self.add_text( "" ) - add_marker( "Beginning" ) - f = open( os.path.join( self.top_dir, relative_input_path ), "rt" ) + self.blocks.append(text) + + def add_file(self, relative_input_path, wrap_in_comment=False): + def add_marker(prefix): + self.add_text("") + self.add_text("// " + "/"*70) + self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/"))) + self.add_text("// " + "/"*70) + self.add_text("") + add_marker("Beginning") + f = open(os.path.join(self.top_dir, relative_input_path), "rt") content = f.read() if wrap_in_comment: content = "/*\n" + content + "\n*/" - self.add_text( content ) + self.add_text(content) f.close() - add_marker( "End" ) - self.add_text( "\n\n\n\n" ) - - def get_value( self ): - return "".join( self.blocks ).replace("\r\n","\n") - - def write_to( self, output_path ): - output_dir = os.path.dirname( output_path ) - if output_dir and not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - f = open( output_path, "wb" ) - f.write( str.encode(self.get_value(), 'UTF-8') ) + add_marker("End") + self.add_text("\n\n\n\n") + + def get_value(self): + return "".join(self.blocks).replace("\r\n","\n") + + def write_to(self, output_path): + output_dir = os.path.dirname(output_path) + if output_dir and not os.path.isdir(output_dir): + os.makedirs(output_dir) + f = open(output_path, "wb") + f.write(str.encode(self.get_value(), 'UTF-8')) f.close() -def amalgamate_source( source_top_dir=None, +def 
amalgamate_source(source_top_dir=None, target_source_path=None, - header_include_path=None ): + header_include_path=None): """Produces amalgated source. Parameters: source_top_dir: top-directory @@ -57,69 +57,69 @@ def amalgamate_source( source_top_dir=None, header_include_path: generated header path relative to target_source_path. """ print("Amalgating header...") - header = AmalgamationFile( source_top_dir ) - header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." ) - header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path ) - header.add_file( "LICENSE", wrap_in_comment=True ) - header.add_text( "#ifndef JSON_AMALGATED_H_INCLUDED" ) - header.add_text( "# define JSON_AMALGATED_H_INCLUDED" ) - header.add_text( "/// If defined, indicates that the source file is amalgated" ) - header.add_text( "/// to prevent private header inclusion." ) - header.add_text( "#define JSON_IS_AMALGAMATION" ) - header.add_file( "include/json/version.h" ) - header.add_file( "include/json/config.h" ) - header.add_file( "include/json/forwards.h" ) - header.add_file( "include/json/features.h" ) - header.add_file( "include/json/value.h" ) - header.add_file( "include/json/reader.h" ) - header.add_file( "include/json/writer.h" ) - header.add_file( "include/json/assertions.h" ) - header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" ) - - target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + header = AmalgamationFile(source_top_dir) + header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).") + header.add_text("/// It is intented to be used with #include <%s>" % header_include_path) + header.add_file("LICENSE", wrap_in_comment=True) + header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED") + header.add_text("# define JSON_AMALGATED_H_INCLUDED") + header.add_text("/// If defined, indicates that the source file is amalgated") + header.add_text("/// to prevent private header inclusion.") + header.add_text("#define JSON_IS_AMALGAMATION") + header.add_file("include/json/version.h") + header.add_file("include/json/config.h") + header.add_file("include/json/forwards.h") + header.add_file("include/json/features.h") + header.add_file("include/json/value.h") + header.add_file("include/json/reader.h") + header.add_file("include/json/writer.h") + header.add_file("include/json/assertions.h") + header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED") + + target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path) print("Writing amalgated header to %r" % target_header_path) - header.write_to( target_header_path ) + header.write_to(target_header_path) - base, ext = os.path.splitext( header_include_path ) + base, ext = os.path.splitext(header_include_path) forward_header_include_path = base + "-forwards" + ext print("Amalgating forward header...") - header = AmalgamationFile( source_top_dir ) - header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." ) - header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path ) - header.add_text( "/// This header provides forward declaration for all JsonCpp types." 
) - header.add_file( "LICENSE", wrap_in_comment=True ) - header.add_text( "#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" ) - header.add_text( "# define JSON_FORWARD_AMALGATED_H_INCLUDED" ) - header.add_text( "/// If defined, indicates that the source file is amalgated" ) - header.add_text( "/// to prevent private header inclusion." ) - header.add_text( "#define JSON_IS_AMALGAMATION" ) - header.add_file( "include/json/config.h" ) - header.add_file( "include/json/forwards.h" ) - header.add_text( "#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" ) - - target_forward_header_path = os.path.join( os.path.dirname(target_source_path), - forward_header_include_path ) + header = AmalgamationFile(source_top_dir) + header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).") + header.add_text("/// It is intented to be used with #include <%s>" % forward_header_include_path) + header.add_text("/// This header provides forward declaration for all JsonCpp types.") + header.add_file("LICENSE", wrap_in_comment=True) + header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") + header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED") + header.add_text("/// If defined, indicates that the source file is amalgated") + header.add_text("/// to prevent private header inclusion.") + header.add_text("#define JSON_IS_AMALGAMATION") + header.add_file("include/json/config.h") + header.add_file("include/json/forwards.h") + header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") + + target_forward_header_path = os.path.join(os.path.dirname(target_source_path), + forward_header_include_path) print("Writing amalgated forward header to %r" % target_forward_header_path) - header.write_to( target_forward_header_path ) + header.write_to(target_forward_header_path) print("Amalgating source...") - source = AmalgamationFile( source_top_dir ) - source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." 
) - source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path ) - source.add_file( "LICENSE", wrap_in_comment=True ) - source.add_text( "" ) - source.add_text( "#include <%s>" % header_include_path ) - source.add_text( "" ) + source = AmalgamationFile(source_top_dir) + source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).") + source.add_text("/// It is intented to be used with #include <%s>" % header_include_path) + source.add_file("LICENSE", wrap_in_comment=True) + source.add_text("") + source.add_text("#include <%s>" % header_include_path) + source.add_text("") lib_json = "src/lib_json" - source.add_file( os.path.join(lib_json, "json_tool.h") ) - source.add_file( os.path.join(lib_json, "json_reader.cpp") ) - source.add_file( os.path.join(lib_json, "json_batchallocator.h") ) - source.add_file( os.path.join(lib_json, "json_valueiterator.inl") ) - source.add_file( os.path.join(lib_json, "json_value.cpp") ) - source.add_file( os.path.join(lib_json, "json_writer.cpp") ) + source.add_file(os.path.join(lib_json, "json_tool.h")) + source.add_file(os.path.join(lib_json, "json_reader.cpp")) + source.add_file(os.path.join(lib_json, "json_batchallocator.h")) + source.add_file(os.path.join(lib_json, "json_valueiterator.inl")) + source.add_file(os.path.join(lib_json, "json_value.cpp")) + source.add_file(os.path.join(lib_json, "json_writer.cpp")) print("Writing amalgated source to %r" % target_source_path) - source.write_to( target_source_path ) + source.write_to(target_source_path) def main(): usage = """%prog [options] @@ -137,12 +137,12 @@ def main(): parser.enable_interspersed_args() options, args = parser.parse_args() - msg = amalgamate_source( source_top_dir=options.top_dir, + msg = amalgamate_source(source_top_dir=options.top_dir, target_source_path=options.target_source_path, - header_include_path=options.header_include_path ) + header_include_path=options.header_include_path) if msg: - sys.stderr.write( msg + "\n" ) - sys.exit( 1 ) + sys.stderr.write(msg + "\n") + sys.exit(1) else: print("Source succesfully amalagated") diff --git a/devtools/antglob.py b/devtools/antglob.py index 8b7b4ca29..afd4c48b0 100644 --- a/devtools/antglob.py +++ b/devtools/antglob.py @@ -54,9 +54,9 @@ ALL_NO_LINK = DIR | FILE ALL = DIR | FILE | LINKS -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) +_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)') -def ant_pattern_to_re( ant_pattern ): +def ant_pattern_to_re(ant_pattern): """Generates a regular expression from the ant pattern. Matching convention: **/a: match 'a', 'dir/a', 'dir1/dir2/a' @@ -65,30 +65,30 @@ def ant_pattern_to_re( ant_pattern ): """ rex = ['^'] next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) + sep_rex = r'(?:/|%s)' % re.escape(os.path.sep) ## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): + for match in _ANT_RE.finditer(ant_pattern): ## print 'Matched', match.group() ## print match.start(0), next_pos if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) + raise ValueError("Invalid ant pattern") if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + rex.append(sep_rex + '(?:.*%s)?' % sep_rex) elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) + rex.append('(?:.*%s)?' 
% sep_rex) elif match.group(3): # /** - rex.append( sep_rex + '.*' ) + rex.append(sep_rex + '.*') elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + rex.append('[^/%s]*' % re.escape(os.path.sep)) elif match.group(5): # / - rex.append( sep_rex ) + rex.append(sep_rex) else: # somepath - rex.append( re.escape(match.group(6)) ) + rex.append(re.escape(match.group(6))) next_pos = match.end() rex.append('$') - return re.compile( ''.join( rex ) ) + return re.compile(''.join(rex)) -def _as_list( l ): +def _as_list(l): if isinstance(l, basestring): return l.split() return l @@ -105,37 +105,37 @@ def glob(dir_path, dir_path = dir_path.replace('/',os.path.sep) entry_type_filter = entry_type - def is_pruned_dir( dir_name ): + def is_pruned_dir(dir_name): for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): + if fnmatch.fnmatch(dir_name, pattern): return True return False - def apply_filter( full_path, filter_rexs ): + def apply_filter(full_path, filter_rexs): """Return True if at least one of the filter regular expression match full_path.""" for rex in filter_rexs: - if rex.match( full_path ): + if rex.match(full_path): return True return False - def glob_impl( root_dir_path ): + def glob_impl(root_dir_path): child_dirs = [root_dir_path] while child_dirs: dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) + for entry in listdir(dir_path): + full_path = os.path.join(dir_path, entry) ## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? + is_dir = os.path.isdir(full_path) + if is_dir and not is_pruned_dir(entry): # explore child directory ? ## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) + child_dirs.append(full_path) + included = apply_filter(full_path, include_filter) + rejected = apply_filter(full_path, exclude_filter) if not included or rejected: # do not include entry ? 
## print '=> not included or rejected' continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) + link = os.path.islink(full_path) + is_file = os.path.isfile(full_path) if not is_file and not is_dir: ## print '=> unknown entry type' continue @@ -146,57 +146,57 @@ def glob_impl( root_dir_path ): ## print '=> type: %d' % entry_type, if (entry_type & entry_type_filter) != 0: ## print ' => KEEP' - yield os.path.join( dir_path, entry ) + yield os.path.join(dir_path, entry) ## else: ## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) + return list(glob_impl(dir_path)) if __name__ == "__main__": import unittest class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) +## def test_conversion(self): +## self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern) - def test_matching( self ): - test_cases = [ ( 'path', + def test_matching(self): + test_cases = [ ('path', ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', + ['somepath', 'pathsuffix', '/path', '/path']), + ('*.py', ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']), + ('**/path', ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']), + ('path/**', ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']), + ('/**/path', ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']), + ('a/b', ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', + ['somea/b', 'a/bsuffix', 'a/b/c']), + ('**/*.py', ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', + ['script.pyc', 'script.pyo', 'a.py/b']), + ('src/**/*.py', ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), + ['a/src/a.py', '/src/a.py']), ] for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): + def local_path(paths): return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches))) for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) + rex = ant_pattern_to_re(ant_pattern) print('ant_pattern:', ant_pattern, ' => ', rex.pattern) for accepted_match in accepted_matches: print('Accepted?:', accepted_match) - self.assertTrue( rex.match( accepted_match ) is not None ) + self.assertTrue(rex.match(accepted_match) is not None) for rejected_match in rejected_matches: print('Rejected?:', rejected_match) - self.assertTrue( rex.match( rejected_match ) is None ) + self.assertTrue(rex.match(rejected_match) is None) unittest.main() diff --git a/devtools/batchbuild.py b/devtools/batchbuild.py index 6f57945a7..0eb0690e8 100644 --- a/devtools/batchbuild.py +++ b/devtools/batchbuild.py @@ -18,62 
+18,62 @@ def __init__(self, prepend_envs=None, variables=None, build_type=None, generator self.build_type = build_type self.generator = generator - def merged_with( self, build_desc ): + def merged_with(self, build_desc): """Returns a new BuildDesc by merging field content. Prefer build_desc fields to self fields for single valued field. """ - return BuildDesc( self.prepend_envs + build_desc.prepend_envs, + return BuildDesc(self.prepend_envs + build_desc.prepend_envs, self.variables + build_desc.variables, build_desc.build_type or self.build_type, - build_desc.generator or self.generator ) + build_desc.generator or self.generator) - def env( self ): + def env(self): environ = os.environ.copy() for values_by_name in self.prepend_envs: for var, value in list(values_by_name.items()): var = var.upper() if type(value) is unicode: - value = value.encode( sys.getdefaultencoding() ) + value = value.encode(sys.getdefaultencoding()) if var in environ: environ[var] = value + os.pathsep + environ[var] else: environ[var] = value return environ - def cmake_args( self ): + def cmake_args(self): args = ["-D%s" % var for var in self.variables] # skip build type for Visual Studio solution as it cause warning if self.build_type and 'Visual' not in self.generator: - args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type ) + args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type) if self.generator: - args.extend( ['-G', self.generator] ) + args.extend(['-G', self.generator]) return args - def __repr__( self ): - return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type) + def __repr__(self): + return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type) class BuildData: - def __init__( self, desc, work_dir, source_dir ): + def __init__(self, desc, work_dir, source_dir): self.desc = desc self.work_dir = work_dir self.source_dir = source_dir - self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' ) - self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' ) + self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log') + self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log') self.cmake_succeeded = False self.build_succeeded = False def execute_build(self): print('Build %s' % self.desc) - self._make_new_work_dir( ) - self.cmake_succeeded = self._generate_makefiles( ) + self._make_new_work_dir() + self.cmake_succeeded = self._generate_makefiles() if self.cmake_succeeded: - self.build_succeeded = self._build_using_makefiles( ) + self.build_succeeded = self._build_using_makefiles() return self.build_succeeded def _generate_makefiles(self): print(' Generating makefiles: ', end=' ') - cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )] - succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path ) + cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)] + succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path) print('done' if succeeded else 'FAILED') return succeeded @@ -82,58 +82,58 @@ def _build_using_makefiles(self): cmd = ['cmake', '--build', self.work_dir] if self.desc.build_type: cmd += ['--config', self.desc.build_type] - succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path ) + succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path) print('done' if succeeded else 'FAILED') return succeeded def _execute_build_subprocess(self, cmd, env, 
log_path): - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, - env=env ) - stdout, _ = process.communicate( ) + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, + env=env) + stdout, _ = process.communicate() succeeded = (process.returncode == 0) - with open( log_path, 'wb' ) as flog: - log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode - flog.write( fix_eol( log ) ) + with open(log_path, 'wb') as flog: + log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode + flog.write(fix_eol(log)) return succeeded def _make_new_work_dir(self): - if os.path.isdir( self.work_dir ): + if os.path.isdir(self.work_dir): print(' Removing work directory', self.work_dir) - shutil.rmtree( self.work_dir, ignore_errors=True ) - if not os.path.isdir( self.work_dir ): - os.makedirs( self.work_dir ) + shutil.rmtree(self.work_dir, ignore_errors=True) + if not os.path.isdir(self.work_dir): + os.makedirs(self.work_dir) -def fix_eol( stdout ): +def fix_eol(stdout): """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). """ - return re.sub( '\r*\n', os.linesep, stdout ) + return re.sub('\r*\n', os.linesep, stdout) -def load_build_variants_from_config( config_path ): - with open( config_path, 'rb' ) as fconfig: - data = json.load( fconfig ) +def load_build_variants_from_config(config_path): + with open(config_path, 'rb') as fconfig: + data = json.load(fconfig) variants = data[ 'cmake_variants' ] - build_descs_by_axis = collections.defaultdict( list ) + build_descs_by_axis = collections.defaultdict(list) for axis in variants: axis_name = axis["name"] build_descs = [] if "generators" in axis: for generator_data in axis["generators"]: for generator in generator_data["generator"]: - build_desc = BuildDesc( generator=generator, - prepend_envs=generator_data.get("env_prepend") ) - build_descs.append( build_desc ) + build_desc = BuildDesc(generator=generator, + prepend_envs=generator_data.get("env_prepend")) + build_descs.append(build_desc) elif "variables" in axis: for variables in axis["variables"]: - build_desc = BuildDesc( variables=variables ) - build_descs.append( build_desc ) + build_desc = BuildDesc(variables=variables) + build_descs.append(build_desc) elif "build_types" in axis: for build_type in axis["build_types"]: - build_desc = BuildDesc( build_type=build_type ) - build_descs.append( build_desc ) - build_descs_by_axis[axis_name].extend( build_descs ) + build_desc = BuildDesc(build_type=build_type) + build_descs.append(build_desc) + build_descs_by_axis[axis_name].extend(build_descs) return build_descs_by_axis -def generate_build_variants( build_descs_by_axis ): +def generate_build_variants(build_descs_by_axis): """Returns a list of BuildDesc generated for the partial BuildDesc for each axis.""" axis_names = list(build_descs_by_axis.keys()) build_descs = [] @@ -141,8 +141,8 @@ def generate_build_variants( build_descs_by_axis ): if len(build_descs): # for each existing build_desc and each axis build desc, create a new build_desc new_build_descs = [] - for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs): - new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) ) + for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs): + new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc)) build_descs = new_build_descs else: 
build_descs = axis_build_descs @@ -174,60 +174,57 @@ def generate_build_variants( build_descs_by_axis ): ''') -def generate_html_report( html_report_path, builds ): - report_dir = os.path.dirname( html_report_path ) +def generate_html_report(html_report_path, builds): + report_dir = os.path.dirname(html_report_path) # Vertical axis: generator # Horizontal: variables, then build_type - builds_by_generator = collections.defaultdict( list ) + builds_by_generator = collections.defaultdict(list) variables = set() - build_types_by_variable = collections.defaultdict( set ) + build_types_by_variable = collections.defaultdict(set) build_by_pos_key = {} # { (generator, var_key, build_type): build } for build in builds: - builds_by_generator[build.desc.generator].append( build ) + builds_by_generator[build.desc.generator].append(build) var_key = tuple(sorted(build.desc.variables)) - variables.add( var_key ) - build_types_by_variable[var_key].add( build.desc.build_type ) + variables.add(var_key) + build_types_by_variable[var_key].add(build.desc.build_type) pos_key = (build.desc.generator, var_key, build.desc.build_type) build_by_pos_key[pos_key] = build - variables = sorted( variables ) + variables = sorted(variables) th_vars = [] th_build_types = [] for variable in variables: - build_types = sorted( build_types_by_variable[variable] ) + build_types = sorted(build_types_by_variable[variable]) nb_build_type = len(build_types_by_variable[variable]) - th_vars.append( '%s' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) ) + th_vars.append('%s' % (nb_build_type, cgi.escape(' '.join(variable)))) for build_type in build_types: - th_build_types.append( '%s' % cgi.escape(build_type) ) + th_build_types.append('%s' % cgi.escape(build_type)) tr_builds = [] - for generator in sorted( builds_by_generator ): - tds = [ '%s\n' % cgi.escape( generator ) ] + for generator in sorted(builds_by_generator): + tds = [ '%s\n' % cgi.escape(generator) ] for variable in variables: - build_types = sorted( build_types_by_variable[variable] ) + build_types = sorted(build_types_by_variable[variable]) for build_type in build_types: pos_key = (generator, variable, build_type) build = build_by_pos_key.get(pos_key) if build: cmake_status = 'ok' if build.cmake_succeeded else 'FAILED' build_status = 'ok' if build.build_succeeded else 'FAILED' - cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir ) - build_log_url = os.path.relpath( build.build_log_path, report_dir ) - td = 'CMake: %s' % ( - build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status) + cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir) + build_log_url = os.path.relpath(build.build_log_path, report_dir) + td = 'CMake: %s' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status) if build.cmake_succeeded: - td += '
Build: %s' % ( - build_log_url, build_status.lower(), build_status) + td += '
Build: %s' % ( build_log_url, build_status.lower(), build_status) td += '' else: td = '' - tds.append( td ) - tr_builds.append( '%s' % '\n'.join( tds ) ) - html = HTML_TEMPLATE.substitute( - title='Batch build report', + tds.append(td) + tr_builds.append('%s' % '\n'.join(tds)) + html = HTML_TEMPLATE.substitute( title='Batch build report', th_vars=' '.join(th_vars), - th_build_types=' '.join( th_build_types), - tr_builds='\n'.join( tr_builds ) ) - with open( html_report_path, 'wt' ) as fhtml: - fhtml.write( html ) + th_build_types=' '.join(th_build_types), + tr_builds='\n'.join(tr_builds)) + with open(html_report_path, 'wt') as fhtml: + fhtml.write(html) print('HTML report generated in:', html_report_path) def main(): @@ -246,33 +243,33 @@ def main(): parser.enable_interspersed_args() options, args = parser.parse_args() if len(args) < 3: - parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." ) + parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.") work_dir = args[0] source_dir = args[1].rstrip('/\\') config_paths = args[2:] for config_path in config_paths: - if not os.path.isfile( config_path ): - parser.error( "Can not read: %r" % config_path ) + if not os.path.isfile(config_path): + parser.error("Can not read: %r" % config_path) # generate build variants build_descs = [] for config_path in config_paths: - build_descs_by_axis = load_build_variants_from_config( config_path ) - build_descs.extend( generate_build_variants( build_descs_by_axis ) ) + build_descs_by_axis = load_build_variants_from_config(config_path) + build_descs.extend(generate_build_variants(build_descs_by_axis)) print('Build variants (%d):' % len(build_descs)) # assign build directory for each variant - if not os.path.isdir( work_dir ): - os.makedirs( work_dir ) + if not os.path.isdir(work_dir): + os.makedirs(work_dir) builds = [] - with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap: - for index, build_desc in enumerate( build_descs ): - build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) ) - builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) ) - fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) ) + with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap: + for index, build_desc in enumerate(build_descs): + build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1)) + builds.append(BuildData(build_desc, build_desc_work_dir, source_dir)) + fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc)) for build in builds: build.execute_build() - html_report_path = os.path.join( work_dir, 'batchbuild-report.html' ) - generate_html_report( html_report_path, builds ) + html_report_path = os.path.join(work_dir, 'batchbuild-report.html') + generate_html_report(html_report_path, builds) print('Done') diff --git a/devtools/fixeol.py b/devtools/fixeol.py index 53af7612b..a76880f96 100644 --- a/devtools/fixeol.py +++ b/devtools/fixeol.py @@ -1,10 +1,10 @@ from __future__ import print_function import os.path -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): +def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'): """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) + if not os.path.isfile(path): + raise ValueError('Path "%s" is not a file' % path) try: f = open(path, 'rb') except IOError as msg: @@ -29,27 +29,27 @@ def fix_source_eol( path, 
is_dry_run = True, verbose = True, eol = '\n' ): ## ## ## -##def _do_fix( is_dry_run = True ): +##def _do_fix(is_dry_run = True): ## from waftools import antglob -## python_sources = antglob.glob( '.', +## python_sources = antglob.glob('.', ## includes = '**/*.py **/wscript **/wscript_build', ## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## prune_dirs = antglob.prune_dirs + 'waf-* ./build') ## for path in python_sources: -## _fix_python_source( path, is_dry_run ) +## _fix_python_source(path, is_dry_run) ## -## cpp_sources = antglob.glob( '.', +## cpp_sources = antglob.glob('.', ## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## prune_dirs = antglob.prune_dirs + 'waf-* ./build') ## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) +## _fix_source_eol(path, is_dry_run) ## ## ##def dry_fix(context): -## _do_fix( is_dry_run = True ) +## _do_fix(is_dry_run = True) ## ##def fix(context): -## _do_fix( is_dry_run = False ) +## _do_fix(is_dry_run = False) ## ##def shutdown(): ## pass diff --git a/devtools/licenseupdater.py b/devtools/licenseupdater.py index 8cb71d737..6f823618f 100644 --- a/devtools/licenseupdater.py +++ b/devtools/licenseupdater.py @@ -13,7 +13,7 @@ """.replace('\r\n','\n') -def update_license( path, dry_run, show_diff ): +def update_license(path, dry_run, show_diff): """Update the license statement in the specified file. Parameters: path: path of the C++ source file to update. @@ -22,28 +22,28 @@ def update_license( path, dry_run, show_diff ): show_diff: if True, print the path of the file that would be modified, as well as the change made to the file. """ - with open( path, 'rt' ) as fin: + with open(path, 'rt') as fin: original_text = fin.read().replace('\r\n','\n') newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): + if not original_text.startswith(LICENSE_BEGIN): # No existing license found => prepend it new_text = BRIEF_LICENSE + original_text else: - license_end_index = original_text.index( '\n\n' ) # search first blank line + license_end_index = original_text.index('\n\n') # search first blank line new_text = BRIEF_LICENSE + original_text[license_end_index+2:] if original_text != new_text: if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) + with open(path, 'wb') as fout: + fout.write(new_text.replace('\n', newline)) print('Updated', path) if show_diff: import difflib - print('\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) )) + print('\n'.join(difflib.unified_diff(original_text.split('\n'), + new_text.split('\n')))) return True return False -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): +def update_license_in_source_directories(source_dirs, dry_run, show_diff): """Updates license text in C++ source files found in directory source_dirs. Parameters: source_dirs: list of directory to scan for C++ sources. 
Directories are @@ -56,11 +56,11 @@ def update_license_in_source_directories( source_dirs, dry_run, show_diff ): from devtools import antglob prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, + cpp_sources = antglob.glob(source_dir, includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) + prune_dirs = prune_dirs) for source in cpp_sources: - update_license( source, dry_run, show_diff ) + update_license(source, dry_run, show_diff) def main(): usage = """%prog DIR [DIR2...] @@ -83,7 +83,7 @@ def main(): help="""On update, show change made to the file.""") parser.enable_interspersed_args() options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) + update_license_in_source_directories(args, options.dry_run, options.show_diff) print('Done') if __name__ == '__main__': diff --git a/devtools/tarball.py b/devtools/tarball.py index ccbda3942..e44d870ac 100644 --- a/devtools/tarball.py +++ b/devtools/tarball.py @@ -13,41 +13,41 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' to make them child of root. """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): + base_dir = os.path.normpath(os.path.abspath(base_dir)) + def archive_name(path): """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) + path = os.path.normpath(os.path.abspath(path)) + common_path = os.path.commonprefix((base_dir, path)) archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): + if os.path.isabs(archive_name): archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) + return os.path.join(prefix_dir, archive_name) def visit(tar, dirname, names): for name in names: path = os.path.join(dirname, name) if os.path.isfile(path): path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) + tar.add(path, path_in_tar) compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + tar = tarfile.TarFile.gzopen(tarball_path, 'w', compresslevel=compression) try: for source in sources: source_path = source - if os.path.isdir( source ): + if os.path.isdir(source): os.path.walk(source_path, visit, tar) else: path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname + tar.add(source_path, path_in_tar) # filename, arcname finally: tar.close() -def decompress( tarball_path, base_dir ): +def decompress(tarball_path, base_dir): """Decompress the gzipped tarball into directory base_dir. """ # !!! This class method is not documented in the online doc # nor is bz2open! 
tar = tarfile.TarFile.gzopen(tarball_path, mode='r') try: - tar.extractall( base_dir ) + tar.extractall(base_dir) finally: tar.close() diff --git a/doxybuild.py b/doxybuild.py index 9c49df268..4ad900ed7 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -14,7 +14,7 @@ def find_program(*filenames): @return: the full path of the filename if found, or '' if filename could not be found """ paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or '' for filename in filenames: for name in [filename+ext for ext in suffixes.split()]: for directory in paths: @@ -47,24 +47,24 @@ def do_subst_in_file(targetfile, sourcefile, dict): raise def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) + config_file = os.path.abspath(config_file) doxygen_path = doxygen_path old_cwd = os.getcwd() try: - os.chdir( working_dir ) + os.chdir(working_dir) cmd = [doxygen_path, config_file] - print('Running:', ' '.join( cmd )) + print('Running:', ' '.join(cmd)) try: import subprocess except: - if os.system( ' '.join( cmd ) ) != 0: + if os.system(' '.join(cmd)) != 0: print('Documentation generation failed') return False else: if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) else: - process = subprocess.Popen( cmd ) + process = subprocess.Popen(cmd) stdout, _ = process.communicate() if process.returncode: print('Documentation generation failed:') @@ -72,9 +72,9 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent): return False return True finally: - os.chdir( old_cwd ) + os.chdir(old_cwd) -def build_doc( options, make_release=False ): +def build_doc(options, make_release=False): if make_release: options.make_tarball = True options.with_dot = True @@ -85,35 +85,35 @@ def build_doc( options, make_release=False ): version = open('version','rt').read().strip() output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + top_dir = os.path.abspath('.') html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): + tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz') + warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log') + html_output_path = os.path.join(output_dir, html_output_dirname) + def yesno(bool): return bool and 'YES' or 'NO' subst_keys = { '%JSONCPP_VERSION%': version, '%DOC_TOPDIR%': '', '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname), '%HAVE_DOT%': yesno(options.with_dot), '%DOT_PATH%': os.path.split(options.dot_path)[0], '%HTML_HELP%': yesno(options.with_html_help), '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + '%WARNING_LOG_PATH%': os.path.join('..', warning_log_path) } - if os.path.isdir( output_dir ): + if os.path.isdir(output_dir): print('Deleting directory:', output_dir) - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) + shutil.rmtree(output_dir) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in', subst_keys) + ok = run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent) if not options.silent: print(open(warning_log_path, 'rb').read()) index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html')) @@ -121,7 +121,7 @@ def yesno( bool ): print(index_path) if options.open: import webbrowser - webbrowser.open( 'file://' + index_path ) + webbrowser.open('file://' + index_path) if options.make_tarball: print('Generating doc tarball to', tarball_path) tarball_sources = [ @@ -131,8 +131,8 @@ def yesno( bool ): 'NEWS.txt', 'version' ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + tarball_basedir = os.path.join(output_dir, html_output_dirname) + tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname) return tarball_path, html_output_dirname def main(): @@ -163,7 +163,7 @@ def main(): help="""Hides doxygen output""") parser.enable_interspersed_args() options, args = parser.parse_args() - build_doc( options ) + build_doc(options) if __name__ == '__main__': main() diff --git a/makerelease.py b/makerelease.py index 90276d120..b7235db6c 100644 --- a/makerelease.py +++ b/makerelease.py @@ -34,57 +34,57 @@ SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' SOURCEFORGE_PROJECT = 'jsoncpp' -def set_version( version ): +def set_version(version): with open('version','wb') as f: - f.write( version.strip() ) + f.write(version.strip()) -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) +def rmdir_if_exist(dir_path): + if os.path.isdir(dir_path): + shutil.rmtree(dir_path) class SVNError(Exception): pass -def 
svn_command( command, *args ): +def svn_command(command, *args): cmd = ['svn', '--non-interactive', command] + list(args) - print('Running:', ' '.join( cmd )) - process = subprocess.Popen( cmd, + print('Running:', ' '.join(cmd)) + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) + stderr=subprocess.STDOUT) stdout = process.communicate()[0] if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) + error = SVNError('SVN command failed:\n' + stdout) error.returncode = process.returncode raise error return stdout def check_no_pending_commit(): """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) + stdout = svn_command('status', '--xml') + etree = ElementTree.fromstring(stdout) msg = [] - for entry in etree.getiterator( 'entry' ): + for entry in etree.getiterator('entry'): path = entry.get('path') status = entry.find('wc-status').get('item') if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + msg.append('File "%s" has pending change (status="%s")' % (path, status)) if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!') + return '\n'.join(msg) -def svn_join_url(http://github.com/base_url,%20suffix): +def svn_join_url(http://github.com/base_url,%20suffix): if not base_url.endswith('/'): base_url += '/' if suffix.startswith('/'): suffix = suffix[1:] return base_url + suffix -def svn_check_if_tag_exist( tag_url ): +def svn_check_if_tag_exist(tag_url): """Checks if a tag exist. Returns: True if the tag exist, False otherwise. """ try: - list_stdout = svn_command( 'list', tag_url ) + list_stdout = svn_command('list', tag_url) except SVNError as e: if e.returncode != 1 or not str(e).find('tag_url'): raise e @@ -92,82 +92,82 @@ def svn_check_if_tag_exist( tag_url ): return False return True -def svn_commit( message ): +def svn_commit(message): """Commit the sandbox, providing the specified comment. """ - svn_command( 'ci', '-m', message ) + svn_command('ci', '-m', message) -def svn_tag_sandbox( tag_url, message ): +def svn_tag_sandbox(tag_url, message): """Makes a tag based on the sandbox revisions. """ - svn_command( 'copy', '-m', message, '.', tag_url ) + svn_command('copy', '-m', message, '.', tag_url) -def svn_remove_tag( tag_url, message ): +def svn_remove_tag(tag_url, message): """Removes an existing tag. """ - svn_command( 'delete', '-m', message, tag_url ) + svn_command('delete', '-m', message, tag_url) -def svn_export( tag_url, export_dir ): +def svn_export(tag_url, export_dir): """Exports the tag_url revision to export_dir. Target directory, including its parent is created if it does not exist. If the directory export_dir exist, it is deleted before export proceed. """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) + rmdir_if_exist(export_dir) + svn_command('export', tag_url, export_dir) -def fix_sources_eol( dist_dir ): +def fix_sources_eol(dist_dir): """Set file EOL for tarball distribution. 
""" print('Preparing exported source file EOL for distribution...') prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, + win_sources = antglob.glob(dist_dir, includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, + prune_dirs = prune_dirs) + unix_sources = antglob.glob(dist_dir, includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in sconscript *.json *.expected AUTHORS LICENSE''', excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) + prune_dirs = prune_dirs) for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n') for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n') -def download( url, target_path ): +def download(url, target_path): """Download file represented by url to target_path. """ - f = urllib2.urlopen( url ) + f = urllib2.urlopen(url) try: data = f.read() finally: f.close() - fout = open( target_path, 'wb' ) + fout = open(target_path, 'wb') try: - fout.write( data ) + fout.write(data) finally: fout.close() -def check_compile( distcheck_top_dir, platform ): +def check_compile(distcheck_top_dir, platform): cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print('Running:', ' '.join( cmd )) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) + print('Running:', ' '.join(cmd)) + log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform) + flog = open(log_path, 'wb') try: - process = subprocess.Popen( cmd, + process = subprocess.Popen(cmd, stdout=flog, stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) + cwd=distcheck_top_dir) stdout = process.communicate()[0] status = (process.returncode == 0) finally: flog.close() return (status, log_path) -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) +def write_tempfile(content, **kwargs): + fd, path = tempfile.mkstemp(**kwargs) + f = os.fdopen(fd, 'wt') try: - f.write( content ) + f.write(content) finally: f.close() return path @@ -175,34 +175,34 @@ def write_tempfile( content, **kwargs ): class SFTPError(Exception): pass -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) +def run_sftp_batch(userhost, sftp, batch, retry=0): + path = write_tempfile(batch, suffix='.sftp', text=True) # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] error = None for retry_index in range(0, max(1,retry)): heading = retry_index == 0 and 'Running:' or 'Retrying:' - print(heading, ' '.join( cmd )) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + print(heading, ' '.join(cmd)) + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdout = process.communicate()[0] if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) + error = SFTPError('SFTP batch failed:\n' + stdout) else: break if error: raise error return stdout -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): +def 
sourceforge_web_synchro(sourceforge_project, doc_dir, + user=None, sftp='sftp'): """Notes: does not synchronize sub-directory of doc-dir. """ userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ + stdout = run_sftp_batch(userhost, sftp, """ cd htdocs dir exit -""" ) +""") existing_paths = set() collect = 0 for line in stdout.split('\n'): @@ -216,15 +216,15 @@ def sourceforge_web_synchro( sourceforge_project, doc_dir, elif collect == 2: path = line.strip().split()[-1:] if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + existing_paths.add(path[0]) + upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)]) paths_to_remove = existing_paths - upload_paths if paths_to_remove: print('Removing the following file from web:') - print('\n'.join( paths_to_remove )) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs + print('\n'.join(paths_to_remove)) + stdout = run_sftp_batch(userhost, sftp, """cd htdocs rm %s -exit""" % ' '.join(paths_to_remove) ) +exit""" % ' '.join(paths_to_remove)) print('Uploading %d files:' % len(upload_paths)) batch_size = 10 upload_paths = list(upload_paths) @@ -235,17 +235,17 @@ def sourceforge_web_synchro( sourceforge_project, doc_dir, remaining_files = len(upload_paths) - index remaining_sec = file_per_sec * remaining_files print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)) - run_sftp_batch( userhost, sftp, """cd htdocs + run_sftp_batch(userhost, sftp, """cd htdocs lcd %s mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) +exit""" % (doc_dir, ' '.join(paths)), retry=3) -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): +def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'): userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ + run_sftp_batch(userhost, sftp, """ mput %s exit -""" % (' '.join(paths),) ) +""" % (' '.join(paths),)) def main(): @@ -286,12 +286,12 @@ def main(): options, args = parser.parse_args() if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) + parser.error('release_version missing on command-line.') release_version = args[0] next_version = args[1] if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) + parser.error('You must specify either --platform or --no-test option.') if options.ignore_pending_commit: msg = '' @@ -299,86 +299,86 @@ def main(): msg = check_no_pending_commit() if not msg: print('Setting version to', release_version) - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url(http://github.com/SVN_TAG_ROOT,%20release_version) - if svn_check_if_tag_exist( tag_url ): + set_version(release_version) + svn_commit('Release ' + release_version) + tag_url = svn_join_url(http://github.com/SVN_TAG_ROOT,%20release_version) + if svn_check_if_tag_exist(tag_url): if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) + svn_remove_tag(tag_url, 'Overwriting previous tag') else: print('Aborting, tag %s already exist. Use --retag to overwrite it!' 
% tag_url) - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) + sys.exit(1) + svn_tag_sandbox(tag_url, 'Release ' + release_version) print('Generated doxygen document...') ## doc_dirname = r'jsoncpp-api-html-0.5.0' ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True) doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + tarball.decompress(doc_tarball_path, doc_distcheck_dir) + doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname) export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) + svn_export(tag_url, export_dir) + fix_sources_eol(export_dir) source_dir = 'jsoncpp-src-' + release_version source_tarball_path = 'dist/%s.tar.gz' % source_dir print('Generating source tarball to', source_tarball_path) - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir) amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir print('Generating amalgamation source tarball to', amalgamation_tarball_path) amalgamation_dir = 'dist/amalgamation' - amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) + amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h') amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version - tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], - amalgamation_dir, prefix_dir=amalgamation_source_dir ) + tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir], + amalgamation_dir, prefix_dir=amalgamation_source_dir) # Decompress source tarball, download and install scons-local distcheck_dir = 'dist/distcheck' distcheck_top_dir = distcheck_dir + '/' + source_dir print('Decompressing source tarball to', distcheck_dir) - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) + rmdir_if_exist(distcheck_dir) + tarball.decompress(source_tarball_path, distcheck_dir) scons_local_path = 'dist/scons-local.tar.gz' print('Downloading scons-local to', scons_local_path) - download( SCONS_LOCAL_URL, scons_local_path ) + download(SCONS_LOCAL_URL, scons_local_path) print('Decompressing scons-local to', distcheck_top_dir) - tarball.decompress( scons_local_path, distcheck_top_dir ) + tarball.decompress(scons_local_path, distcheck_top_dir) # Run compilation print('Compiling decompressed tarball') all_build_status = True for platform in options.platforms.split(','): print('Testing platform:', platform) - build_status, log_path = check_compile( distcheck_top_dir, platform ) + build_status, log_path = check_compile(distcheck_top_dir, platform) print('see build log:', log_path) print(build_status and '=> ok' or '=> FAILED') all_build_status = all_build_status and build_status if not build_status: print('Testing failed on at least one platform, aborting...') - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + svn_remove_tag(tag_url, 'Removing tag due to failed testing') sys.exit(1) if options.user: if not options.no_web: print('Uploading documentation using user', options.user) - sourceforge_web_synchro( 
SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp) print('Completed documentation upload') print('Uploading source and documentation tarballs for release using user', options.user) - sourceforge_release_tarball( SOURCEFORGE_PROJECT, + sourceforge_release_tarball(SOURCEFORGE_PROJECT, [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) + user=options.user, sftp=options.sftp) print('Source and doc release tarballs uploaded') else: print('No upload user specified. Web site and download tarbal were not uploaded.') print('Tarball can be found at:', doc_tarball_path) # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) + set_version(next_version) + svn_commit('Released ' + release_version) else: - sys.stderr.write( msg + '\n' ) + sys.stderr.write(msg + '\n') if __name__ == '__main__': main() diff --git a/scons-tools/globtool.py b/scons-tools/globtool.py index 811140e8a..ea7db2d3d 100644 --- a/scons-tools/globtool.py +++ b/scons-tools/globtool.py @@ -1,9 +1,9 @@ import fnmatch import os -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') +def generate(env): + def Glob(env, includes = None, excludes = None, dir = '.'): + """Adds Glob(includes = Split('*'), excludes = None, dir = '.') helper function to environment. Glob both the file-system files. @@ -12,36 +12,36 @@ def Glob( env, includes = None, excludes = None, dir = '.' ): excludes: list of file name pattern exluced from the return list. Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src") """ def filterFilename(path): - abs_path = os.path.join( dir, path ) + abs_path = os.path.join(dir, path) if not os.path.isfile(abs_path): return 0 fn = os.path.basename(path) match = 0 for include in includes: - if fnmatch.fnmatchcase( fn, include ): + if fnmatch.fnmatchcase(fn, include): match = 1 break if match == 1 and not excludes is None: for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): + if fnmatch.fnmatchcase(fn, exclude): match = 0 break return match if includes is None: includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): + elif type(includes) in (type(''), type(u'')): includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): + if type(excludes) in (type(''), type(u'')): excludes = (excludes,) dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) + paths = os.listdir(dir) + def makeAbsFileNode(path): + return env.File(os.path.join(dir, path)) + nodes = filter(filterFilename, paths) + return map(makeAbsFileNode, nodes) from SCons.Script import Environment Environment.Glob = Glob diff --git a/scons-tools/srcdist.py b/scons-tools/srcdist.py index 864ff4081..663a23443 100644 --- a/scons-tools/srcdist.py +++ b/scons-tools/srcdist.py @@ -47,7 +47,7 @@ ## elif token == "=": ## data[key] = list() ## else: -## append_data( data, key, new_data, token ) +## append_data(data, key, new_data, token) ## new_data = True ## ## last_token = token @@ -55,7 +55,7 @@ ## ## if last_token == '\\' and token != '\n': ## new_data = False -## append_data( data, key, new_data, 
'\\' ) +## append_data(data, key, new_data, '\\') ## ## # compress lists of len 1 into single strings ## for (k, v) in data.items(): @@ -116,7 +116,7 @@ ## else: ## for pattern in file_patterns: ## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) +## sources = map(lambda path: env.File(path), sources) ## return sources ## ## @@ -143,7 +143,7 @@ def srcDistEmitter(source, target, env): ## # add our output locations ## for (k, v) in output_formats.items(): ## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1])))) ## ## # don't clobber targets ## for node in targets: @@ -161,14 +161,13 @@ def generate(env): Add builders and construction variables for the SrcDist tool. """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, +## doxyfile_scanner = env.Scanner(## DoxySourceScan, ## "DoxySourceScan", ## scan_check = DoxySourceScanCheck, -## ) +##) if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) + srcdist_builder = targz.makeBuilder(srcDistEmitter) env['BUILDERS']['SrcDist'] = srcdist_builder diff --git a/scons-tools/substinfile.py b/scons-tools/substinfile.py index ef18b4edb..33e5de0d3 100644 --- a/scons-tools/substinfile.py +++ b/scons-tools/substinfile.py @@ -70,7 +70,7 @@ def subst_emitter(target, source, env): return target, source ## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string) env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) def exists(env): diff --git a/scons-tools/targz.py b/scons-tools/targz.py index f5432003d..6a4f3fa8d 100644 --- a/scons-tools/targz.py +++ b/scons-tools/targz.py @@ -27,9 +27,9 @@ if internal_targz: def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) + def archive_name(path): + path = os.path.normpath(os.path.abspath(path)) + common_path = os.path.commonprefix((base_dir, path)) archive_name = path[len(common_path):] return archive_name @@ -37,23 +37,23 @@ def visit(tar, dirname, names): for name in names: path = os.path.join(dirname, name) if os.path.isfile(path): - tar.add(path, archive_name(path) ) + tar.add(path, archive_name(path)) compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath) target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) + fileobj = gzip.GzipFile(target_path, 'wb', compression) tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) for source in source: source_path = str(source) if source.isdir(): os.path.walk(source_path, visit, tar) else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.add(source_path, archive_name(source_path)) # filename, arcname tar.close() targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - def makeBuilder( emitter = None ): + def makeBuilder(emitter = None): return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', 
'$TARGZ_COMSTR'), source_factory = SCons.Node.FS.Entry, source_scanner = SCons.Defaults.DirScanner, diff --git a/test/cleantests.py b/test/cleantests.py index c38fd8ffd..1a4f1f1f3 100644 --- a/test/cleantests.py +++ b/test/cleantests.py @@ -4,7 +4,7 @@ paths = [] for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) + paths += glob.glob('data/' + pattern) for path in paths: - os.unlink( path ) + os.unlink(path) diff --git a/test/generate_expected.py b/test/generate_expected.py index f668da238..2bbf569c1 100644 --- a/test/generate_expected.py +++ b/test/generate_expected.py @@ -1,10 +1,10 @@ from __future__ import print_function import glob import os.path -for path in glob.glob( '*.json' ): +for path in glob.glob('*.json'): text = file(path,'rt').read() target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): + if os.path.exists(target): print('skipping:', target) else: print('creating:', target) diff --git a/test/pyjsontestrunner.py b/test/pyjsontestrunner.py index 3f08a8a73..afaabd9d0 100644 --- a/test/pyjsontestrunner.py +++ b/test/pyjsontestrunner.py @@ -15,50 +15,50 @@ rewrite_path = base_path + '.rewrite' rewrite_actual_path = base_path + '.actual-rewrite' -def valueTreeToString( fout, value, path = '.' ): +def valueTreeToString(fout, value, path = '.'): ty = type(value) if ty is types.DictType: - fout.write( '%s={}\n' % path ) + fout.write('%s={}\n' % path) suffix = path[-1] != '.' and '.' or '' names = value.keys() names.sort() for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) + valueTreeToString(fout, value[name], path + suffix + name) elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) + fout.write('%s=[]\n' % path) + for index, childValue in zip(xrange(0,len(value)), value): + valueTreeToString(fout, childValue, path + '[%d]' % index) elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) + fout.write('%s="%s"\n' % (path,value)) elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) + fout.write('%s=%d\n' % (path,value)) elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) + fout.write('%s=%.16g\n' % (path,value)) elif value is True: - fout.write( '%s=true\n' % path ) + fout.write('%s=true\n' % path) elif value is False: - fout.write( '%s=false\n' % path ) + fout.write('%s=false\n' % path) elif value is None: - fout.write( '%s=null\n' % path ) + fout.write('%s=null\n' % path) else: assert False and "Unexpected value type" -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) +def parseAndSaveValueTree(input, actual_path): + root = json.loads(input) + fout = file(actual_path, 'wt') + valueTreeToString(fout, root) fout.close() return root -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) +def rewriteValueTree(value, rewrite_path): + rewrite = json.dumps(value) #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) + file(rewrite_path, 'wt').write(rewrite + '\n') return rewrite -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) +input = file(input_path, 'rt').read() +root = parseAndSaveValueTree(input, actual_path) +rewrite = rewriteValueTree(json.write(root), rewrite_path) +rewrite_root = parseAndSaveValueTree(rewrite, rewrite_actual_path) -sys.exit( 0 ) +sys.exit(0) diff --git a/test/runjsontests.py b/test/runjsontests.py index 19903b96b..728d41565 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -26,11 +26,11 @@ def getStatusOutput(cmd): pass # python3 status = pipe.close() return status, process_output -def compareOutputs( expected, actual, message ): +def compareOutputs(expected, actual, message): expected = expected.strip().replace('\r','').split('\n') actual = actual.strip().replace('\r','').split('\n') diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) + max_line_to_compare = min(len(expected), len(actual)) for index in range(0,max_line_to_compare): if expected[index].strip() != actual[index].strip(): diff_line = index + 1 @@ -39,7 +39,7 @@ def compareOutputs( expected, actual, message ): diff_line = max_line_to_compare+1 if diff_line == 0: return None - def safeGetLine( lines, index ): + def safeGetLine(lines, index): index += -1 if index >= len(lines): return '' @@ -49,66 +49,65 @@ def safeGetLine( lines, index ): Actual: '%s' """ % (message, diff_line, safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) + safeGetLine(actual,diff_line)) -def safeReadFile( path ): +def safeReadFile(path): try: - return open( path, 'rt', encoding = 'utf-8' ).read() + return open(path, 'rt', encoding = 'utf-8').read() except IOError as e: return '' % (path,e) -def runAllTests( jsontest_executable_path, input_dir = None, +def runAllTests(jsontest_executable_path, input_dir = None, use_valgrind=False, with_json_checker=False, writerClass='StyledWriter'): if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) + input_dir = os.path.join(os.getcwd(), 'data') + tests = glob(os.path.join(input_dir, '*.json')) if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + test_jsonchecker = glob(os.path.join(input_dir, '../jsonchecker', '*.json')) else: test_jsonchecker = [] failed_tests = [] valgrind_path = use_valgrind and VALGRIND_CMD or '' for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + expect_failure = os.path.basename(input_path).startswith('fail') is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print('TESTING:', input_path, end=' ') options = is_json_checker_test and '--json-checker' or '' options += ' --json-writer %s'%writerClass - cmd = '%s%s %s "%s"' % ( - valgrind_path, jsontest_executable_path, options, + cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, input_path) status, process_output = getStatusOutput(cmd) if is_json_checker_test: if expect_failure: if not status: print('FAILED') - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) + failed_tests.append((input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path))) else: print('OK') 
else: if status: print('FAILED') - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + failed_tests.append((input_path, 'Parsing failed:\n' + process_output)) else: print('OK') else: base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - open(base_path + '.process-output', 'wt', encoding = 'utf-8').write( process_output ) + actual_output = safeReadFile(base_path + '.actual') + actual_rewrite_output = safeReadFile(base_path + '.actual-rewrite') + open(base_path + '.process-output', 'wt', encoding = 'utf-8').write(process_output) if status: print('parsing failed') - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + failed_tests.append((input_path, 'Parsing failed:\n' + process_output)) else: expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = open( expected_output_path, 'rt', encoding = 'utf-8' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + expected_output = open(expected_output_path, 'rt', encoding = 'utf-8').read() + detail = (compareOutputs(expected_output, actual_output, 'input') + or compareOutputs(expected_output, actual_rewrite_output, 'rewrite')) if detail: print('FAILED') - failed_tests.append( (input_path, detail) ) + failed_tests.append((input_path, detail)) else: print('OK') @@ -120,7 +119,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, print(failed_test[1]) print() print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) )) + len(failed_tests))) return 1 else: print('All %d tests passed.' % len(tests)) @@ -128,7 +127,7 @@ def runAllTests( jsontest_executable_path, input_dir = None, def main(): from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser = OptionParser(usage="%prog [options] [test case directory]") parser.add_option("--valgrind", action="http://github.com/store_true", dest="valgrind", default=False, help="run all the tests using valgrind to detect memory leaks") @@ -139,25 +138,25 @@ def main(): options, args = parser.parse_args() if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' 
) - sys.exit( 1 ) + parser.error('Must provides at least path to jsontestrunner executable.') + sys.exit(1) - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + jsontest_executable_path = os.path.normpath(os.path.abspath(args[0])) if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) + input_path = os.path.normpath(os.path.abspath(args[1])) else: input_path = None - status = runAllTests( jsontest_executable_path, input_path, + status = runAllTests(jsontest_executable_path, input_path, use_valgrind=options.valgrind, with_json_checker=options.with_json_checker, writerClass='StyledWriter') if status: - sys.exit( status ) - status = runAllTests( jsontest_executable_path, input_path, + sys.exit(status) + status = runAllTests(jsontest_executable_path, input_path, use_valgrind=options.valgrind, with_json_checker=options.with_json_checker, writerClass='StyledStreamWriter') - sys.exit( status ) + sys.exit(status) if __name__ == '__main__': main() diff --git a/test/rununittests.py b/test/rununittests.py index 54c4da42c..dd1c9a379 100644 --- a/test/rununittests.py +++ b/test/rununittests.py @@ -11,18 +11,18 @@ VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + def __init__(self, test_exe_path, use_valgrind=False): + self.test_exe_path = os.path.normpath(os.path.abspath(test_exe_path)) self.use_valgrind = use_valgrind - def run( self, options ): + def run(self, options): if self.use_valgrind: cmd = VALGRIND_CMD.split() else: cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + cmd.extend([self.test_exe_path, '--test-auto'] + options) try: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except: print(cmd) raise @@ -31,9 +31,9 @@ def run( self, options ): return False, stdout return True, stdout -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) +def runAllTests(exe_path, use_valgrind=False): + test_proxy = TestProxy(exe_path, use_valgrind=use_valgrind) + status, test_names = test_proxy.run(['--list-tests']) if not status: print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr) return 1 @@ -41,11 +41,11 @@ def runAllTests( exe_path, use_valgrind=False ): failures = [] for name in test_names: print('TESTING %s:' % name, end=' ') - succeed, result = test_proxy.run( ['--test', name] ) + succeed, result = test_proxy.run(['--test', name]) if succeed: print('OK') else: - failures.append( (name, result) ) + failures.append((name, result)) print('FAILED') failed_count = len(failures) pass_count = len(test_names) - failed_count @@ -53,8 +53,7 @@ def runAllTests( exe_path, use_valgrind=False ): print() for name, result in failures: print(result) - print('%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count)) + print('%d/%d tests passed (%d failure(s))' % ( pass_count, len(test_names), failed_count)) return 1 else: print('All %d tests passed' % len(test_names)) @@ -62,7 +61,7 @@ def runAllTests( exe_path, use_valgrind=False ): def main(): from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) + parser = OptionParser(usage="%prog 
[options] ") parser.add_option("--valgrind", action="http://github.com/store_true", dest="valgrind", default=False, help="run all the tests using valgrind to detect memory leaks") @@ -70,11 +69,11 @@ def main(): options, args = parser.parse_args() if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) + parser.error('Must provides at least path to test_lib_json executable.') + sys.exit(1) - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) + exit_code = runAllTests(args[0], use_valgrind=options.valgrind) + sys.exit(exit_code) if __name__ == '__main__': main() From 9cc0bb80b2957c31bd0cff150b7fbd2fe78f5830 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 15:44:51 -0600 Subject: [PATCH 061/885] update TarFile usage --- devtools/tarball.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/devtools/tarball.py b/devtools/tarball.py index e44d870ac..e461e8fed 100644 --- a/devtools/tarball.py +++ b/devtools/tarball.py @@ -1,5 +1,5 @@ +from contextlib import closing import os.path -import gzip import tarfile TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 @@ -29,8 +29,8 @@ def visit(tar, dirname, names): path_in_tar = archive_name(path) tar.add(path, path_in_tar) compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen(tarball_path, 'w', compresslevel=compression) - try: + with closing(tarfile.TarFile.open(tarball_path, 'w:gz', + compresslevel=compression)) as tar: for source in sources: source_path = source if os.path.isdir(source): @@ -38,16 +38,9 @@ def visit(tar, dirname, names): else: path_in_tar = archive_name(source_path) tar.add(source_path, path_in_tar) # filename, arcname - finally: - tar.close() def decompress(tarball_path, base_dir): """Decompress the gzipped tarball into directory base_dir. """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: + with closing(tarfile.TarFile.open(tarball_path)) as tar: tar.extractall(base_dir) - finally: - tar.close() From ff5abe76a552e4fa53fb29e603040629286124c9 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 15:54:08 -0600 Subject: [PATCH 062/885] update doxbuild.py --- doxybuild.py | 77 ++++++++++++++++++++++++++-------------------------- 1 file changed, 39 insertions(+), 38 deletions(-) diff --git a/doxybuild.py b/doxybuild.py index 4ad900ed7..6f7b889bc 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -2,12 +2,25 @@ """ from __future__ import print_function from devtools import tarball +from contextlib import contextmanager +import subprocess import re import os -import os.path import sys import shutil +@contextmanager +def cd(newdir): + """ + http://stackoverflow.com/questions/431684/how-do-i-cd-in-python + """ + prevdir = os.getcwd() + os.chdir(newdir) + try: + yield + finally: + os.chdir(prevdir) + def find_program(*filenames): """find a program in folders path_lst, and sets env[var] @param filenames: a list of possible names of the program to search for @@ -28,51 +41,39 @@ def do_subst_in_file(targetfile, sourcefile, dict): For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
""" - try: - f = open(sourcefile, 'rb') + with open(sourcefile, 'rb') as f: contents = f.read() - f.close() - except: - print("Can't read source file %s"%sourcefile) - raise for (k,v) in list(dict.items()): v = v.replace('\\','\\\\') contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') + with open(targetfile, 'wb') as f: f.write(contents) - f.close() - except: - print("Can't write target file %s"%targetfile) - raise + +def getstatusoutput(cmd): + """cmd is a list. + """ + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output, _ = process.communicate() + status = process.returncode + return status, output + +def run_cmd(cmd, silent=False): + print('Running:', repr(' '.join(cmd)), 'in', repr(os.getcwd())) + sys.stdout.flush() + if silent: + status, output = getstatusoutput(cmd) + else: + status, output = os.system(' '.join(cmd)), '' + if status: + msg = 'error=%d, output="""\n%s\n"""' %(status, output) + print(msg) + #raise Exception(msg) def run_doxygen(doxygen_path, config_file, working_dir, is_silent): config_file = os.path.abspath(config_file) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir(working_dir) + with cd(working_dir): cmd = [doxygen_path, config_file] - print('Running:', ' '.join(cmd)) - try: - import subprocess - except: - if os.system(' '.join(cmd)) != 0: - print('Documentation generation failed') - return False - else: - if is_silent: - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - else: - process = subprocess.Popen(cmd) - stdout, _ = process.communicate() - if process.returncode: - print('Documentation generation failed:') - print(stdout) - return False - return True - finally: - os.chdir(old_cwd) + run_cmd(cmd, is_silent) def build_doc(options, make_release=False): if make_release: @@ -113,7 +114,7 @@ def yesno(bool): os.makedirs(output_dir) do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in', subst_keys) - ok = run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent) + run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent) if not options.silent: print(open(warning_log_path, 'rb').read()) index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html')) From bb0c80b3e5b7a38edbd12a09b5d2f458cd7a5792 Mon Sep 17 00:00:00 2001 From: Florian Meier Date: Sat, 24 Jan 2015 15:48:38 -0600 Subject: [PATCH 063/885] Doxybuild: Error message if doxygen not found This patch introduces a better error message. See discussion at pull #129. --- doxybuild.py | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/doxybuild.py b/doxybuild.py index 6f7b889bc..8d19f708f 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -4,6 +4,7 @@ from devtools import tarball from contextlib import contextmanager import subprocess +import traceback import re import os import sys @@ -52,24 +53,39 @@ def do_subst_in_file(targetfile, sourcefile, dict): def getstatusoutput(cmd): """cmd is a list. 
""" - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - output, _ = process.communicate() - status = process.returncode + try: + process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output, _ = process.communicate() + status = process.returncode + except: + status = -1 + output = traceback.format_exc() return status, output def run_cmd(cmd, silent=False): - print('Running:', repr(' '.join(cmd)), 'in', repr(os.getcwd())) + """Raise exception on failure. + """ + info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd()) + print(info) sys.stdout.flush() if silent: status, output = getstatusoutput(cmd) else: status, output = os.system(' '.join(cmd)), '' if status: - msg = 'error=%d, output="""\n%s\n"""' %(status, output) - print(msg) - #raise Exception(msg) + msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output) + raise Exception(msg) + +def assert_is_exe(path): + if not path: + raise Exception('path is empty.') + if not os.path.isfile(path): + raise Exception('%r is not a file.' %path) + if not os.access(path, os.X_OK): + raise Exception('%r is not executable by this user.' %path) def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + assert_is_exe(doxygen_path) config_file = os.path.abspath(config_file) with cd(working_dir): cmd = [doxygen_path, config_file] From f3576888936b4a3bb14fa852d4315e0a2b8d6150 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 16:20:25 -0600 Subject: [PATCH 064/885] make doxybuild.py work with python3.4 --- devtools/tarball.py | 5 +++-- doxybuild.py | 9 +++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/devtools/tarball.py b/devtools/tarball.py index e461e8fed..b2030fbdb 100644 --- a/devtools/tarball.py +++ b/devtools/tarball.py @@ -1,5 +1,5 @@ from contextlib import closing -import os.path +import os import tarfile TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 @@ -34,7 +34,8 @@ def visit(tar, dirname, names): for source in sources: source_path = source if os.path.isdir(source): - os.path.walk(source_path, visit, tar) + for dirpath, dirnames, filenames in os.walk(source_path): + visit(tar, dirpath, filenames) else: path_in_tar = archive_name(source_path) tar.add(source_path, path_in_tar) # filename, arcname diff --git a/doxybuild.py b/doxybuild.py index 8d19f708f..62218505d 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -1,6 +1,7 @@ """Script to generate doxygen documentation. """ from __future__ import print_function +from __future__ import unicode_literals from devtools import tarball from contextlib import contextmanager import subprocess @@ -42,12 +43,12 @@ def do_subst_in_file(targetfile, sourcefile, dict): For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, then all instances of %VERSION% in the file will be replaced with 1.2345 etc. """ - with open(sourcefile, 'rb') as f: + with open(sourcefile, 'r') as f: contents = f.read() for (k,v) in list(dict.items()): v = v.replace('\\','\\\\') contents = re.sub(k, v, contents) - with open(targetfile, 'wb') as f: + with open(targetfile, 'w') as f: f.write(contents) def getstatusoutput(cmd): @@ -100,7 +101,7 @@ def build_doc(options, make_release=False): options.open = False options.silent = True - version = open('version','rt').read().strip() + version = open('version', 'rt').read().strip() output_dir = 'dist/doxygen' # relative to doc/doxyfile location. 
if not os.path.isdir(output_dir): os.makedirs(output_dir) @@ -132,7 +133,7 @@ def yesno(bool): do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in', subst_keys) run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent) if not options.silent: - print(open(warning_log_path, 'rb').read()) + print(open(warning_log_path, 'r').read()) index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html')) print('Generated documentation can be found in:') print(index_path) From f4bc0bf4eccc92d2be75bf8568852f8635a45e43 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 15:43:23 -0600 Subject: [PATCH 065/885] README.md --- NEWS.txt | 2 +- SConstruct | 2 +- doxybuild.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/NEWS.txt b/NEWS.txt index 1be7b8ef8..5733fcd5e 100644 --- a/NEWS.txt +++ b/NEWS.txt @@ -80,7 +80,7 @@ New in SVN (e.g. MSVC 2008 command prompt in start menu) before running scons. - Added support for amalgamated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgamated source and header" + Refer to README.md section "Generating amalgamated source and header" for detail. * Value diff --git a/SConstruct b/SConstruct index 1c55bcd09..f3a73f773 100644 --- a/SConstruct +++ b/SConstruct @@ -237,7 +237,7 @@ RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) env.Alias( 'check' ) srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct + AUTHORS README.md SConstruct """.split() ) env.Alias( 'src-dist', srcdist_cmd ) diff --git a/doxybuild.py b/doxybuild.py index 0b61c397e..9c49df268 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -126,7 +126,7 @@ def yesno( bool ): print('Generating doc tarball to', tarball_path) tarball_sources = [ output_dir, - 'README.txt', + 'README.md', 'LICENSE', 'NEWS.txt', 'version' From dd91914b1b3d720190fcd3468c3a904780f7310a Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 10:34:49 -0600 Subject: [PATCH 066/885] TravisCI gcc-4.6 does not yet support -Wpedantic --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 96219e27e..d99849517 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -88,7 +88,7 @@ if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall") elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") # using GCC - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -Wpedantic") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -Wextra -pedantic") endif() IF(JSONCPP_WITH_WARNING_AS_ERROR) From 2f046b584d989e74918af8b1c20fd8a47af9784c Mon Sep 17 00:00:00 2001 From: datadiode Date: Sun, 25 Jan 2015 11:01:22 +0100 Subject: [PATCH 067/885] Fix a border case which causes Value::CommentInfo::setComment() to crash re: pull #140 --- src/lib_json/json_value.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 1a1c76335..150eff98b 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -142,8 +142,10 @@ Value::CommentInfo::~CommentInfo() { } void Value::CommentInfo::setComment(const char* text) { - if (comment_) + if (comment_) { releaseStringValue(comment_); + comment_ = 0; + } JSON_ASSERT(text != 0); JSON_ASSERT_MESSAGE( text[0] == '\0' || text[0] == '/', From 1e3149ab75245cfd02d06353f9c78487c230431b Mon Sep 17 00:00:00 2001 From: Christopher Dunn 
Date: Sun, 25 Jan 2015 14:16:13 -0600 Subject: [PATCH 068/885] rm trailing newlines for *all* comments This will make it easier to fix newlines consistently. --- include/json/value.h | 6 ++++-- src/lib_json/json_reader.cpp | 3 --- src/lib_json/json_value.cpp | 21 +++++++++++++++------ 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index b2746889c..efc34ac51 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -432,9 +432,11 @@ Json::Value obj_value(Json::objectValue); // {} // EnumValues enumValues() const; //# endif - /// Comments must be //... or /* ... */ + /// \deprecated Always pass len. void setComment(const char* comment, CommentPlacement placement); /// Comments must be //... or /* ... */ + void setComment(const char* comment, size_t len, CommentPlacement placement); + /// Comments must be //... or /* ... */ void setComment(const std::string& comment, CommentPlacement placement); bool hasComment(CommentPlacement placement) const; /// Include delimiters and embedded newlines. @@ -477,7 +479,7 @@ Json::Value obj_value(Json::objectValue); // {} CommentInfo(); ~CommentInfo(); - void setComment(const char* text); + void setComment(const char* text, size_t len); char* comment_; }; diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index 9e6d1616a..d2cff9a1c 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -135,9 +135,6 @@ bool Reader::readValue() { bool successful = true; if (collectComments_ && !commentsBefore_.empty()) { - // Remove newline at the end of the comment - if (commentsBefore_[commentsBefore_.size() - 1] == '\n') - commentsBefore_.resize(commentsBefore_.size() - 1); currentValue().setComment(commentsBefore_, commentBefore); commentsBefore_ = ""; } diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 150eff98b..ed5aafe03 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -141,7 +141,7 @@ Value::CommentInfo::~CommentInfo() { releaseStringValue(comment_); } -void Value::CommentInfo::setComment(const char* text) { +void Value::CommentInfo::setComment(const char* text, size_t len) { if (comment_) { releaseStringValue(comment_); comment_ = 0; @@ -151,7 +151,7 @@ void Value::CommentInfo::setComment(const char* text) { text[0] == '\0' || text[0] == '/', "in Json::Value::setComment(): Comments must start with /"); // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue(text); + comment_ = duplicateStringValue(text, len); } // ////////////////////////////////////////////////////////////////// @@ -369,7 +369,8 @@ Value::Value(const Value& other) for (int comment = 0; comment < numberOfCommentPlacement; ++comment) { const CommentInfo& otherComment = other.comments_[comment]; if (otherComment.comment_) - comments_[comment].setComment(otherComment.comment_); + comments_[comment].setComment( + otherComment.comment_, strlen(otherComment.comment_)); } } } @@ -1227,14 +1228,22 @@ bool Value::isArray() const { return type_ == arrayValue; } bool Value::isObject() const { return type_ == objectValue; } -void Value::setComment(const char* comment, CommentPlacement placement) { +void Value::setComment(const char* comment, size_t len, CommentPlacement placement) { if (!comments_) comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment(comment); + if ((len > 0) && (comment[len-1] == '\n')) { + // Always discard trailing newline, to aid indentation. 
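+    // (e.g. a reader-captured "//comment\n" is stored as "//comment"; the
+    // writers decide where newlines go when the comment is emitted.)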
+ len -= 1; + } + comments_[placement].setComment(comment, len); +} + +void Value::setComment(const char* comment, CommentPlacement placement) { + setComment(comment, strlen(comment), placement); } void Value::setComment(const std::string& comment, CommentPlacement placement) { - setComment(comment.c_str(), placement); + setComment(comment.c_str(), comment.length(), placement); } bool Value::hasComment(CommentPlacement placement) const { From 74c2d82e19864133c7ce2c251d311ad891aa794f Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 13:25:58 -0600 Subject: [PATCH 069/885] proper newlines for comments The logic is still messy, but it seems to work. --- src/lib_json/json_writer.cpp | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index d8a304632..f621a4f98 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -466,8 +466,10 @@ void StyledStreamWriter::write(std::ostream& out, const Value& root) { document_ = &out; addChildValues_ = false; indentString_ = ""; - indented_ = false; + indented_ = true; writeCommentBeforeValue(root); + if (!indented_) writeIndent(); + indented_ = true; writeValue(root); writeCommentAfterValueOnSameLine(root); *document_ << "\n"; @@ -631,32 +633,29 @@ void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { if (!root.hasComment(commentBefore)) return; - *document_ << "\n"; - writeIndent(); + if (!indented_) writeIndent(); const std::string& comment = root.getComment(commentBefore); std::string::const_iterator iter = comment.begin(); while (iter != comment.end()) { *document_ << *iter; if (*iter == '\n' && (iter != comment.end() && *(iter + 1) == '/')) - writeIndent(); + // writeIndent(); // would include newline + *document_ << indentString_; ++iter; } - - // Comments are stripped of trailing newlines, so add one here - *document_ << "\n"; indented_ = false; } void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { if (root.hasComment(commentAfterOnSameLine)) - *document_ << " " + root.getComment(commentAfterOnSameLine); + *document_ << root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { - *document_ << "\n"; + writeIndent(); *document_ << root.getComment(commentAfter); - *document_ << "\n"; } + indented_ = false; } bool StyledStreamWriter::hasCommentForValue(const Value& value) { From 964affd3338c7bd1935e422d4fa268d5346a8830 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 15:49:02 -0600 Subject: [PATCH 070/885] add back space before trailing comment --- src/lib_json/json_writer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index f621a4f98..f1e3b583c 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -649,7 +649,7 @@ void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { if (root.hasComment(commentAfterOnSameLine)) - *document_ << root.getComment(commentAfterOnSameLine); + *document_ << ' ' << root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { writeIndent(); From 948f29032e2ee379f1a457088a12750227aee013 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 12:19:30 -0600 Subject: [PATCH 071/885] update docs --- doc/header.html | 2 +- doc/jsoncpp.dox | 26 ++++++++++++++++++-------- 2 files 
changed, 19 insertions(+), 9 deletions(-) diff --git a/doc/header.html b/doc/header.html index 6bd2ad9e3..4b2a5e921 100644 --- a/doc/header.html +++ b/doc/header.html @@ -16,7 +16,7 @@ - JsonCpp home page + JsonCpp home page diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index a9ed47ec4..f1937194e 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -4,11 +4,21 @@ JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. Here is an example of JSON data: \verbatim +{ + "encoding" : "UTF-8", + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim +JsonCpp supports comments as meta-data: +\code // Configuration options { // Default encoding for text @@ -17,22 +27,22 @@ Here is an example of JSON data: // Plug-ins loaded at start-up "plug-ins" : [ "python", - "c++", + "c++", // trailing comment "ruby" ], // Tab indent size - "indent" : { "length" : 3, "use_space": true } + // (multi-line comment) + "indent" : { /*embedded comment*/ "length" : 3, "use_space": true } } -\endverbatim -jsoncpp supports comments as meta-data. +\endcode \section _features Features - read and write JSON document - attach C++ style comments to element during parsing - rewrite JSON document preserving original comments -Notes: Comments used to be supported in JSON but where removed for +Notes: Comments used to be supported in JSON but were removed for portability (C like comments are not supported in Python). Since comments are useful in configuration/input file, this feature was preserved. @@ -40,7 +50,7 @@ preserved. \section _example Code example \code -Json::Value root; // will contains the root value after parsing. +Json::Value root; // will contain the root value after parsing. Json::Reader reader; bool parsingSuccessful = reader.parse( config_doc, root ); if ( !parsingSuccessful ) From 5fbfe3cdb948a32706ad88635563a41a54819f83 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 14:31:32 -0600 Subject: [PATCH 072/885] StreamWriter --- include/json/writer.h | 29 +++++++++++++++++++ src/lib_json/json_writer.cpp | 54 ++++++++++++++++++++++++++++++++++-- 2 files changed, 80 insertions(+), 3 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index cacb10e29..41984983e 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -23,6 +23,35 @@ namespace Json { class Value; +class JSON_API StreamWriter { +protected: + std::ostream& sout_; // not owned; will not delete +public: + StreamWriter(std::ostream* sout); + virtual ~StreamWriter(); + /// Write Value into document as configured in sub-class. + /// \return zero on success + /// \throw std::exception possibly, depending on configuration + virtual int write(Value const& root) const = 0; +}; + +class JSON_API StreamWriterBuilder { +public: + virtual ~StreamWriterBuilder(); + /// Do not delete stream (i.e. not owned), but keep a reference. + virtual StreamWriter* newStreamWriter(std::ostream* stream) const; +}; + +class JSON_API StreamWriterBuilderFactory { +public: + virtual ~StreamWriterBuilderFactory(); + virtual StreamWriterBuilder* newStreamWriterBuilder(); +}; + +/// \brief Write into stringstream, then return string, for convenience. +std::string writeString(Value const& root, StreamWriterBuilder const& builder); + + /** \brief Abstract class for writers. 
*/ class JSON_API Writer { diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index f1e3b583c..3f53f9402 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -7,13 +7,14 @@ #include #include "json_tool.h" #endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include #include #include +#include #include #include -#include -#include -#include #if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below #include @@ -670,4 +671,51 @@ std::ostream& operator<<(std::ostream& sout, const Value& root) { return sout; } +StreamWriter::StreamWriter(std::ostream* sout) + : sout_(*sout) +{ +} +StreamWriter::~StreamWriter() +{ +} +struct MyStreamWriter : public StreamWriter { +public: + MyStreamWriter(std::ostream* sout); + virtual ~MyStreamWriter(); + virtual int write(Value const& root) const = 0; +}; +MyStreamWriter::MyStreamWriter(std::ostream* sout) + : StreamWriter(sout) +{ +} +MyStreamWriter::~MyStreamWriter() +{ +} +int MyStreamWriter::write(Value const& root) const +{ + sout_ << root; + return 0; +} +StreamWriterBuilder::~StreamWriterBuilder() +{ +} +StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const +{ + // return new StyledStreamWriter(stream); + return nullptr; +} +StreamWriterBuilderFactory::~StreamWriterBuilderFactory() +{ +} +StreamWriterBuilder* StreamWriterBuilderFactory::newStreamWriterBuilder() +{ + return new StreamWriterBuilder; +} +std::string writeString(Value const& root, StreamWriterBuilder const& builder) { + std::ostringstream sout; + std::unique_ptr const sw(builder.newStreamWriter(&sout)); + sw->write(root); + return sout.str(); +} + } // namespace Json From 489707ff602217cbfa9e6699078a54787c9dd73d Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 15:25:30 -0600 Subject: [PATCH 073/885] StreamWriter::Builder --- include/json/writer.h | 45 +++++++++++++++++++++++++++--------- src/lib_json/json_writer.cpp | 30 +++++++++++++++++++++++- 2 files changed, 63 insertions(+), 12 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index 41984983e..d5306c9ea 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -22,30 +22,53 @@ namespace Json { class Value; +class StreamWriterBuilder; + +/** + +Usage: + + using namespace Json; + Value value; + StreamWriterBuilderFactory f; + StreamWriter::Builder builder(&f); + builder.setCommentStyle(StreamWriter::CommentStyle::None); + std::shared_ptr writer(builder.newStreamWriter(&std::cout)); + writer.write(value); +*/ +class JSON_API StreamWriterBuilderFactory { +public: + virtual ~StreamWriterBuilderFactory(); + virtual StreamWriterBuilder* newStreamWriterBuilder() const; +}; class JSON_API StreamWriter { protected: std::ostream& sout_; // not owned; will not delete public: + enum class CommentStyle {None, Some, All}; + StreamWriter(std::ostream* sout); virtual ~StreamWriter(); /// Write Value into document as configured in sub-class. /// \return zero on success /// \throw std::exception possibly, depending on configuration virtual int write(Value const& root) const = 0; -}; -class JSON_API StreamWriterBuilder { -public: - virtual ~StreamWriterBuilder(); - /// Do not delete stream (i.e. not owned), but keep a reference. - virtual StreamWriter* newStreamWriter(std::ostream* stream) const; -}; + /// Because this Builder is non-virtual, we can safely add + /// methods without a major version bump. 
+ /// \see http://stackoverflow.com/questions/14875052/pure-virtual-functions-and-binary-compatibility + class Builder { + StreamWriterBuilder* own_; + public: + Builder(StreamWriterBuilderFactory const*); + ~Builder(); // delete underlying StreamWriterBuilder -class JSON_API StreamWriterBuilderFactory { -public: - virtual ~StreamWriterBuilderFactory(); - virtual StreamWriterBuilder* newStreamWriterBuilder(); + void setCommentStyle(CommentStyle cs); /// default: All + + /// Do not take ownership of sout, but maintain a reference. + StreamWriter* newStreamWriter(std::ostream* sout); + }; }; /// \brief Write into stringstream, then return string, for convenience. diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 3f53f9402..15fcf644d 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -696,9 +696,21 @@ int MyStreamWriter::write(Value const& root) const sout_ << root; return 0; } +class StreamWriterBuilder { + typedef StreamWriter::CommentStyle CommentStyle; + CommentStyle cs_; +public: + virtual ~StreamWriterBuilder(); + virtual void setCommentStyle(CommentStyle cs); + virtual StreamWriter* newStreamWriter(std::ostream* sout) const; +}; StreamWriterBuilder::~StreamWriterBuilder() { } +void StreamWriterBuilder::setCommentStyle(CommentStyle cs) +{ + cs_ = cs; +} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { // return new StyledStreamWriter(stream); @@ -707,10 +719,26 @@ StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const StreamWriterBuilderFactory::~StreamWriterBuilderFactory() { } -StreamWriterBuilder* StreamWriterBuilderFactory::newStreamWriterBuilder() +StreamWriterBuilder* StreamWriterBuilderFactory::newStreamWriterBuilder() const { return new StreamWriterBuilder; } + +StreamWriter::Builder::Builder(StreamWriterBuilderFactory const* f) + : own_(f->newStreamWriterBuilder()) +{ +} +StreamWriter::Builder::~Builder() +{ + delete own_; +} +void StreamWriter::Builder::setCommentStyle(CommentStyle cs) +{ + own_->setCommentStyle(cs); +} + +/// Do not take ownership of sout, but maintain a reference. +StreamWriter* newStreamWriter(std::ostream* sout); std::string writeString(Value const& root, StreamWriterBuilder const& builder) { std::ostringstream sout; std::unique_ptr const sw(builder.newStreamWriter(&sout)); From 4d649402b02b6bac4b419949a5223a720c6e9956 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 16:08:21 -0600 Subject: [PATCH 074/885] setIndentation() --- include/json/writer.h | 7 +++++++ src/lib_json/json_writer.cpp | 23 +++++++++++++++++++---- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index d5306c9ea..2aff64214 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -65,6 +65,13 @@ class JSON_API StreamWriter { ~Builder(); // delete underlying StreamWriterBuilder void setCommentStyle(CommentStyle cs); /// default: All + /** \brief Write in human-friendly style. + + If "", then skip all indentation, newlines, and comments, + which implies CommentStyle::None. + Default: "\t" + */ + void setIndentation(std::string indentation); /// Do not take ownership of sout, but maintain a reference. 
StreamWriter* newStreamWriter(std::ostream* sout); diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 15fcf644d..14fef065c 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -699,17 +699,24 @@ int MyStreamWriter::write(Value const& root) const class StreamWriterBuilder { typedef StreamWriter::CommentStyle CommentStyle; CommentStyle cs_; + std::string indentation_; public: virtual ~StreamWriterBuilder(); virtual void setCommentStyle(CommentStyle cs); + virtual void setIndentation(std::string indentation); virtual StreamWriter* newStreamWriter(std::ostream* sout) const; }; StreamWriterBuilder::~StreamWriterBuilder() { } -void StreamWriterBuilder::setCommentStyle(CommentStyle cs) +void StreamWriterBuilder::setCommentStyle(CommentStyle v) { - cs_ = cs; + cs_ = v; +} +void StreamWriterBuilder::setIndentation(std::string v) +{ + indentation_ = v; + if (indentation_.empty()) cs_ = CommentStyle::None; } StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { @@ -732,9 +739,17 @@ StreamWriter::Builder::~Builder() { delete own_; } -void StreamWriter::Builder::setCommentStyle(CommentStyle cs) +void StreamWriter::Builder::setCommentStyle(CommentStyle v) +{ + own_->setCommentStyle(v); +} +void StreamWriter::Builder::setIndentation(std::string v) +{ + own_->setIndentation(v); +} +StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) { - own_->setCommentStyle(cs); + return own_->newStreamWriter(sout); } /// Do not take ownership of sout, but maintain a reference. From d49ab5aee194781ae343e7a701bc1b8f20f62b4a Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 22 Jan 2015 16:10:10 -0600 Subject: [PATCH 075/885] use new BuiltStyledStreamWriter in operator<<() --- src/lib_json/json_writer.cpp | 39 +++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 14fef065c..e69224cd3 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -665,12 +665,29 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) { value.hasComment(commentAfter); } -std::ostream& operator<<(std::ostream& sout, const Value& root) { - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} +struct BuiltStyledStreamWriter : public StreamWriter +{ + mutable StyledStreamWriter old_; + BuiltStyledStreamWriter( + std::ostream* sout, + std::string const& indentation, + StreamWriter::CommentStyle cs); + virtual int write(Value const& root) const; +}; +BuiltStyledStreamWriter::BuiltStyledStreamWriter( + std::ostream* sout, + std::string const& indentation, + StreamWriter::CommentStyle cs) + : StreamWriter(sout) + , old_(indentation) +{ +} +int BuiltStyledStreamWriter::write(Value const& root) const +{ + old_.write(sout_, root); + return 0; +} StreamWriter::StreamWriter(std::ostream* sout) : sout_(*sout) { @@ -720,8 +737,7 @@ void StreamWriterBuilder::setIndentation(std::string v) } StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { - // return new StyledStreamWriter(stream); - return nullptr; + return new BuiltStyledStreamWriter(stream, indentation_, cs_); } StreamWriterBuilderFactory::~StreamWriterBuilderFactory() { @@ -761,4 +777,13 @@ std::string writeString(Value const& root, StreamWriterBuilder const& builder) { return sout.str(); } +std::ostream& operator<<(std::ostream& sout, const Value& root) { + StreamWriterBuilderFactory f; + 
StreamWriter::Builder builder(&f); + builder.setCommentStyle(StreamWriter::CommentStyle::Some); + std::shared_ptr writer(builder.newStreamWriter(&sout)); + writer->write(root); + return sout; +} + } // namespace Json From 77ce057f14f0dbe19d4fa7b90ae38d4a5b934db1 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 07:11:06 -0600 Subject: [PATCH 076/885] fix comment --- include/json/writer.h | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index 2aff64214..bef756cf0 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -33,8 +33,10 @@ class StreamWriterBuilder; StreamWriterBuilderFactory f; StreamWriter::Builder builder(&f); builder.setCommentStyle(StreamWriter::CommentStyle::None); - std::shared_ptr writer(builder.newStreamWriter(&std::cout)); - writer.write(value); + std::shared_ptr writer( + builder.newStreamWriter(&std::cout)); + writer->write(value); + std::cout.flush(); */ class JSON_API StreamWriterBuilderFactory { public: From ceef7f52198ee524b0e3d7beb931821b5548d66a Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 07:33:01 -0600 Subject: [PATCH 077/885] copied impl of StyledStreamWriter --- src/lib_json/json_writer.cpp | 219 ++++++++++++++++++++++++++++++++++- 1 file changed, 215 insertions(+), 4 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index e69224cd3..8bc75ed12 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -667,27 +667,238 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) { struct BuiltStyledStreamWriter : public StreamWriter { - mutable StyledStreamWriter old_; - BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, StreamWriter::CommentStyle cs); virtual int write(Value const& root) const; +private: + void writeValue(const Value& value); + void writeArrayValue(const Value& value); + bool isMultineArray(const Value& value); + void pushValue(const std::string& value); + void writeIndent(); + void writeWithIndent(const std::string& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(const Value& root); + void writeCommentAfterValueOnSameLine(const Value& root); + bool hasCommentForValue(const Value& value); + static std::string normalizeEOL(const std::string& text); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; }; BuiltStyledStreamWriter::BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, StreamWriter::CommentStyle cs) : StreamWriter(sout) - , old_(indentation) + , indentation_(indentation) + , rightMargin_(74) { } int BuiltStyledStreamWriter::write(Value const& root) const { - old_.write(sout_, root); + write(sout_, root); return 0; } +void BuiltStyledStreamWriter::write(std::ostream& out, const Value& root) { + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue(root); + writeValue(root); + writeCommentAfterValueOnSameLine(root); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + +void BuiltStyledStreamWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + pushValue("null"); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble())); + break; + case stringValue: + pushValue(valueToQuotedString(value.asCString())); + break; + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + const std::string& name = *it; + const Value& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedString(name.c_str())); + *document_ << " : "; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void BuiltStyledStreamWriter::writeArrayValue(const Value& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isArrayMultiLine = isMultineArray(value); + if (isArrayMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + const Value& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + writeIndent(); + writeValue(childValue); + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + *document_ << "[ "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + +bool BuiltStyledStreamWriter::isMultineArray(const Value& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + const Value& childValue = value[index]; + isMultiLine = + isMultiLine || ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + +void BuiltStyledStreamWriter::pushValue(const std::string& value) { + if (addChildValues_) + childValues_.push_back(value); + else + *document_ << value; +} + +void BuiltStyledStreamWriter::writeIndent() { + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + +void BuiltStyledStreamWriter::writeWithIndent(const std::string& value) { + writeIndent(); + *document_ << value; +} + +void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; } + +void BuiltStyledStreamWriter::unindent() { + assert(indentString_.size() >= indentation_.size()); + indentString_.resize(indentString_.size() - indentation_.size()); +} + +void BuiltStyledStreamWriter::writeCommentBeforeValue(const Value& root) { + if (!root.hasComment(commentBefore)) + return; + *document_ << root.getComment(commentBefore); + *document_ << "\n"; +} + +void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { + if (root.hasComment(commentAfterOnSameLine)) + *document_ << " " + root.getComment(commentAfterOnSameLine); + + if (root.hasComment(commentAfter)) { + *document_ << "\n"; + *document_ << root.getComment(commentAfter); + *document_ << "\n"; + } +} + +bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + StreamWriter::StreamWriter(std::ostream* sout) : sout_(*sout) { From beb6f35c63e3ffedc049b105f7a599913c9a0ffa Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 07:51:40 -0600 Subject: [PATCH 078/885] non-const write --- include/json/writer.h | 2 +- src/lib_json/json_writer.cpp | 101 +++++++++++++++++------------------ 2 files changed, 49 insertions(+), 54 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index bef756cf0..e4a761c40 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -55,7 +55,7 @@ class JSON_API StreamWriter { /// Write Value into document as configured in sub-class. /// \return zero on success /// \throw std::exception possibly, depending on configuration - virtual int write(Value const& root) const = 0; + virtual int write(Value const& root) = 0; /// Because this Builder is non-virtual, we can safely add /// methods without a major version bump. 
diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 8bc75ed12..05189992a 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -671,28 +671,28 @@ struct BuiltStyledStreamWriter : public StreamWriter std::ostream* sout, std::string const& indentation, StreamWriter::CommentStyle cs); - virtual int write(Value const& root) const; + virtual int write(Value const& root); private: - void writeValue(const Value& value); - void writeArrayValue(const Value& value); - bool isMultineArray(const Value& value); - void pushValue(const std::string& value); + void writeValue(Value const& value); + void writeArrayValue(Value const& value); + bool isMultineArray(Value const& value); + void pushValue(std::string const& value); void writeIndent(); - void writeWithIndent(const std::string& value); + void writeWithIndent(std::string const& value); void indent(); void unindent(); - void writeCommentBeforeValue(const Value& root); - void writeCommentAfterValueOnSameLine(const Value& root); + void writeCommentBeforeValue(Value const& root); + void writeCommentAfterValueOnSameLine(Value const& root); bool hasCommentForValue(const Value& value); - static std::string normalizeEOL(const std::string& text); + static std::string normalizeEOL(std::string const& text); typedef std::vector ChildValues; ChildValues childValues_; - std::ostream* document_; std::string indentString_; int rightMargin_; std::string indentation_; + CommentStyle cs_; bool addChildValues_; }; BuiltStyledStreamWriter::BuiltStyledStreamWriter( @@ -700,27 +700,22 @@ BuiltStyledStreamWriter::BuiltStyledStreamWriter( std::string const& indentation, StreamWriter::CommentStyle cs) : StreamWriter(sout) - , indentation_(indentation) , rightMargin_(74) + , indentation_(indentation) + , cs_(cs) { } -int BuiltStyledStreamWriter::write(Value const& root) const +int BuiltStyledStreamWriter::write(Value const& root) { - write(sout_, root); - return 0; -} -void BuiltStyledStreamWriter::write(std::ostream& out, const Value& root) { - document_ = &out; addChildValues_ = false; indentString_ = ""; writeCommentBeforeValue(root); writeValue(root); writeCommentAfterValueOnSameLine(root); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
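+  // sout_ is the std::ostream reference bound in the StreamWriter constructor
+  // (not owned), so there is no stream pointer left to reset here.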
+ sout_ << "\n"; + return 0; } - -void BuiltStyledStreamWriter::writeValue(const Value& value) { +void BuiltStyledStreamWriter::writeValue(Value const& value) { switch (value.type()) { case nullValue: pushValue("null"); @@ -752,17 +747,17 @@ void BuiltStyledStreamWriter::writeValue(const Value& value) { indent(); Value::Members::iterator it = members.begin(); for (;;) { - const std::string& name = *it; - const Value& childValue = value[name]; + std::string const& name = *it; + Value const& childValue = value[name]; writeCommentBeforeValue(childValue); writeWithIndent(valueToQuotedString(name.c_str())); - *document_ << " : "; + sout_ << " : "; writeValue(childValue); if (++it == members.end()) { writeCommentAfterValueOnSameLine(childValue); break; } - *document_ << ","; + sout_ << ","; writeCommentAfterValueOnSameLine(childValue); } unindent(); @@ -772,7 +767,7 @@ void BuiltStyledStreamWriter::writeValue(const Value& value) { } } -void BuiltStyledStreamWriter::writeArrayValue(const Value& value) { +void BuiltStyledStreamWriter::writeArrayValue(Value const& value) { unsigned size = value.size(); if (size == 0) pushValue("[]"); @@ -784,7 +779,7 @@ void BuiltStyledStreamWriter::writeArrayValue(const Value& value) { bool hasChildValue = !childValues_.empty(); unsigned index = 0; for (;;) { - const Value& childValue = value[index]; + Value const& childValue = value[index]; writeCommentBeforeValue(childValue); if (hasChildValue) writeWithIndent(childValues_[index]); @@ -796,7 +791,7 @@ void BuiltStyledStreamWriter::writeArrayValue(const Value& value) { writeCommentAfterValueOnSameLine(childValue); break; } - *document_ << ","; + sout_ << ","; writeCommentAfterValueOnSameLine(childValue); } unindent(); @@ -804,23 +799,23 @@ void BuiltStyledStreamWriter::writeArrayValue(const Value& value) { } else // output on a single line { assert(childValues_.size() == size); - *document_ << "[ "; + sout_ << "[ "; for (unsigned index = 0; index < size; ++index) { if (index > 0) - *document_ << ", "; - *document_ << childValues_[index]; + sout_ << ", "; + sout_ << childValues_[index]; } - *document_ << " ]"; + sout_ << " ]"; } } } -bool BuiltStyledStreamWriter::isMultineArray(const Value& value) { +bool BuiltStyledStreamWriter::isMultineArray(Value const& value) { int size = value.size(); bool isMultiLine = size * 3 >= rightMargin_; childValues_.clear(); for (int index = 0; index < size && !isMultiLine; ++index) { - const Value& childValue = value[index]; + Value const& childValue = value[index]; isMultiLine = isMultiLine || ((childValue.isArray() || childValue.isObject()) && childValue.size() > 0); @@ -840,32 +835,32 @@ bool BuiltStyledStreamWriter::isMultineArray(const Value& value) { return isMultiLine; } -void BuiltStyledStreamWriter::pushValue(const std::string& value) { +void BuiltStyledStreamWriter::pushValue(std::string const& value) { if (addChildValues_) childValues_.push_back(value); else - *document_ << value; + sout_ << value; } void BuiltStyledStreamWriter::writeIndent() { /* Some comments in this method would have been nice. 
;-) - if ( !document_.empty() ) + if ( !sout_.empty() ) { - char last = document_[document_.length()-1]; + char last = sout_[sout_.length()-1]; if ( last == ' ' ) // already indented return; if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; + sout_ << '\n'; } */ - *document_ << '\n' << indentString_; + sout_ << '\n' << indentString_; } -void BuiltStyledStreamWriter::writeWithIndent(const std::string& value) { +void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) { writeIndent(); - *document_ << value; + sout_ << value; } void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; } @@ -875,21 +870,21 @@ void BuiltStyledStreamWriter::unindent() { indentString_.resize(indentString_.size() - indentation_.size()); } -void BuiltStyledStreamWriter::writeCommentBeforeValue(const Value& root) { +void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { if (!root.hasComment(commentBefore)) return; - *document_ << root.getComment(commentBefore); - *document_ << "\n"; + sout_ << root.getComment(commentBefore); + sout_ << "\n"; } -void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { +void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) { if (root.hasComment(commentAfterOnSameLine)) - *document_ << " " + root.getComment(commentAfterOnSameLine); + sout_ << " " + root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { - *document_ << "\n"; - *document_ << root.getComment(commentAfter); - *document_ << "\n"; + sout_ << "\n"; + sout_ << root.getComment(commentAfter); + sout_ << "\n"; } } @@ -910,7 +905,7 @@ struct MyStreamWriter : public StreamWriter { public: MyStreamWriter(std::ostream* sout); virtual ~MyStreamWriter(); - virtual int write(Value const& root) const = 0; + virtual int write(Value const& root) = 0; }; MyStreamWriter::MyStreamWriter(std::ostream* sout) : StreamWriter(sout) @@ -919,7 +914,7 @@ MyStreamWriter::MyStreamWriter(std::ostream* sout) MyStreamWriter::~MyStreamWriter() { } -int MyStreamWriter::write(Value const& root) const +int MyStreamWriter::write(Value const& root) { sout_ << root; return 0; @@ -988,7 +983,7 @@ std::string writeString(Value const& root, StreamWriterBuilder const& builder) { return sout.str(); } -std::ostream& operator<<(std::ostream& sout, const Value& root) { +std::ostream& operator<<(std::ostream& sout, Value const& root) { StreamWriterBuilderFactory f; StreamWriter::Builder builder(&f); builder.setCommentStyle(StreamWriter::CommentStyle::Some); From 9243d602fe88b8cfd173dfbd50dfdae2ed310b1a Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 08:38:32 -0600 Subject: [PATCH 079/885] const stuff --- include/json/writer.h | 4 ++-- src/jsontestrunner/main.cpp | 11 ++++++++++- src/lib_json/json_writer.cpp | 10 +++++----- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index e4a761c40..b2078495b 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -76,12 +76,12 @@ class JSON_API StreamWriter { void setIndentation(std::string indentation); /// Do not take ownership of sout, but maintain a reference. - StreamWriter* newStreamWriter(std::ostream* sout); + StreamWriter* newStreamWriter(std::ostream* sout) const; }; }; /// \brief Write into stringstream, then return string, for convenience. 
-std::string writeString(Value const& root, StreamWriterBuilder const& builder); +std::string writeString(Value const& root, StreamWriter::Builder const& builder); /** \brief Abstract class for writers. diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index f6f12b8a7..61411b46b 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -151,7 +151,6 @@ static int parseAndSaveValueTree(const std::string& input, reader.getFormattedErrorMessages().c_str()); return 1; } - if (!parseOnly) { FILE* factual = fopen(actual.c_str(), "wt"); if (!factual) { @@ -182,6 +181,14 @@ static std::string useStyledStreamWriter( writer.write(sout, root); return sout.str(); } +static std::string useBuiltStyledStreamWriter( + Json::Value const& root) +{ + Json::StreamWriterBuilderFactory f; + Json::StreamWriter::Builder builder(&f); + builder.setCommentStyle(Json::StreamWriter::CommentStyle::All); + return writeString(root, builder); +} static int rewriteValueTree( const std::string& rewritePath, const Json::Value& root, @@ -248,6 +255,8 @@ static int parseCommandLine( opts->write = &useStyledWriter; } else if (writerName == "StyledStreamWriter") { opts->write = &useStyledStreamWriter; + } else if (writerName == "BuiltStyledStreamWriter") { + opts->write = &useBuiltStyledStreamWriter; } else { printf("Unknown '--json-writer %s'\n", writerName.c_str()); return 4; diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 05189992a..27ff6e3f8 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -772,8 +772,8 @@ void BuiltStyledStreamWriter::writeArrayValue(Value const& value) { if (size == 0) pushValue("[]"); else { - bool isArrayMultiLine = isMultineArray(value); - if (isArrayMultiLine) { + bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value); + if (isMultiLine) { writeWithIndent("["); indent(); bool hasChildValue = !childValues_.empty(); @@ -969,14 +969,14 @@ void StreamWriter::Builder::setIndentation(std::string v) { own_->setIndentation(v); } -StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) +StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) const { return own_->newStreamWriter(sout); } /// Do not take ownership of sout, but maintain a reference. 
StreamWriter* newStreamWriter(std::ostream* sout); -std::string writeString(Value const& root, StreamWriterBuilder const& builder) { +std::string writeString(Value const& root, StreamWriter::Builder const& builder) { std::ostringstream sout; std::unique_ptr const sw(builder.newStreamWriter(&sout)); sw->write(root); @@ -986,7 +986,7 @@ std::string writeString(Value const& root, StreamWriterBuilder const& builder) { std::ostream& operator<<(std::ostream& sout, Value const& root) { StreamWriterBuilderFactory f; StreamWriter::Builder builder(&f); - builder.setCommentStyle(StreamWriter::CommentStyle::Some); + builder.setCommentStyle(StreamWriter::CommentStyle::All); std::shared_ptr writer(builder.newStreamWriter(&sout)); writer->write(root); return sout; From 9e4bcf354f0a77128eeef278ba566f46a9c259cb Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 14:39:57 -0600 Subject: [PATCH 080/885] test BuiltStyledStreamWriter too --- test/runjsontests.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/test/runjsontests.py b/test/runjsontests.py index 728d41565..597bf2f06 100644 --- a/test/runjsontests.py +++ b/test/runjsontests.py @@ -147,16 +147,23 @@ def main(): else: input_path = None status = runAllTests(jsontest_executable_path, input_path, - use_valgrind=options.valgrind, - with_json_checker=options.with_json_checker, - writerClass='StyledWriter') + use_valgrind=options.valgrind, + with_json_checker=options.with_json_checker, + writerClass='StyledWriter') if status: sys.exit(status) status = runAllTests(jsontest_executable_path, input_path, - use_valgrind=options.valgrind, - with_json_checker=options.with_json_checker, - writerClass='StyledStreamWriter') - sys.exit(status) + use_valgrind=options.valgrind, + with_json_checker=options.with_json_checker, + writerClass='StyledStreamWriter') + if status: + sys.exit(status) + status = runAllTests(jsontest_executable_path, input_path, + use_valgrind=options.valgrind, + with_json_checker=options.with_json_checker, + writerClass='BuiltStyledStreamWriter') + if status: + sys.exit(status) if __name__ == '__main__': main() From 94665eab72c482c5622d8c9d89af62fc1e072f96 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Fri, 23 Jan 2015 14:49:17 -0600 Subject: [PATCH 081/885] copy fixes from StyledStreamWriter --- src/lib_json/json_writer.cpp | 58 +++++++++++++++++++++++++----------- 1 file changed, 40 insertions(+), 18 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 27ff6e3f8..b6127d34a 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -665,6 +665,9 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) { value.hasComment(commentAfter); } +////////////////////////// +// BuiltStyledStreamWriter + struct BuiltStyledStreamWriter : public StreamWriter { BuiltStyledStreamWriter( @@ -683,8 +686,7 @@ struct BuiltStyledStreamWriter : public StreamWriter void unindent(); void writeCommentBeforeValue(Value const& root); void writeCommentAfterValueOnSameLine(Value const& root); - bool hasCommentForValue(const Value& value); - static std::string normalizeEOL(std::string const& text); + static bool hasCommentForValue(const Value& value); typedef std::vector ChildValues; @@ -693,7 +695,8 @@ struct BuiltStyledStreamWriter : public StreamWriter int rightMargin_; std::string indentation_; CommentStyle cs_; - bool addChildValues_; + bool addChildValues_ : 1; + bool indented_ : 1; }; 
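// Note on the two flags above: addChildValues_ buffers scalar children in
// childValues_ so short arrays can be written on a single line, and indented_
// records whether the current output line already carries its indentation so
// values and comments are not indented twice; write() resets both.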
BuiltStyledStreamWriter::BuiltStyledStreamWriter( std::ostream* sout, @@ -703,11 +706,14 @@ BuiltStyledStreamWriter::BuiltStyledStreamWriter( , rightMargin_(74) , indentation_(indentation) , cs_(cs) + , addChildValues_(false) + , indented_(false) { } int BuiltStyledStreamWriter::write(Value const& root) { addChildValues_ = false; + indented_ = false; indentString_ = ""; writeCommentBeforeValue(root); writeValue(root); @@ -784,8 +790,10 @@ void BuiltStyledStreamWriter::writeArrayValue(Value const& value) { if (hasChildValue) writeWithIndent(childValues_[index]); else { - writeIndent(); + if (!indented_) writeIndent(); + indented_ = true; writeValue(childValue); + indented_ = false; } if (++index == size) { writeCommentAfterValueOnSameLine(childValue); @@ -826,6 +834,9 @@ bool BuiltStyledStreamWriter::isMultineArray(Value const& value) { addChildValues_ = true; int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + isMultiLine = true; + } writeValue(value[index]); lineLength += int(childValues_[index].length()); } @@ -843,24 +854,17 @@ void BuiltStyledStreamWriter::pushValue(std::string const& value) { } void BuiltStyledStreamWriter::writeIndent() { - /* - Some comments in this method would have been nice. ;-) - - if ( !sout_.empty() ) - { - char last = sout_[sout_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - sout_ << '\n'; - } - */ + // blep intended this to look at the so-far-written string + // to determine whether we are already indented, but + // with a stream we cannot do that. So we rely on some saved state. + // The caller checks indented_. sout_ << '\n' << indentString_; } void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) { - writeIndent(); + if (!indented_) writeIndent(); sout_ << value; + indented_ = false; } void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; } @@ -873,8 +877,22 @@ void BuiltStyledStreamWriter::unindent() { void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { if (!root.hasComment(commentBefore)) return; - sout_ << root.getComment(commentBefore); + sout_ << "\n"; + writeIndent(); + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { + sout_ << *iter; + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) + writeIndent(); + ++iter; + } + + // Comments are stripped of trailing newlines, so add one here + sout_ << "\n"; + indented_ = false; } void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) { @@ -888,12 +906,16 @@ void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root } } +// static bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) { return value.hasComment(commentBefore) || value.hasComment(commentAfterOnSameLine) || value.hasComment(commentAfter); } +/////////////// +// StreamWriter + StreamWriter::StreamWriter(std::ostream* sout) : sout_(*sout) { From fe3979cd8a94c4a23994171a6e48c6905cef05b5 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 13:54:28 -0600 Subject: [PATCH 082/885] drop StreamWriterBuilderFactory, for now --- include/json/writer.h | 14 +++++--------- src/jsontestrunner/main.cpp | 3 +-- src/lib_json/json_writer.cpp | 19 +++++++++++++++---- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git 
a/include/json/writer.h b/include/json/writer.h index b2078495b..6e46cf147 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -30,26 +30,20 @@ class StreamWriterBuilder; using namespace Json; Value value; - StreamWriterBuilderFactory f; - StreamWriter::Builder builder(&f); + StreamWriter::Builder builder; builder.setCommentStyle(StreamWriter::CommentStyle::None); std::shared_ptr writer( builder.newStreamWriter(&std::cout)); writer->write(value); std::cout.flush(); */ -class JSON_API StreamWriterBuilderFactory { -public: - virtual ~StreamWriterBuilderFactory(); - virtual StreamWriterBuilder* newStreamWriterBuilder() const; -}; - class JSON_API StreamWriter { protected: std::ostream& sout_; // not owned; will not delete public: enum class CommentStyle {None, Some, All}; + /// Keep a reference, but do not take ownership of `sout`. StreamWriter(std::ostream* sout); virtual ~StreamWriter(); /// Write Value into document as configured in sub-class. @@ -62,8 +56,10 @@ class JSON_API StreamWriter { /// \see http://stackoverflow.com/questions/14875052/pure-virtual-functions-and-binary-compatibility class Builder { StreamWriterBuilder* own_; + Builder(Builder const&); // noncopyable + void operator=(Builder const&); // noncopyable public: - Builder(StreamWriterBuilderFactory const*); + Builder(); ~Builder(); // delete underlying StreamWriterBuilder void setCommentStyle(CommentStyle cs); /// default: All diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 61411b46b..28894cef5 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -184,8 +184,7 @@ static std::string useStyledStreamWriter( static std::string useBuiltStyledStreamWriter( Json::Value const& root) { - Json::StreamWriterBuilderFactory f; - Json::StreamWriter::Builder builder(&f); + Json::StreamWriter::Builder builder; builder.setCommentStyle(Json::StreamWriter::CommentStyle::All); return writeString(root, builder); } diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index b6127d34a..a3fa50df2 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -967,6 +967,13 @@ StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { return new BuiltStyledStreamWriter(stream, indentation_, cs_); } + +// This might become public someday. 
+class StreamWriterBuilderFactory { +public: + virtual ~StreamWriterBuilderFactory(); + virtual StreamWriterBuilder* newStreamWriterBuilder() const; +}; StreamWriterBuilderFactory::~StreamWriterBuilderFactory() { } @@ -975,14 +982,19 @@ StreamWriterBuilder* StreamWriterBuilderFactory::newStreamWriterBuilder() const return new StreamWriterBuilder; } -StreamWriter::Builder::Builder(StreamWriterBuilderFactory const* f) - : own_(f->newStreamWriterBuilder()) +StreamWriter::Builder::Builder() + : own_(StreamWriterBuilderFactory().newStreamWriterBuilder()) { } StreamWriter::Builder::~Builder() { delete own_; } +StreamWriter::Builder::Builder(Builder const&) + : own_(nullptr) +{abort();} +void StreamWriter::Builder::operator=(Builder const&) +{abort();} void StreamWriter::Builder::setCommentStyle(CommentStyle v) { own_->setCommentStyle(v); @@ -1006,8 +1018,7 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) } std::ostream& operator<<(std::ostream& sout, Value const& root) { - StreamWriterBuilderFactory f; - StreamWriter::Builder builder(&f); + StreamWriter::Builder builder; builder.setCommentStyle(StreamWriter::CommentStyle::All); std::shared_ptr writer(builder.newStreamWriter(&sout)); writer->write(root); From 648843d1482f895f3476f78859a5c10174d2ac40 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 24 Jan 2015 13:57:29 -0600 Subject: [PATCH 083/885] clarify CommentStyle --- include/json/writer.h | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/include/json/writer.h b/include/json/writer.h index 6e46cf147..e4d665e5b 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -41,7 +41,11 @@ class JSON_API StreamWriter { protected: std::ostream& sout_; // not owned; will not delete public: - enum class CommentStyle {None, Some, All}; + /// `All`: Keep all comments. + /// `None`: Drop all comments. + /// Use `Most` to recover the odd behavior of previous versions. + /// Only `All` is currently implemented. + enum class CommentStyle {None, Most, All}; /// Keep a reference, but do not take ownership of `sout`. 
StreamWriter(std::ostream* sout); From dea6f8d9a63f2219e77d57d86caf6f68dd2482c8 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 15:46:33 -0600 Subject: [PATCH 084/885] incorporate 'proper newlines for comments' into new StreamWriter --- src/lib_json/json_writer.cpp | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index a3fa50df2..1c9b7a419 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -713,9 +713,11 @@ BuiltStyledStreamWriter::BuiltStyledStreamWriter( int BuiltStyledStreamWriter::write(Value const& root) { addChildValues_ = false; - indented_ = false; + indented_ = true; indentString_ = ""; writeCommentBeforeValue(root); + if (!indented_) writeIndent(); + indented_ = true; writeValue(root); writeCommentAfterValueOnSameLine(root); sout_ << "\n"; @@ -878,20 +880,17 @@ void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { if (!root.hasComment(commentBefore)) return; - sout_ << "\n"; - writeIndent(); + if (!indented_) writeIndent(); const std::string& comment = root.getComment(commentBefore); std::string::const_iterator iter = comment.begin(); while (iter != comment.end()) { sout_ << *iter; if (*iter == '\n' && (iter != comment.end() && *(iter + 1) == '/')) - writeIndent(); + // writeIndent(); // would write extra newline + sout_ << indentString_; ++iter; } - - // Comments are stripped of trailing newlines, so add one here - sout_ << "\n"; indented_ = false; } @@ -900,9 +899,8 @@ void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root sout_ << " " + root.getComment(commentAfterOnSameLine); if (root.hasComment(commentAfter)) { - sout_ << "\n"; + writeIndent(); sout_ << root.getComment(commentAfter); - sout_ << "\n"; } } From 1e21e63853f461d3f35df77c2f1aef26b648161e Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 16:01:59 -0600 Subject: [PATCH 085/885] default \t indentation, All comments --- src/lib_json/json_writer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 1c9b7a419..eea17b641 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -944,11 +944,17 @@ class StreamWriterBuilder { CommentStyle cs_; std::string indentation_; public: + StreamWriterBuilder(); virtual ~StreamWriterBuilder(); virtual void setCommentStyle(CommentStyle cs); virtual void setIndentation(std::string indentation); virtual StreamWriter* newStreamWriter(std::ostream* sout) const; }; +StreamWriterBuilder::StreamWriterBuilder() + : cs_(CommentStyle::All) + , indentation_("\t") +{ +} StreamWriterBuilder::~StreamWriterBuilder() { } From c6e0688e5a67c17ef03a0d1ee1ee434af72ad766 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 17:32:11 -0600 Subject: [PATCH 086/885] implement CommentStyle::None/indentation_=="" --- src/lib_json/json_writer.cpp | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index eea17b641..43166acbe 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -720,7 +720,9 @@ int BuiltStyledStreamWriter::write(Value const& root) indented_ = true; writeValue(root); writeCommentAfterValueOnSameLine(root); - sout_ << "\n"; + if (!indentation_.empty()) { + sout_ << "\n"; + } return 0; } void BuiltStyledStreamWriter::writeValue(Value const& value) { @@ 
-759,7 +761,9 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) { Value const& childValue = value[name]; writeCommentBeforeValue(childValue); writeWithIndent(valueToQuotedString(name.c_str())); - sout_ << " : "; + if (!indentation_.empty()) sout_ << " "; + sout_ << ":"; + if (!indentation_.empty()) sout_ << " "; writeValue(childValue); if (++it == members.end()) { writeCommentAfterValueOnSameLine(childValue); @@ -809,13 +813,15 @@ void BuiltStyledStreamWriter::writeArrayValue(Value const& value) { } else // output on a single line { assert(childValues_.size() == size); - sout_ << "[ "; + sout_ << "["; + if (!indentation_.empty()) sout_ << " "; for (unsigned index = 0; index < size; ++index) { if (index > 0) sout_ << ", "; sout_ << childValues_[index]; } - sout_ << " ]"; + if (!indentation_.empty()) sout_ << " "; + sout_ << "]"; } } } @@ -860,7 +866,11 @@ void BuiltStyledStreamWriter::writeIndent() { // to determine whether we are already indented, but // with a stream we cannot do that. So we rely on some saved state. // The caller checks indented_. - sout_ << '\n' << indentString_; + + if (!indentation_.empty()) { + // In this case, drop newlines too. + sout_ << '\n' << indentString_; + } } void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) { @@ -877,6 +887,7 @@ void BuiltStyledStreamWriter::unindent() { } void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { + if (cs_ == CommentStyle::None) return; if (!root.hasComment(commentBefore)) return; @@ -895,6 +906,7 @@ void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { } void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) { + if (cs_ == CommentStyle::None) return; if (root.hasComment(commentAfterOnSameLine)) sout_ << " " + root.getComment(commentAfterOnSameLine); From d78caa3851766504d06bfd8ba7766863bcc59a38 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 18:15:54 -0600 Subject: [PATCH 087/885] implement strange setting from FastWriter --- include/json/writer.h | 18 ++++++++ src/lib_json/json_writer.cpp | 82 ++++++++++++++++++++++++++++++------ 2 files changed, 88 insertions(+), 12 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index e4d665e5b..763a94964 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -74,6 +74,24 @@ class JSON_API StreamWriter { Default: "\t" */ void setIndentation(std::string indentation); + /** \brief Drop the "null" string from the writer's output for nullValues. + * Strictly speaking, this is not valid JSON. But when the output is being + * fed to a browser's Javascript, it makes for smaller output and the + * browser can handle the output just fine. + */ + void setDropNullPlaceholders(bool v); + /** \brief Do not add \n at end of document. + * Normally, we add an extra newline, just because. + */ + void setOmitEndingLineFeed(bool v); + /** \brief Add a space after ':'. + * If indentation is non-empty, we surround colon with whitespace, + * e.g. " : " + * This will add back the trailing space when there is no indentation. + * This seems dubious when the entire document is on a single line, + * but we leave this here to repduce the behavior of the old `FastWriter`. + */ + void setEnableYAMLCompatibility(bool v); /// Do not take ownership of sout, but maintain a reference. 
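    ///
    /// For illustration, one possible way to combine these settings
    /// (a sketch; adapt the stream and the Value to your own code):
    /// \code
    ///   Value value;
    ///   StreamWriter::Builder builder;
    ///   builder.setIndentation("");               // everything on one line
    ///   builder.setDropNullPlaceholders(true);    // write "" instead of "null"
    ///   builder.setEnableYAMLCompatibility(true); // "key: value" style colons
    ///   std::unique_ptr<StreamWriter> writer(builder.newStreamWriter(&std::cout));
    ///   writer->write(value);
    ///   std::cout << std::endl;
    /// \endcode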
StreamWriter* newStreamWriter(std::ostream* sout) const; diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 43166acbe..bf103d2f5 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -673,7 +673,10 @@ struct BuiltStyledStreamWriter : public StreamWriter BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, - StreamWriter::CommentStyle cs); + StreamWriter::CommentStyle cs, + std::string const& colonSymbol, + std::string const& nullSymbol, + std::string const& endingLineFeedSymbol); virtual int write(Value const& root); private: void writeValue(Value const& value); @@ -695,17 +698,26 @@ struct BuiltStyledStreamWriter : public StreamWriter int rightMargin_; std::string indentation_; CommentStyle cs_; + std::string colonSymbol_; + std::string nullSymbol_; + std::string endingLineFeedSymbol_; bool addChildValues_ : 1; bool indented_ : 1; }; BuiltStyledStreamWriter::BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, - StreamWriter::CommentStyle cs) + StreamWriter::CommentStyle cs, + std::string const& colonSymbol, + std::string const& nullSymbol, + std::string const& endingLineFeedSymbol) : StreamWriter(sout) , rightMargin_(74) , indentation_(indentation) , cs_(cs) + , colonSymbol_(colonSymbol) + , nullSymbol_(nullSymbol) + , endingLineFeedSymbol_(endingLineFeedSymbol) , addChildValues_(false) , indented_(false) { @@ -720,15 +732,13 @@ int BuiltStyledStreamWriter::write(Value const& root) indented_ = true; writeValue(root); writeCommentAfterValueOnSameLine(root); - if (!indentation_.empty()) { - sout_ << "\n"; - } + sout_ << endingLineFeedSymbol_; return 0; } void BuiltStyledStreamWriter::writeValue(Value const& value) { switch (value.type()) { case nullValue: - pushValue("null"); + pushValue(nullSymbol_); break; case intValue: pushValue(valueToString(value.asLargestInt())); @@ -761,9 +771,7 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) { Value const& childValue = value[name]; writeCommentBeforeValue(childValue); writeWithIndent(valueToQuotedString(name.c_str())); - if (!indentation_.empty()) sout_ << " "; - sout_ << ":"; - if (!indentation_.empty()) sout_ << " "; + sout_ << colonSymbol_; writeValue(childValue); if (++it == members.end()) { writeCommentAfterValueOnSameLine(childValue); @@ -955,16 +963,25 @@ class StreamWriterBuilder { typedef StreamWriter::CommentStyle CommentStyle; CommentStyle cs_; std::string indentation_; + bool dropNullPlaceholders_; + bool omitEndingLineFeed_; + bool enableYAMLCompatibility_; public: StreamWriterBuilder(); virtual ~StreamWriterBuilder(); virtual void setCommentStyle(CommentStyle cs); virtual void setIndentation(std::string indentation); + virtual void setDropNullPlaceholders(bool v); + virtual void setOmitEndingLineFeed(bool v); + virtual void setEnableYAMLCompatibility(bool v); virtual StreamWriter* newStreamWriter(std::ostream* sout) const; }; StreamWriterBuilder::StreamWriterBuilder() : cs_(CommentStyle::All) , indentation_("\t") + , dropNullPlaceholders_(false) + , omitEndingLineFeed_(false) + , enableYAMLCompatibility_(false) { } StreamWriterBuilder::~StreamWriterBuilder() @@ -979,9 +996,39 @@ void StreamWriterBuilder::setIndentation(std::string v) indentation_ = v; if (indentation_.empty()) cs_ = CommentStyle::None; } +void StreamWriterBuilder::setDropNullPlaceholders(bool v) +{ + dropNullPlaceholders_ = v; +} +void StreamWriterBuilder::setOmitEndingLineFeed(bool v) +{ + omitEndingLineFeed_ = v; +} +void 
StreamWriterBuilder::setEnableYAMLCompatibility(bool v) +{ + enableYAMLCompatibility_ = v; +} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { - return new BuiltStyledStreamWriter(stream, indentation_, cs_); + std::string colonSymbol = " : "; + if (indentation_.empty()) { + if (enableYAMLCompatibility_) { + colonSymbol = ": "; + } else { + colonSymbol = ":"; + } + } + std::string nullSymbol = "null"; + if (dropNullPlaceholders_) { + nullSymbol = ""; + } + std::string endingLineFeedSymbol = "\n"; + if (omitEndingLineFeed_) { + endingLineFeedSymbol = ""; + } + return new BuiltStyledStreamWriter(stream, + indentation_, cs_, + colonSymbol, nullSymbol, endingLineFeedSymbol); } // This might become public someday. @@ -1019,13 +1066,23 @@ void StreamWriter::Builder::setIndentation(std::string v) { own_->setIndentation(v); } +void StreamWriter::Builder::setDropNullPlaceholders(bool v) +{ + own_->setDropNullPlaceholders(v); +} +void StreamWriter::Builder::setOmitEndingLineFeed(bool v) +{ + own_->setOmitEndingLineFeed(v); +} +void StreamWriter::Builder::setEnableYAMLCompatibility(bool v) +{ + own_->setEnableYAMLCompatibility(v); +} StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) const { return own_->newStreamWriter(sout); } -/// Do not take ownership of sout, but maintain a reference. -StreamWriter* newStreamWriter(std::ostream* sout); std::string writeString(Value const& root, StreamWriter::Builder const& builder) { std::ostringstream sout; std::unique_ptr const sw(builder.newStreamWriter(&sout)); @@ -1036,6 +1093,7 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) std::ostream& operator<<(std::ostream& sout, Value const& root) { StreamWriter::Builder builder; builder.setCommentStyle(StreamWriter::CommentStyle::All); + builder.setIndentation("\t"); std::shared_ptr writer(builder.newStreamWriter(&sout)); writer->write(root); return sout; From c7b39c2e2536386a1badc46968e82374e85184ea Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 18:45:59 -0600 Subject: [PATCH 088/885] deprecate old Writers also, use withers instead of setters, and update docs --- doc/jsoncpp.dox | 23 +++++++++++++++-------- include/json/writer.h | 16 ++++++++++------ src/jsontestrunner/main.cpp | 2 +- src/lib_json/json_writer.cpp | 19 ++++++++++++------- 4 files changed, 38 insertions(+), 22 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index f1937194e..17d82d584 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -73,24 +73,31 @@ for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the seq setIndentLength( root["indent"].get("length", 3).asInt() ); setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); -// ... -// At application shutdown to make the new configuration document: // Since Json::Value has implicit constructor for all value types, it is not // necessary to explicitly construct the Json::Value object: root["encoding"] = getCurrentEncoding(); root["indent"]["length"] = getCurrentIndentLength(); root["indent"]["use_space"] = getCurrentIndentUseSpace(); -Json::StyledWriter writer; +// To write into a steam with minimal memory overhead, +// create a Builder for a StreamWriter. +Json::StreamWriter::Builder builder; +builder.withIndentation(" "); // or whatever you like + +// Then build a StreamWriter. +// (Of course, you can write to std::ostringstream if you prefer.) 
+std::shared_ptr writer( + builder.newStreamWriter( &std::cout ); + // Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); +writer->write( root ); + +// If you like the defaults, you can insert directly into a stream. +std::cout << root; -// You can also use streams. This will put the contents of any JSON +// You can also read from a stream. This will put the contents of any JSON // stream at a particular sub-value, if you'd like. std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; \endcode \section _pbuild Build instructions diff --git a/include/json/writer.h b/include/json/writer.h index 763a94964..fb824e335 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -31,7 +31,7 @@ class StreamWriterBuilder; using namespace Json; Value value; StreamWriter::Builder builder; - builder.setCommentStyle(StreamWriter::CommentStyle::None); + builder.withCommentStyle(StreamWriter::CommentStyle::None); std::shared_ptr writer( builder.newStreamWriter(&std::cout)); writer->write(value); @@ -66,24 +66,24 @@ class JSON_API StreamWriter { Builder(); ~Builder(); // delete underlying StreamWriterBuilder - void setCommentStyle(CommentStyle cs); /// default: All + Builder& withCommentStyle(CommentStyle cs); /// default: All /** \brief Write in human-friendly style. If "", then skip all indentation, newlines, and comments, which implies CommentStyle::None. Default: "\t" */ - void setIndentation(std::string indentation); + Builder& withIndentation(std::string indentation); /** \brief Drop the "null" string from the writer's output for nullValues. * Strictly speaking, this is not valid JSON. But when the output is being * fed to a browser's Javascript, it makes for smaller output and the * browser can handle the output just fine. */ - void setDropNullPlaceholders(bool v); + Builder& withDropNullPlaceholders(bool v); /** \brief Do not add \n at end of document. * Normally, we add an extra newline, just because. */ - void setOmitEndingLineFeed(bool v); + Builder& withOmitEndingLineFeed(bool v); /** \brief Add a space after ':'. * If indentation is non-empty, we surround colon with whitespace, * e.g. " : " @@ -91,7 +91,7 @@ class JSON_API StreamWriter { * This seems dubious when the entire document is on a single line, * but we leave this here to repduce the behavior of the old `FastWriter`. */ - void setEnableYAMLCompatibility(bool v); + Builder& withEnableYAMLCompatibility(bool v); /// Do not take ownership of sout, but maintain a reference. StreamWriter* newStreamWriter(std::ostream* sout) const; @@ -103,6 +103,7 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) /** \brief Abstract class for writers. + * \deprecated Use StreamWriter::Builder. */ class JSON_API Writer { public: @@ -118,6 +119,7 @@ class JSON_API Writer { *consumption, * but may be usefull to support feature such as RPC where bandwith is limited. * \sa Reader, Value + * \deprecated Use StreamWriter::Builder. */ class JSON_API FastWriter : public Writer { public: @@ -169,6 +171,7 @@ class JSON_API FastWriter : public Writer { *#CommentPlacement. * * \sa Reader, Value, Value::setComment() + * \deprecated Use StreamWriter::Builder. */ class JSON_API StyledWriter : public Writer { public: @@ -230,6 +233,7 @@ class JSON_API StyledWriter : public Writer { * * \param indentation Each level will be indented by this amount extra. 
* \sa Reader, Value, Value::setComment() + * \deprecated Use StreamWriter::Builder. */ class JSON_API StyledStreamWriter { public: diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 28894cef5..3a2229c55 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -185,7 +185,7 @@ static std::string useBuiltStyledStreamWriter( Json::Value const& root) { Json::StreamWriter::Builder builder; - builder.setCommentStyle(Json::StreamWriter::CommentStyle::All); + builder.withCommentStyle(Json::StreamWriter::CommentStyle::All); return writeString(root, builder); } static int rewriteValueTree( diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index bf103d2f5..7f542aab7 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -1058,25 +1058,30 @@ StreamWriter::Builder::Builder(Builder const&) {abort();} void StreamWriter::Builder::operator=(Builder const&) {abort();} -void StreamWriter::Builder::setCommentStyle(CommentStyle v) +StreamWriter::Builder& StreamWriter::Builder::withCommentStyle(CommentStyle v) { own_->setCommentStyle(v); + return *this; } -void StreamWriter::Builder::setIndentation(std::string v) +StreamWriter::Builder& StreamWriter::Builder::withIndentation(std::string v) { own_->setIndentation(v); + return *this; } -void StreamWriter::Builder::setDropNullPlaceholders(bool v) +StreamWriter::Builder& StreamWriter::Builder::withDropNullPlaceholders(bool v) { own_->setDropNullPlaceholders(v); + return *this; } -void StreamWriter::Builder::setOmitEndingLineFeed(bool v) +StreamWriter::Builder& StreamWriter::Builder::withOmitEndingLineFeed(bool v) { own_->setOmitEndingLineFeed(v); + return *this; } -void StreamWriter::Builder::setEnableYAMLCompatibility(bool v) +StreamWriter::Builder& StreamWriter::Builder::withEnableYAMLCompatibility(bool v) { own_->setEnableYAMLCompatibility(v); + return *this; } StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) const { @@ -1092,8 +1097,8 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) std::ostream& operator<<(std::ostream& sout, Value const& root) { StreamWriter::Builder builder; - builder.setCommentStyle(StreamWriter::CommentStyle::All); - builder.setIndentation("\t"); + builder.withCommentStyle(StreamWriter::CommentStyle::All); + builder.withIndentation("\t"); std::shared_ptr writer(builder.newStreamWriter(&sout)); writer->write(root); return sout; From 9da9f84903e8e53a75c62097033c6a413b045a35 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 25 Jan 2015 19:20:43 -0600 Subject: [PATCH 089/885] improve docs including `writeString()` --- doc/jsoncpp.dox | 7 +++++-- include/json/writer.h | 14 ++++++++------ 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 17d82d584..2c19664ce 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -85,9 +85,8 @@ Json::StreamWriter::Builder builder; builder.withIndentation(" "); // or whatever you like // Then build a StreamWriter. -// (Of course, you can write to std::ostringstream if you prefer.) std::shared_ptr writer( - builder.newStreamWriter( &std::cout ); + builder.newStreamWriter( &std::cout ) ); // Make a new JSON document for the configuration. Preserve original comments. writer->write( root ); @@ -95,6 +94,10 @@ writer->write( root ); // If you like the defaults, you can insert directly into a stream. std::cout << root; +// Of course, you can write to `std::ostringstream` if you prefer. 
Or +// use `writeString()` for convenience. +std::string document = Json::writeString( root, builder ); + // You can also read from a stream. This will put the contents of any JSON // stream at a particular sub-value, if you'd like. std::cin >> root["subtree"]; diff --git a/include/json/writer.h b/include/json/writer.h index fb824e335..54079069e 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -27,7 +27,7 @@ class StreamWriterBuilder; /** Usage: - +\code using namespace Json; Value value; StreamWriter::Builder builder; @@ -36,16 +36,18 @@ class StreamWriterBuilder; builder.newStreamWriter(&std::cout)); writer->write(value); std::cout.flush(); +\endcode */ class JSON_API StreamWriter { protected: std::ostream& sout_; // not owned; will not delete public: - /// `All`: Keep all comments. - /// `None`: Drop all comments. - /// Use `Most` to recover the odd behavior of previous versions. - /// Only `All` is currently implemented. - enum class CommentStyle {None, Most, All}; + /// Decide whether to write comments. + enum class CommentStyle { + None, ///< Drop all comments. + Most, ///< Recover odd behavior of previous versions (not implemented yet). + All ///< Keep all comments. + }; /// Keep a reference, but do not take ownership of `sout`. StreamWriter(std::ostream* sout); From 177b7b8f229e16b1e5bbee1d03f818f57978f902 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 10:35:54 -0600 Subject: [PATCH 090/885] OldCompressingStreamWriterBuilder --- include/json/writer.h | 73 +++++++++++++++++++++++++++++++++++- src/lib_json/json_writer.cpp | 25 +++++++++++- 2 files changed, 96 insertions(+), 2 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index 54079069e..25bc812ca 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -11,6 +11,7 @@ #endif // if !defined(JSON_IS_AMALGAMATION) #include #include +#include // Disable warning C4251: : needs to have dll-interface to // be used by... @@ -60,7 +61,7 @@ class JSON_API StreamWriter { /// Because this Builder is non-virtual, we can safely add /// methods without a major version bump. /// \see http://stackoverflow.com/questions/14875052/pure-virtual-functions-and-binary-compatibility - class Builder { + class JSON_API Builder { StreamWriterBuilder* own_; Builder(Builder const&); // noncopyable void operator=(Builder const&); // noncopyable @@ -98,11 +99,81 @@ class JSON_API StreamWriter { /// Do not take ownership of sout, but maintain a reference. StreamWriter* newStreamWriter(std::ostream* sout) const; }; + + /** \brief A simple abstract factory. + */ + class JSON_API Factory { + public: + virtual ~Factory(); + /* Because this is only a trivial API (the Factory pattern), we will + * never need to add virtual methods, so we do not need a concrete wrapper. + * This is better than the Builder above, but not everyone will agree. + */ + + /// Do not take ownership of sout, but maintain a reference. + virtual StreamWriter* newStreamWriter(std::ostream* sout) const = 0; + }; + + /** \brief Extensions of this are used to create a StreamWriter::Factory. + */ + class JSON_API FactoryFactory { + virtual ~FactoryFactory(); + virtual Factory* newFactory() const = 0; + /* This class will seem strange to some developers, but it actually + * simplifies our library maintenance. + */ + }; + }; /// \brief Write into stringstream, then return string, for convenience. std::string writeString(Value const& root, StreamWriter::Builder const& builder); +/** \brief Build a StreamWriter implementation. 
+ * Comments are not written, and most whitespace is omitted. + * In addition, there are some special settings to allow compatibility + * with the old FastWriter. + * Usage: + * \code + * OldCompressingStreamWriterBuilder b; + * b.dropNullPlaceHolders_ = true; // etc. + * StreamWriter* w = b.newStreamWriter(&std::cout); + * w.write(value); + * delete w; + * \endcode + */ +class JSON_API OldCompressingStreamWriterBuilder +{ +public: + // Note: We cannot add data-members to this class without a major version bump. + // So these might as well be completely exposed. + + /** \brief Drop the "null" string from the writer's output for nullValues. + * Strictly speaking, this is not valid JSON. But when the output is being + * fed to a browser's Javascript, it makes for smaller output and the + * browser can handle the output just fine. + */ + bool dropNullPlaceholders_; + /** \brief Do not add \n at end of document. + * Normally, we add an extra newline, just because. + */ + bool omitEndingLineFeed_; + /** \brief Add a space after ':'. + * If indentation is non-empty, we surround colon with whitespace, + * e.g. " : " + * This will add back the trailing space when there is no indentation. + * This seems dubious when the entire document is on a single line, + * but we leave this here to repduce the behavior of the old `FastWriter`. + */ + bool enableYAMLCompatibility_; + + OldCompressingStreamWriterBuilder() + : dropNullPlaceholders_(false) + , omitEndingLineFeed_(false) + , enableYAMLCompatibility_(false) + {} + virtual StreamWriter* newStreamWriter(std::ostream*) const; +}; /** \brief Abstract class for writers. * \deprecated Use StreamWriter::Builder. diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 7f542aab7..a24b3f507 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -1083,11 +1083,34 @@ StreamWriter::Builder& StreamWriter::Builder::withEnableYAMLCompatibility(bool v own_->setEnableYAMLCompatibility(v); return *this; } -StreamWriter* StreamWriter::Builder::newStreamWriter(std::ostream* sout) const +StreamWriter* StreamWriter::Builder::newStreamWriter( + std::ostream* sout) const { return own_->newStreamWriter(sout); } +StreamWriter* OldCompressingStreamWriterBuilder::newStreamWriter( + std::ostream* stream) const +{ + std::string colonSymbol = " : "; + if (enableYAMLCompatibility_) { + colonSymbol = ": "; + } else { + colonSymbol = ":"; + } + std::string nullSymbol = "null"; + if (dropNullPlaceholders_) { + nullSymbol = ""; + } + std::string endingLineFeedSymbol = "\n"; + if (omitEndingLineFeed_) { + endingLineFeedSymbol = ""; + } + return new BuiltStyledStreamWriter(stream, + "", StreamWriter::CommentStyle::None, + colonSymbol, nullSymbol, endingLineFeedSymbol); +} + std::string writeString(Value const& root, StreamWriter::Builder const& builder) { std::ostringstream sout; std::unique_ptr const sw(builder.newStreamWriter(&sout)); From 28a20917b0a3e2cf5189cabdf71c9bd6b3363353 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 10:43:39 -0600 Subject: [PATCH 091/885] Move old FastWriter stuff out of new Builder --- doc/jsoncpp.dox | 3 +++ include/json/writer.h | 20 +-------------- src/lib_json/json_writer.cpp | 50 ++---------------------------------- 3 files changed, 6 insertions(+), 67 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 2c19664ce..2dcdbff8b 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -94,6 +94,9 @@ writer->write( root ); // If you like the defaults, you can insert directly 
into a stream. std::cout << root; +// If desired, remember to add a linefeed and flush. +std::cout << std::endl; + // Of course, you can write to `std::ostringstream` if you prefer. Or // use `writeString()` for convenience. std::string document = Json::writeString( root, builder ); diff --git a/include/json/writer.h b/include/json/writer.h index 25bc812ca..176876ef0 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -36,7 +36,7 @@ class StreamWriterBuilder; std::shared_ptr writer( builder.newStreamWriter(&std::cout)); writer->write(value); - std::cout.flush(); + std::cout << std::endl; // add lf and flush \endcode */ class JSON_API StreamWriter { @@ -77,24 +77,6 @@ class JSON_API StreamWriter { Default: "\t" */ Builder& withIndentation(std::string indentation); - /** \brief Drop the "null" string from the writer's output for nullValues. - * Strictly speaking, this is not valid JSON. But when the output is being - * fed to a browser's Javascript, it makes for smaller output and the - * browser can handle the output just fine. - */ - Builder& withDropNullPlaceholders(bool v); - /** \brief Do not add \n at end of document. - * Normally, we add an extra newline, just because. - */ - Builder& withOmitEndingLineFeed(bool v); - /** \brief Add a space after ':'. - * If indentation is non-empty, we surround colon with whitespace, - * e.g. " : " - * This will add back the trailing space when there is no indentation. - * This seems dubious when the entire document is on a single line, - * but we leave this here to repduce the behavior of the old `FastWriter`. - */ - Builder& withEnableYAMLCompatibility(bool v); /// Do not take ownership of sout, but maintain a reference. StreamWriter* newStreamWriter(std::ostream* sout) const; diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index a24b3f507..11c564ba3 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -963,25 +963,16 @@ class StreamWriterBuilder { typedef StreamWriter::CommentStyle CommentStyle; CommentStyle cs_; std::string indentation_; - bool dropNullPlaceholders_; - bool omitEndingLineFeed_; - bool enableYAMLCompatibility_; public: StreamWriterBuilder(); virtual ~StreamWriterBuilder(); virtual void setCommentStyle(CommentStyle cs); virtual void setIndentation(std::string indentation); - virtual void setDropNullPlaceholders(bool v); - virtual void setOmitEndingLineFeed(bool v); - virtual void setEnableYAMLCompatibility(bool v); virtual StreamWriter* newStreamWriter(std::ostream* sout) const; }; StreamWriterBuilder::StreamWriterBuilder() : cs_(CommentStyle::All) , indentation_("\t") - , dropNullPlaceholders_(false) - , omitEndingLineFeed_(false) - , enableYAMLCompatibility_(false) { } StreamWriterBuilder::~StreamWriterBuilder() @@ -996,36 +987,14 @@ void StreamWriterBuilder::setIndentation(std::string v) indentation_ = v; if (indentation_.empty()) cs_ = CommentStyle::None; } -void StreamWriterBuilder::setDropNullPlaceholders(bool v) -{ - dropNullPlaceholders_ = v; -} -void StreamWriterBuilder::setOmitEndingLineFeed(bool v) -{ - omitEndingLineFeed_ = v; -} -void StreamWriterBuilder::setEnableYAMLCompatibility(bool v) -{ - enableYAMLCompatibility_ = v; -} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { std::string colonSymbol = " : "; if (indentation_.empty()) { - if (enableYAMLCompatibility_) { - colonSymbol = ": "; - } else { - colonSymbol = ":"; - } + colonSymbol = ":"; } std::string nullSymbol = "null"; - if (dropNullPlaceholders_) { - nullSymbol 
= ""; - } - std::string endingLineFeedSymbol = "\n"; - if (omitEndingLineFeed_) { - endingLineFeedSymbol = ""; - } + std::string endingLineFeedSymbol = ""; return new BuiltStyledStreamWriter(stream, indentation_, cs_, colonSymbol, nullSymbol, endingLineFeedSymbol); @@ -1068,21 +1037,6 @@ StreamWriter::Builder& StreamWriter::Builder::withIndentation(std::string v) own_->setIndentation(v); return *this; } -StreamWriter::Builder& StreamWriter::Builder::withDropNullPlaceholders(bool v) -{ - own_->setDropNullPlaceholders(v); - return *this; -} -StreamWriter::Builder& StreamWriter::Builder::withOmitEndingLineFeed(bool v) -{ - own_->setOmitEndingLineFeed(v); - return *this; -} -StreamWriter::Builder& StreamWriter::Builder::withEnableYAMLCompatibility(bool v) -{ - own_->setEnableYAMLCompatibility(v); - return *this; -} StreamWriter* StreamWriter::Builder::newStreamWriter( std::ostream* sout) const { From 6065a1c1424fac66f9d6da25f2de58b19f2350b2 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 11:01:15 -0600 Subject: [PATCH 092/885] make StreamWriterBuilder concrete --- include/json/writer.h | 69 ++++++++++++++---------------------- src/jsontestrunner/main.cpp | 3 +- src/lib_json/json_writer.cpp | 68 ++++------------------------------- 3 files changed, 34 insertions(+), 106 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index 176876ef0..db71cd74a 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -58,58 +58,43 @@ class JSON_API StreamWriter { /// \throw std::exception possibly, depending on configuration virtual int write(Value const& root) = 0; - /// Because this Builder is non-virtual, we can safely add - /// methods without a major version bump. - /// \see http://stackoverflow.com/questions/14875052/pure-virtual-functions-and-binary-compatibility - class JSON_API Builder { - StreamWriterBuilder* own_; - Builder(Builder const&); // noncopyable - void operator=(Builder const&); // noncopyable - public: - Builder(); - ~Builder(); // delete underlying StreamWriterBuilder - - Builder& withCommentStyle(CommentStyle cs); /// default: All - /** \brief Write in human-friendly style. - - If "", then skip all indentation, newlines, and comments, - which implies CommentStyle::None. - Default: "\t" - */ - Builder& withIndentation(std::string indentation); - - /// Do not take ownership of sout, but maintain a reference. - StreamWriter* newStreamWriter(std::ostream* sout) const; - }; - /** \brief A simple abstract factory. */ class JSON_API Factory { public: virtual ~Factory(); - /* Because this is only a trivial API (the Factory pattern), we will - * never need to add virtual methods, so we do not need a concrete wrapper. - * This is better than the Builder above, but not everyone will agree. - */ - /// Do not take ownership of sout, but maintain a reference. virtual StreamWriter* newStreamWriter(std::ostream* sout) const = 0; - }; + }; // Factory +}; // StreamWriter + +/// \brief Write into stringstream, then return string, for convenience. +std::string writeString(Value const& root, StreamWriter::Factory const& factory); - /** \brief Extensions of this are used to create a StreamWriter::Factory. + +/** \brief Build a StreamWriter implementation. + */ +class JSON_API StreamWriterBuilder : public StreamWriter::Factory { + // typedef StreamWriter::CommentStyle CommentStyle; +public: + // Note: We cannot add data-members to this class without a major version bump. + // So these might as well be completely exposed. 
+ + /** \brief How to write comments. + * Default: All */ - class JSON_API FactoryFactory { - virtual ~FactoryFactory(); - virtual Factory* newFactory() const = 0; - /* This class will seem strange to some developers, but it actually - * simplifies our library maintenance. - */ - }; + StreamWriter::CommentStyle cs_ = StreamWriter::CommentStyle::All; + /** \brief Write in human-friendly style. -}; + If "", then skip all indentation and newlines. + In that case, you probably want CommentStyle::None also. + Default: "\t" + */ + std::string indentation_ = "\t"; -/// \brief Write into stringstream, then return string, for convenience. -std::string writeString(Value const& root, StreamWriter::Builder const& builder); + /// Do not take ownership of sout, but maintain a reference. + StreamWriter* newStreamWriter(std::ostream* sout) const; +}; /** \brief Build a StreamWriter implementation. * Comments are not written, and most whitespace is omitted. @@ -124,7 +109,7 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) * delete w; * \endcode */ -class JSON_API OldCompressingStreamWriterBuilder +class JSON_API OldCompressingStreamWriterBuilder : public StreamWriter::Factory { public: // Note: We cannot add data-members to this class without a major version bump. diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 3a2229c55..dba943bda 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -184,8 +184,7 @@ static std::string useStyledStreamWriter( static std::string useBuiltStyledStreamWriter( Json::Value const& root) { - Json::StreamWriter::Builder builder; - builder.withCommentStyle(Json::StreamWriter::CommentStyle::All); + Json::StreamWriterBuilder builder; return writeString(root, builder); } static int rewriteValueTree( diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 11c564ba3..b8d1ba894 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -959,34 +959,8 @@ int MyStreamWriter::write(Value const& root) sout_ << root; return 0; } -class StreamWriterBuilder { - typedef StreamWriter::CommentStyle CommentStyle; - CommentStyle cs_; - std::string indentation_; -public: - StreamWriterBuilder(); - virtual ~StreamWriterBuilder(); - virtual void setCommentStyle(CommentStyle cs); - virtual void setIndentation(std::string indentation); - virtual StreamWriter* newStreamWriter(std::ostream* sout) const; -}; -StreamWriterBuilder::StreamWriterBuilder() - : cs_(CommentStyle::All) - , indentation_("\t") -{ -} -StreamWriterBuilder::~StreamWriterBuilder() -{ -} -void StreamWriterBuilder::setCommentStyle(CommentStyle v) -{ - cs_ = v; -} -void StreamWriterBuilder::setIndentation(std::string v) -{ - indentation_ = v; - if (indentation_.empty()) cs_ = CommentStyle::None; -} +StreamWriter::Factory::~Factory() +{} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { std::string colonSymbol = " : "; @@ -999,7 +973,7 @@ StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const indentation_, cs_, colonSymbol, nullSymbol, endingLineFeedSymbol); } - +/* // This might become public someday. 
class StreamWriterBuilderFactory { public: @@ -1013,35 +987,7 @@ StreamWriterBuilder* StreamWriterBuilderFactory::newStreamWriterBuilder() const { return new StreamWriterBuilder; } - -StreamWriter::Builder::Builder() - : own_(StreamWriterBuilderFactory().newStreamWriterBuilder()) -{ -} -StreamWriter::Builder::~Builder() -{ - delete own_; -} -StreamWriter::Builder::Builder(Builder const&) - : own_(nullptr) -{abort();} -void StreamWriter::Builder::operator=(Builder const&) -{abort();} -StreamWriter::Builder& StreamWriter::Builder::withCommentStyle(CommentStyle v) -{ - own_->setCommentStyle(v); - return *this; -} -StreamWriter::Builder& StreamWriter::Builder::withIndentation(std::string v) -{ - own_->setIndentation(v); - return *this; -} -StreamWriter* StreamWriter::Builder::newStreamWriter( - std::ostream* sout) const -{ - return own_->newStreamWriter(sout); -} +*/ StreamWriter* OldCompressingStreamWriterBuilder::newStreamWriter( std::ostream* stream) const @@ -1065,7 +1011,7 @@ StreamWriter* OldCompressingStreamWriterBuilder::newStreamWriter( colonSymbol, nullSymbol, endingLineFeedSymbol); } -std::string writeString(Value const& root, StreamWriter::Builder const& builder) { +std::string writeString(Value const& root, StreamWriter::Factory const& builder) { std::ostringstream sout; std::unique_ptr const sw(builder.newStreamWriter(&sout)); sw->write(root); @@ -1073,9 +1019,7 @@ std::string writeString(Value const& root, StreamWriter::Builder const& builder) } std::ostream& operator<<(std::ostream& sout, Value const& root) { - StreamWriter::Builder builder; - builder.withCommentStyle(StreamWriter::CommentStyle::All); - builder.withIndentation("\t"); + StreamWriterBuilder builder; std::shared_ptr writer(builder.newStreamWriter(&sout)); writer->write(root); return sout; From 472d29f57b1453802ac6ab59eedcbd421fac0264 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 11:04:03 -0600 Subject: [PATCH 093/885] fix doc --- doc/jsoncpp.dox | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 2dcdbff8b..fe06d507c 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -81,8 +81,8 @@ root["indent"]["use_space"] = getCurrentIndentUseSpace(); // To write into a steam with minimal memory overhead, // create a Builder for a StreamWriter. -Json::StreamWriter::Builder builder; -builder.withIndentation(" "); // or whatever you like +Json::StreamWriterBuilder builder; +builder.indentation_ = " "; // or whatever you like // Then build a StreamWriter. 
std::shared_ptr writer( From 999f5912f0f99666dddfcc336f031f24c1a0da2a Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 11:12:53 -0600 Subject: [PATCH 094/885] docs --- include/json/writer.h | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index db71cd74a..0dca690af 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -23,20 +23,18 @@ namespace Json { class Value; -class StreamWriterBuilder; /** Usage: \code using namespace Json; - Value value; - StreamWriter::Builder builder; - builder.withCommentStyle(StreamWriter::CommentStyle::None); - std::shared_ptr writer( - builder.newStreamWriter(&std::cout)); - writer->write(value); - std::cout << std::endl; // add lf and flush + void writeToStdout(StreamWriter::Builder const& builder, Value const& value) { + std::unique_ptr const writer( + builder.newStreamWriter(&std::cout)); + writer->write(value); + std::cout << std::endl; // add lf and flush + } \endcode */ class JSON_API StreamWriter { @@ -73,9 +71,20 @@ std::string writeString(Value const& root, StreamWriter::Factory const& factory) /** \brief Build a StreamWriter implementation. - */ + +Usage: +\code + using namespace Json; + Value value = ...; + StreamWriter::Builder builder; + builder.cs_ = StreamWriter::CommentStyle::None; + std::shared_ptr writer( + builder.newStreamWriter(&std::cout)); + writer->write(value); + std::cout << std::endl; // add lf and flush +\endcode +*/ class JSON_API StreamWriterBuilder : public StreamWriter::Factory { - // typedef StreamWriter::CommentStyle CommentStyle; public: // Note: We cannot add data-members to this class without a major version bump. // So these might as well be completely exposed. From 7eca3b4e88bce2787587ceb06d44181822221a09 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 11:17:42 -0600 Subject: [PATCH 095/885] gcc-4.6 (Travis CI) does not support --- include/json/writer.h | 6 ++++-- src/lib_json/json_writer.cpp | 4 ++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/include/json/writer.h b/include/json/writer.h index 0dca690af..be8ea385a 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -92,14 +92,16 @@ class JSON_API StreamWriterBuilder : public StreamWriter::Factory { /** \brief How to write comments. * Default: All */ - StreamWriter::CommentStyle cs_ = StreamWriter::CommentStyle::All; + StreamWriter::CommentStyle cs_; /** \brief Write in human-friendly style. If "", then skip all indentation and newlines. In that case, you probably want CommentStyle::None also. Default: "\t" */ - std::string indentation_ = "\t"; + std::string indentation_; + + StreamWriterBuilder(); /// Do not take ownership of sout, but maintain a reference. 
StreamWriter* newStreamWriter(std::ostream* sout) const; diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index b8d1ba894..6e0a429bc 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -961,6 +961,10 @@ int MyStreamWriter::write(Value const& root) } StreamWriter::Factory::~Factory() {} +StreamWriterBuilder::StreamWriterBuilder() + : cs_(StreamWriter::CommentStyle::All) + , indentation_("\t") +{} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { std::string colonSymbol = " : "; From 38042b3892c762c93a98be7c4c5c14f80d7ed46e Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 26 Jan 2015 11:23:31 -0600 Subject: [PATCH 096/885] docs --- doc/jsoncpp.dox | 1 + include/json/writer.h | 14 +++++++------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index fe06d507c..0581476c6 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -79,6 +79,7 @@ root["encoding"] = getCurrentEncoding(); root["indent"]["length"] = getCurrentIndentLength(); root["indent"]["use_space"] = getCurrentIndentUseSpace(); +// (NEW IN 1.4.0) // To write into a steam with minimal memory overhead, // create a Builder for a StreamWriter. Json::StreamWriterBuilder builder; diff --git a/include/json/writer.h b/include/json/writer.h index be8ea385a..c4fd6ed25 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -29,9 +29,9 @@ class Value; Usage: \code using namespace Json; - void writeToStdout(StreamWriter::Builder const& builder, Value const& value) { + void writeToStdout(StreamWriter::Factory const& factory, Value const& value) { std::unique_ptr const writer( - builder.newStreamWriter(&std::cout)); + factory.newStreamWriter(&std::cout)); writer->write(value); std::cout << std::endl; // add lf and flush } @@ -76,7 +76,7 @@ std::string writeString(Value const& root, StreamWriter::Factory const& factory) \code using namespace Json; Value value = ...; - StreamWriter::Builder builder; + StreamWriterBuilder builder; builder.cs_ = StreamWriter::CommentStyle::None; std::shared_ptr writer( builder.newStreamWriter(&std::cout)); @@ -154,7 +154,7 @@ class JSON_API OldCompressingStreamWriterBuilder : public StreamWriter::Factory }; /** \brief Abstract class for writers. - * \deprecated Use StreamWriter::Builder. + * \deprecated Use StreamWriter. */ class JSON_API Writer { public: @@ -170,7 +170,7 @@ class JSON_API Writer { *consumption, * but may be usefull to support feature such as RPC where bandwith is limited. * \sa Reader, Value - * \deprecated Use StreamWriter::Builder. + * \deprecated Use OldCompressingStreamWriterBuilder. */ class JSON_API FastWriter : public Writer { public: @@ -222,7 +222,7 @@ class JSON_API FastWriter : public Writer { *#CommentPlacement. * * \sa Reader, Value, Value::setComment() - * \deprecated Use StreamWriter::Builder. + * \deprecated Use StreamWriterBuilder. */ class JSON_API StyledWriter : public Writer { public: @@ -284,7 +284,7 @@ class JSON_API StyledWriter : public Writer { * * \param indentation Each level will be indented by this amount extra. * \sa Reader, Value, Value::setComment() - * \deprecated Use StreamWriter::Builder. + * \deprecated Use StreamWriterBuilder. */ class JSON_API StyledStreamWriter { public: From 5e8595c0e2861ad9c8b0b9e4ba9a8acc29fbfdb1 Mon Sep 17 00:00:00 2001 From: Peter Spiess-Knafl Date: Tue, 27 Jan 2015 20:01:42 +0000 Subject: [PATCH 097/885] added cmake option to build static and shared libraries at once See #147 and #149. 
--- README.md | 2 +- src/jsontestrunner/CMakeLists.txt | 8 +++- src/lib_json/CMakeLists.txt | 72 +++++++++++++++++++------------ src/test_lib_json/CMakeLists.txt | 7 ++- 4 files changed, 58 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 099f17fa2..cf8b3dbdd 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,7 @@ Alternatively, from the command-line on Unix in the source directory: mkdir -p build/debug cd build/debug - cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../.. + cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_STATIC=ON -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../.. make Running `cmake -`" will display the list of available generators (passed using diff --git a/src/jsontestrunner/CMakeLists.txt b/src/jsontestrunner/CMakeLists.txt index dd8e2175f..7faf0a5d8 100644 --- a/src/jsontestrunner/CMakeLists.txt +++ b/src/jsontestrunner/CMakeLists.txt @@ -7,7 +7,13 @@ ENDIF(JSONCPP_LIB_BUILD_SHARED) ADD_EXECUTABLE(jsontestrunner_exe main.cpp ) -TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib) + +IF(JSONCPP_LIB_BUILD_SHARED) + TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib) +ELSE(JSONCPP_LIB_BUILD_SHARED) + TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe) IF(PYTHONINTERP_FOUND) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index d0f6a5ea5..133d110f8 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -1,15 +1,10 @@ OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF) +OPTION(JSONCPP_LIB_BUILD_STATIC "Build jsoncpp_lib static library." ON) + IF(BUILD_SHARED_LIBS) SET(JSONCPP_LIB_BUILD_SHARED ON) ENDIF(BUILD_SHARED_LIBS) -IF(JSONCPP_LIB_BUILD_SHARED) - SET(JSONCPP_LIB_TYPE SHARED) - ADD_DEFINITIONS( -DJSON_DLL_BUILD ) -ELSE(JSONCPP_LIB_BUILD_SHARED) - SET(JSONCPP_LIB_TYPE STATIC) -ENDIF(JSONCPP_LIB_BUILD_SHARED) - if( CMAKE_COMPILER_IS_GNUCXX ) #Get compiler version. 
execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion @@ -36,25 +31,52 @@ SET( PUBLIC_HEADERS SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} ) -ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} - ${PUBLIC_HEADERS} - json_tool.h - json_reader.cpp - json_batchallocator.h - json_valueiterator.inl - json_value.cpp - json_writer.cpp - version.h.in - ) -SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) -SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} ) +SET(jsoncpp_sources + json_tool.h + json_reader.cpp + json_batchallocator.h + json_valueiterator.inl + json_value.cpp + json_writer.cpp + version.h.in) + +IF(JSONCPP_LIB_BUILD_SHARED) + ADD_DEFINITIONS( -DJSON_DLL_BUILD ) + ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources}) + SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR}) + SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) + + INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT} + RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR} + LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR} + ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}) + + IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC + $ + $) + ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) -IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) - TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC +ENDIF() + +IF(JSONCPP_LIB_BUILD_STATIC) + ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources}) + SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR}) + SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp ) + + INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT} + RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR} + LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR} + ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}) + + IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC $ $ ) -ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) + +ENDIF() # Install instructions for this target IF(JSONCPP_WITH_CMAKE_PACKAGE) @@ -62,9 +84,3 @@ IF(JSONCPP_WITH_CMAKE_PACKAGE) ELSE(JSONCPP_WITH_CMAKE_PACKAGE) SET(INSTALL_EXPORT) ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) - -INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT} - RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR} - LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR} - ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR} -) diff --git a/src/test_lib_json/CMakeLists.txt b/src/test_lib_json/CMakeLists.txt index 24dc56ff2..df5a92467 100644 --- a/src/test_lib_json/CMakeLists.txt +++ b/src/test_lib_json/CMakeLists.txt @@ -9,7 +9,12 @@ ADD_EXECUTABLE( jsoncpp_test main.cpp ) -TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib) + +IF(JSONCPP_LIB_BUILD_SHARED) + TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib) +ELSE(JSONCPP_LIB_BUILD_SHARED) + TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static) +ENDIF(JSONCPP_LIB_BUILD_SHARED) # another way to solve issue #90 #set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store) From 198cc350c57f59f9fb4e11f73eff1d2ccd984003 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Thu, 29 Jan 2015 12:57:02 -0600 Subject: [PATCH 098/885] drop scoped enum, for pre-C++11 compatibility --- include/json/writer.h | 15 +++++++++------ src/lib_json/json_writer.cpp | 6 +++--- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/include/json/writer.h 
b/include/json/writer.h index c4fd6ed25..4b8a89ccc 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -41,11 +41,14 @@ class JSON_API StreamWriter { protected: std::ostream& sout_; // not owned; will not delete public: - /// Decide whether to write comments. - enum class CommentStyle { - None, ///< Drop all comments. - Most, ///< Recover odd behavior of previous versions (not implemented yet). - All ///< Keep all comments. + /// Scoped enums are not available until C++11. + struct CommentStyle { + /// Decide whether to write comments. + enum Enum { + None, ///< Drop all comments. + Most, ///< Recover odd behavior of previous versions (not implemented yet). + All ///< Keep all comments. + }; }; /// Keep a reference, but do not take ownership of `sout`. @@ -92,7 +95,7 @@ class JSON_API StreamWriterBuilder : public StreamWriter::Factory { /** \brief How to write comments. * Default: All */ - StreamWriter::CommentStyle cs_; + StreamWriter::CommentStyle::Enum cs_; /** \brief Write in human-friendly style. If "", then skip all indentation and newlines. diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 6e0a429bc..6bb39010b 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -673,7 +673,7 @@ struct BuiltStyledStreamWriter : public StreamWriter BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, - StreamWriter::CommentStyle cs, + StreamWriter::CommentStyle::Enum cs, std::string const& colonSymbol, std::string const& nullSymbol, std::string const& endingLineFeedSymbol); @@ -697,7 +697,7 @@ struct BuiltStyledStreamWriter : public StreamWriter std::string indentString_; int rightMargin_; std::string indentation_; - CommentStyle cs_; + CommentStyle::Enum cs_; std::string colonSymbol_; std::string nullSymbol_; std::string endingLineFeedSymbol_; @@ -707,7 +707,7 @@ struct BuiltStyledStreamWriter : public StreamWriter BuiltStyledStreamWriter::BuiltStyledStreamWriter( std::ostream* sout, std::string const& indentation, - StreamWriter::CommentStyle cs, + StreamWriter::CommentStyle::Enum cs, std::string const& colonSymbol, std::string const& nullSymbol, std::string const& endingLineFeedSymbol) From fe855fb4dd4bf61f75bc500d60f647814cdeb772 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 2 Feb 2015 15:32:33 -0600 Subject: [PATCH 099/885] drop nullptr See issue #153. --- src/test_lib_json/jsontest.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test_lib_json/jsontest.h b/src/test_lib_json/jsontest.h index 127b7c221..d6b7cf386 100644 --- a/src/test_lib_json/jsontest.h +++ b/src/test_lib_json/jsontest.h @@ -214,7 +214,7 @@ TestResult& checkStringEqual(TestResult& result, #define JSONTEST_ASSERT_PRED(expr) \ { \ JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr, nullptr, nullptr \ + result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \ }; \ result_->predicateStackTail_->next_ = &_minitest_Context; \ result_->predicateId_ += 1; \ From 636121485c4a0798521e441d17e2c0e0c23df19b Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 7 Feb 2015 11:34:40 -0600 Subject: [PATCH 100/885] fix JSONCPP_WITH_CMAKE_PACKAGE #155 mv JSONCPP_WITH_CMAKE_PACKAGE ahead of INSTALL def. 
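Dropping the C++11 `enum class` in favor of a plain enum nested in a struct keeps the caller-side spelling unchanged while restoring pre-C++11 builds. A small sketch against the writer.h declarations above, not part of the patch itself (the helper name is illustrative; the builder members are public data, and writeString() is the free-function wrapper declared in writer.h):

    #include <json/value.h>
    #include <json/writer.h>
    #include <string>

    std::string toCompactString(const Json::Value& root) {
      Json::StreamWriterBuilder builder;
      // CommentStyle::None now names an enumerator of the nested plain enum,
      // so this line compiles the same way before and after C++11.
      builder.cs_ = Json::StreamWriter::CommentStyle::None;
      builder.indentation_ = "";  // no indentation or newlines
      return Json::writeString(root, builder);
    }
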
--- src/lib_json/CMakeLists.txt | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/lib_json/CMakeLists.txt b/src/lib_json/CMakeLists.txt index 133d110f8..856b164d6 100644 --- a/src/lib_json/CMakeLists.txt +++ b/src/lib_json/CMakeLists.txt @@ -40,6 +40,13 @@ SET(jsoncpp_sources json_writer.cpp version.h.in) +# Install instructions for this target +IF(JSONCPP_WITH_CMAKE_PACKAGE) + SET(INSTALL_EXPORT EXPORT jsoncpp) +ELSE(JSONCPP_WITH_CMAKE_PACKAGE) + SET(INSTALL_EXPORT) +ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) + IF(JSONCPP_LIB_BUILD_SHARED) ADD_DEFINITIONS( -DJSON_DLL_BUILD ) ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources}) @@ -77,10 +84,3 @@ IF(JSONCPP_LIB_BUILD_STATIC) ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11) ENDIF() - -# Install instructions for this target -IF(JSONCPP_WITH_CMAKE_PACKAGE) - SET(INSTALL_EXPORT EXPORT jsoncpp) -ELSE(JSONCPP_WITH_CMAKE_PACKAGE) - SET(INSTALL_EXPORT) -ENDIF(JSONCPP_WITH_CMAKE_PACKAGE) From 41edda5ebea4fb5f929afdbe2918894cef964b63 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sat, 7 Feb 2015 12:18:20 -0600 Subject: [PATCH 101/885] JSONCPP_WITH_CMAKE_PACKAGE in Travis --- .travis.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index a913b0958..0509696d6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,12 +7,11 @@ language: cpp compiler: - gcc - clang -script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make +script: cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make env: matrix: - - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false - - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false - - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true + - SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false + - SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE notifications: email: - aaronjjacobs@gmail.com From dee4602b8f0a322c4716c3e1508f1e20c760302f Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Sun, 8 Feb 2015 11:29:52 -0600 Subject: [PATCH 102/885] rm unique_ptr<>/shared_ptr<>, for pre-C++11 --- src/lib_json/json_writer.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 6bb39010b..2204a4c08 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -34,6 +34,12 @@ namespace Json { +#if __cplusplus >= 201103L +typedef std::unique_ptr StreamWriterPtr; +#else +typedef std::auto_ptr StreamWriterPtr; +#endif + static bool containsControlCharacter(const char* str) { while (*str) { if (isControlCharacter(*(str++))) @@ -1017,14 +1023,14 @@ StreamWriter* OldCompressingStreamWriterBuilder::newStreamWriter( std::string writeString(Value const& root, StreamWriter::Factory const& builder) { std::ostringstream sout; - std::unique_ptr const sw(builder.newStreamWriter(&sout)); + StreamWriterPtr const sw(builder.newStreamWriter(&sout)); sw->write(root); return sout.str(); } std::ostream& operator<<(std::ostream& sout, Value const& root) { StreamWriterBuilder builder; - std::shared_ptr writer(builder.newStreamWriter(&sout)); + StreamWriterPtr const writer(builder.newStreamWriter(&sout)); writer->write(root); return sout; } From 2c1197c2c89aa7b79352e7c34d57e9a9eb1a6803 Mon Sep 17 
00:00:00 2001 From: Christopher Dunn Date: Thu, 29 Jan 2015 14:29:40 -0600 Subject: [PATCH 103/885] CharReader/Builder * CharReaderBuilder is similar to StreamWriterBuilder. * use rdbuf(), since getline(string) is not required to handle EOF as delimiter --- include/json/reader.h | 64 ++++++++++++++++++++++++- src/lib_json/json_reader.cpp | 62 ++++++++++++++++++++++-- src/test_lib_json/main.cpp | 92 ++++++++++++++++++++++++++++++++++++ 3 files changed, 214 insertions(+), 4 deletions(-) diff --git a/include/json/reader.h b/include/json/reader.h index bd2204be4..1111a7bd0 100644 --- a/include/json/reader.h +++ b/include/json/reader.h @@ -14,6 +14,7 @@ #include #include #include +#include // Disable warning C4251: : needs to have dll-interface to // be used by... @@ -78,7 +79,7 @@ class JSON_API Reader { document to read. * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. - \ Must be >= beginDoc. + * Must be >= beginDoc. * \param root [out] Contains the root value of the document if it was * successfully parsed. * \param collectComments \c true to collect comment and allow writing them @@ -238,8 +239,69 @@ class JSON_API Reader { std::string commentsBefore_; Features features_; bool collectComments_; +}; // Reader + +/** Interface for reading JSON from a char array. + */ +class JSON_API CharReader { +public: + virtual ~CharReader() {} + /** \brief Read a Value from a JSON + document. + * The document must be a UTF-8 encoded string containing the document to read. + * + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the + document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the + document to read. + * Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param errs [out] Formatted error messages (if not NULL) + * a user friendly string that lists errors in the parsed + * document. + * \return \c true if the document was successfully parsed, \c false if an + error occurred. + */ + virtual bool parse( + char const* beginDoc, char const* endDoc, + Value* root, std::string* errs) = 0; + + class Factory { + public: + /// \brief Allocate a CharReader via operator new(). + virtual CharReader* newCharReader() const = 0; + }; // Factory +}; // CharReader + +class CharReaderBuilder : public CharReader::Factory { + bool collectComments_; + Features features_; +public: + CharReaderBuilder(); + + CharReaderBuilder& withCollectComments(bool v) { + collectComments_ = v; + return *this; + } + + CharReaderBuilder& withFeatures(Features const& v) { + features_ = v; + return *this; + } + + virtual CharReader* newCharReader() const; }; +/** Consume entire stream and use its begin/end. + * Someday we might have a real StreamReader, but for now this + * is convenient. + */ +bool parseFromStream( + CharReader::Factory const&, + std::istream&, + Value* root, std::string* errs); + /** \brief Read from 'sin' into 'root'. Always keep comments from the input JSON. 
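CharReader and CharReaderBuilder mirror StreamWriter and its builder on the reading side. A minimal caller-side sketch against the declarations above, not part of the patch (the parseDoc helper is illustrative); note that at this point the builder still exposes the fluent withCollectComments()/withFeatures() setters, which a later patch in this series replaces with public members:

    #include <json/reader.h>
    #include <json/value.h>
    #include <cstring>
    #include <iostream>
    #include <string>

    bool parseDoc(const char* doc, Json::Value* root) {
      Json::CharReaderBuilder builder;
      builder.withCollectComments(true);  // keep comments, as Reader::parse(..., true) did
      Json::CharReader* reader = builder.newCharReader();  // caller owns the reader
      std::string errs;
      bool ok = reader->parse(doc, doc + std::strlen(doc), root, &errs);
      if (!ok)
        std::cerr << errs;  // formatted error messages when parsing fails
      delete reader;
      return ok;
    }

For whole streams, parseFromStream(builder, std::cin, &root, &errs) buffers the input via rdbuf() and forwards to the same parse() call, which is what the reworked operator>> in the next hunk does.
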
diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index d2cff9a1c..119c3dac8 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -14,6 +14,8 @@ #include #include #include +#include +#include #if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below #define snprintf _snprintf @@ -26,6 +28,12 @@ namespace Json { +#if __cplusplus >= 201103L +typedef std::unique_ptr CharReaderPtr; +#else +typedef std::auto_ptr CharReaderPtr; +#endif + // Implementation of class Features // //////////////////////////////// @@ -882,13 +890,61 @@ bool Reader::good() const { return !errors_.size(); } +class OldReader : public CharReader { + bool const collectComments_; + Reader reader_; +public: + OldReader( + bool collectComments, + Features const& features) + : collectComments_(collectComments) + , reader_(features) + {} + virtual bool parse( + char const* beginDoc, char const* endDoc, + Value* root, std::string* errs) { + bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_); + if (errs) { + *errs = reader_.getFormattedErrorMessages(); + } + return ok; + } +}; + +CharReaderBuilder::CharReaderBuilder() + : collectComments_(true) + , features_(Features::all()) +{} +CharReader* CharReaderBuilder::newCharReader() const +{ + return new OldReader(collectComments_, features_); +} + +////////////////////////////////// +// global functions + +bool parseFromStream( + CharReader::Factory const& fact, std::istream& sin, + Value* root, std::string* errs) +{ + std::ostringstream ssin; + ssin << sin.rdbuf(); + std::string doc = ssin.str(); + char const* begin = doc.data(); + char const* end = begin + doc.size(); + // Note that we do not actually need a null-terminator. + CharReaderPtr const reader(fact.newCharReader()); + return reader->parse(begin, end, root, errs); +} + std::istream& operator>>(std::istream& sin, Value& root) { - Json::Reader reader; - bool ok = reader.parse(sin, root, true); + CharReaderBuilder b; + std::string errs; + bool ok = parseFromStream(b, sin, &root, &errs); if (!ok) { fprintf(stderr, "Error from reader: %s", - reader.getFormattedErrorMessages().c_str()); + errs.c_str()); JSON_FAIL_MESSAGE("reader error"); } diff --git a/src/test_lib_json/main.cpp b/src/test_lib_json/main.cpp index 8af3e19ef..e57fbc3ca 100644 --- a/src/test_lib_json/main.cpp +++ b/src/test_lib_json/main.cpp @@ -7,6 +7,7 @@ #include #include #include +#include // Make numeric limits more convenient to talk about. // Assumes int type in 32 bits. 
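With operator>> now routed through CharReaderBuilder and parseFromStream(), and operator<< already going through StreamWriterBuilder, plain stream extraction and insertion exercise both new builders. A small illustrative round-trip sketch (the sample JSON is made up), assuming only the operators shown in these patches; note that operator>> calls JSON_FAIL_MESSAGE on malformed input, so errors surface as an exception or assertion depending on the library's failure configuration:

    #include <json/json.h>
    #include <iostream>
    #include <sstream>

    int main() {
      std::istringstream in("{ \"encoding\" : \"UTF-8\", \"retries\" : 3 }");
      Json::Value root;
      in >> root;                      // parses via CharReaderBuilder/parseFromStream()
      root["retries"] = 5;
      std::cout << root << std::endl;  // writes with StreamWriterBuilder defaults
      return 0;
    }
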
@@ -1617,6 +1618,90 @@ JSONTEST_FIXTURE(ReaderTest, parseWithDetailError) { JSONTEST_ASSERT(errors.at(0).message == "Bad escape sequence in string"); } +struct CharReaderTest : JsonTest::TestCase {}; + +JSONTEST_FIXTURE(CharReaderTest, parseWithNoErrors) { + Json::CharReaderBuilder b; + Json::CharReader* reader(b.newCharReader()); + std::string errs; + Json::Value root; + char const doc[] = "{ \"property\" : \"value\" }"; + bool ok = reader->parse( + doc, doc + std::strlen(doc), + &root, &errs); + JSONTEST_ASSERT(ok); + JSONTEST_ASSERT(errs.size() == 0); + delete reader; +} + +JSONTEST_FIXTURE(CharReaderTest, parseWithNoErrorsTestingOffsets) { + Json::CharReaderBuilder b; + Json::CharReader* reader(b.newCharReader()); + std::string errs; + Json::Value root; + char const doc[] = + "{ \"property\" : [\"value\", \"value2\"], \"obj\" : " + "{ \"nested\" : 123, \"bool\" : true}, \"null\" : " + "null, \"false\" : false }"; + bool ok = reader->parse( + doc, doc + std::strlen(doc), + &root, &errs); + JSONTEST_ASSERT(ok); + JSONTEST_ASSERT(errs.size() == 0); + delete reader; +} + +JSONTEST_FIXTURE(CharReaderTest, parseWithOneError) { + Json::CharReaderBuilder b; + Json::CharReader* reader(b.newCharReader()); + std::string errs; + Json::Value root; + char const doc[] = + "{ \"property\" :: \"value\" }"; + bool ok = reader->parse( + doc, doc + std::strlen(doc), + &root, &errs); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(errs == + "* Line 1, Column 15\n Syntax error: value, object or array " + "expected.\n"); + delete reader; +} + +JSONTEST_FIXTURE(CharReaderTest, parseChineseWithOneError) { + Json::CharReaderBuilder b; + Json::CharReader* reader(b.newCharReader()); + std::string errs; + Json::Value root; + char const doc[] = + "{ \"pr佐藤erty\" :: \"value\" }"; + bool ok = reader->parse( + doc, doc + std::strlen(doc), + &root, &errs); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(errs == + "* Line 1, Column 19\n Syntax error: value, object or array " + "expected.\n"); + delete reader; +} + +JSONTEST_FIXTURE(CharReaderTest, parseWithDetailError) { + Json::CharReaderBuilder b; + Json::CharReader* reader(b.newCharReader()); + std::string errs; + Json::Value root; + char const doc[] = + "{ \"property\" : \"v\\alue\" }"; + bool ok = reader->parse( + doc, doc + std::strlen(doc), + &root, &errs); + JSONTEST_ASSERT(!ok); + JSONTEST_ASSERT(errs == + "* Line 1, Column 16\n Bad escape sequence in string\nSee " + "Line 1, Column 20 for detail.\n"); + delete reader; +} + int main(int argc, const char* argv[]) { JsonTest::Runner runner; JSONTEST_REGISTER_FIXTURE(runner, ValueTest, checkNormalizeFloatingPointStr); @@ -1647,6 +1732,13 @@ int main(int argc, const char* argv[]) { JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseChineseWithOneError); JSONTEST_REGISTER_FIXTURE(runner, ReaderTest, parseWithDetailError); + JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithNoErrors); + JSONTEST_REGISTER_FIXTURE( + runner, CharReaderTest, parseWithNoErrorsTestingOffsets); + JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithOneError); + JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseChineseWithOneError); + JSONTEST_REGISTER_FIXTURE(runner, CharReaderTest, parseWithDetailError); + JSONTEST_REGISTER_FIXTURE(runner, WriterTest, dropNullPlaceholders); return runner.runCommandLine(argc, argv); From 249fd181143c2d189a9653f811a682e092a0ceaa Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 9 Feb 2015 00:46:20 -0600 Subject: [PATCH 104/885] put version into docs --- doc/doxyfile.in | 2 +- 
doc/jsoncpp.dox | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/doxyfile.in b/doc/doxyfile.in index a4161865c..bb30c3522 100644 --- a/doc/doxyfile.in +++ b/doc/doxyfile.in @@ -819,7 +819,7 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). -EXAMPLE_PATH = +EXAMPLE_PATH = .. # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 0581476c6..0954974da 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -140,4 +140,6 @@ Basically JsonCpp is licensed under MIT license, or public domain if desired and recognized in your jurisdiction. \author Baptiste Lepilleur (originator) +\version \include version +\sa version.h */ From 66a8ba255f2d53acafafe3de2cff26ea060447ef Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 9 Feb 2015 01:29:43 -0600 Subject: [PATCH 105/885] clarify Builders --- doc/jsoncpp.dox | 14 +++----------- include/json/reader.h | 28 +++++++++++++++++----------- include/json/writer.h | 3 ++- src/lib_json/json_reader.cpp | 2 ++ src/lib_json/json_writer.cpp | 2 ++ 5 files changed, 26 insertions(+), 23 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 0954974da..1b246eff0 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -50,16 +50,8 @@ preserved. \section _example Code example \code -Json::Value root; // will contain the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} +Json::Value root; // 'root' will contain the root value after parsing. +std::cin >> root; // Or see CharReaderBuilder. // Get the value of the member of root named 'encoding', return 'UTF-8' if there is no // such member. @@ -80,7 +72,7 @@ root["indent"]["length"] = getCurrentIndentLength(); root["indent"]["use_space"] = getCurrentIndentUseSpace(); // (NEW IN 1.4.0) -// To write into a steam with minimal memory overhead, +// To write into a stream with minimal memory overhead, // create a Builder for a StreamWriter. Json::StreamWriterBuilder builder; builder.indentation_ = " "; // or whatever you like diff --git a/include/json/reader.h b/include/json/reader.h index 1111a7bd0..7dc97615b 100644 --- a/include/json/reader.h +++ b/include/json/reader.h @@ -274,21 +274,27 @@ class JSON_API CharReader { }; // Factory }; // CharReader +/** \brief Build a CharReader implementation. 
+ +Usage: +\code + using namespace Json; + CharReaderBuilder builder; + builder.collectComments_ = true; + std::shared_ptr reader( + builder.newCharReader()); + Value value; + std::string errs; + bool ok = parseFromStream(std::cin, &value, &errs); +\endcode +*/ class CharReaderBuilder : public CharReader::Factory { +public: bool collectComments_; Features features_; -public: - CharReaderBuilder(); - - CharReaderBuilder& withCollectComments(bool v) { - collectComments_ = v; - return *this; - } - CharReaderBuilder& withFeatures(Features const& v) { - features_ = v; - return *this; - } + CharReaderBuilder(); + virtual ~CharReaderBuilder(); virtual CharReader* newCharReader() const; }; diff --git a/include/json/writer.h b/include/json/writer.h index 4b8a89ccc..871287f14 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -105,9 +105,10 @@ class JSON_API StreamWriterBuilder : public StreamWriter::Factory { std::string indentation_; StreamWriterBuilder(); + virtual ~StreamWriterBuilder(); /// Do not take ownership of sout, but maintain a reference. - StreamWriter* newStreamWriter(std::ostream* sout) const; + virtual StreamWriter* newStreamWriter(std::ostream* sout) const; }; /** \brief Build a StreamWriter implementation. diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index 119c3dac8..410e793af 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -915,6 +915,8 @@ CharReaderBuilder::CharReaderBuilder() : collectComments_(true) , features_(Features::all()) {} +CharReaderBuilder::~CharReaderBuilder() +{} CharReader* CharReaderBuilder::newCharReader() const { return new OldReader(collectComments_, features_); diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 2204a4c08..036bfe827 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -971,6 +971,8 @@ StreamWriterBuilder::StreamWriterBuilder() : cs_(StreamWriter::CommentStyle::All) , indentation_("\t") {} +StreamWriterBuilder::~StreamWriterBuilder() +{} StreamWriter* StreamWriterBuilder::newStreamWriter(std::ostream* stream) const { std::string colonSymbol = " : "; From 6451412c994a3ad721b010533a6a103751c30fb1 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 9 Feb 2015 09:44:26 -0600 Subject: [PATCH 106/885] simplify basic docs --- doc/jsoncpp.dox | 18 ++---------------- include/json/writer.h | 5 ++--- 2 files changed, 4 insertions(+), 19 deletions(-) diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 1b246eff0..5e3489715 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -71,21 +71,8 @@ root["encoding"] = getCurrentEncoding(); root["indent"]["length"] = getCurrentIndentLength(); root["indent"]["use_space"] = getCurrentIndentUseSpace(); -// (NEW IN 1.4.0) -// To write into a stream with minimal memory overhead, -// create a Builder for a StreamWriter. -Json::StreamWriterBuilder builder; -builder.indentation_ = " "; // or whatever you like - -// Then build a StreamWriter. -std::shared_ptr writer( - builder.newStreamWriter( &std::cout ) ); - -// Make a new JSON document for the configuration. Preserve original comments. -writer->write( root ); - // If you like the defaults, you can insert directly into a stream. -std::cout << root; +std::cout << root; // Or see StreamWriterBuilder. // If desired, remember to add a linefeed and flush. std::cout << std::endl; @@ -94,8 +81,7 @@ std::cout << std::endl; // use `writeString()` for convenience. 
std::string document = Json::writeString( root, builder ); -// You can also read from a stream. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. +// You can also read into a particular sub-value. std::cin >> root["subtree"]; \endcode diff --git a/include/json/writer.h b/include/json/writer.h index 871287f14..5dffbe079 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -81,8 +81,7 @@ std::string writeString(Value const& root, StreamWriter::Factory const& factory) Value value = ...; StreamWriterBuilder builder; builder.cs_ = StreamWriter::CommentStyle::None; - std::shared_ptr writer( - builder.newStreamWriter(&std::cout)); + builder.indentation_ = " "; // or whatever you like writer->write(value); std::cout << std::endl; // add lf and flush \endcode @@ -120,7 +119,7 @@ class JSON_API StreamWriterBuilder : public StreamWriter::Factory { * OldCompressingStreamWriterBuilder b; * b.dropNullPlaceHolders_ = true; // etc. * StreamWriter* w = b.newStreamWriter(&std::cout); - * w.write(value); + * w->write(value); * delete w; * \endcode */ From 3a65581b20af9c3ea286d8c1e473408ba15026de Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 9 Feb 2015 09:54:26 -0600 Subject: [PATCH 107/885] drop an old impl --- src/lib_json/json_writer.cpp | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index 036bfe827..44aa9ecaa 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -947,24 +947,6 @@ StreamWriter::StreamWriter(std::ostream* sout) StreamWriter::~StreamWriter() { } -struct MyStreamWriter : public StreamWriter { -public: - MyStreamWriter(std::ostream* sout); - virtual ~MyStreamWriter(); - virtual int write(Value const& root) = 0; -}; -MyStreamWriter::MyStreamWriter(std::ostream* sout) - : StreamWriter(sout) -{ -} -MyStreamWriter::~MyStreamWriter() -{ -} -int MyStreamWriter::write(Value const& root) -{ - sout_ << root; - return 0; -} StreamWriter::Factory::~Factory() {} StreamWriterBuilder::StreamWriterBuilder() From ce799b3aa3715dc7e0cf653f324a250fd4353916 Mon Sep 17 00:00:00 2001 From: Christopher Dunn Date: Mon, 9 Feb 2015 10:36:55 -0600 Subject: [PATCH 108/885] copy doxyfile.in --- doc/web_doxyfile.in | 2302 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 2302 insertions(+) create mode 100644 doc/web_doxyfile.in diff --git a/doc/web_doxyfile.in b/doc/web_doxyfile.in new file mode 100644 index 000000000..bb30c3522 --- /dev/null +++ b/doc/web_doxyfile.in @@ -0,0 +1,2302 @@ +# Doxyfile 1.8.5 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. 
Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify an logo or icon that is included in +# the documentation. The maximum height of the logo should not exceed 55 pixels +# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo +# to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese- +# Traditional, Croatian, Czech, Danish, Dutch, English, Esperanto, Farsi, +# Finnish, French, German, Greek, Hungarian, Italian, Japanese, Japanese-en, +# Korean, Korean-en, Latvian, Norwegian, Macedonian, Persian, Polish, +# Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, Swedish, +# Turkish, Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. 
+ +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. 
If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a +# new page for each member. If set to NO, the documentation of a member will be +# part of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. 
+ +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make +# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C +# (default is Fortran), use: inc=Fortran f=C. +# +# Note For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by by putting a % sign in front of the word +# or globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. 
+ +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will +# be included in the documentation. +# The default value is: NO. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO these classes will be included in the various overviews. This option has +# no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES the +# scope will be hidden. 
+# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = YES + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the +# todo list. This list is created by putting \todo commands in the +# documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the +# test list. This list is created by putting \test commands in the +# documentation. +# The default value is: YES. 
+ +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES the list +# will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. 
See also http://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. Do not use file names with spaces, bibtex cannot handle them. See +# also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO doxygen will only warn about wrong or incomplete parameter +# documentation, but not about the absence of documentation. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. +# Note: If this tag is empty the current directory is searched. + +INPUT = ../include \ + ../src/lib_json \ + . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. 
Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank the +# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, +# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, +# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, +# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, +# *.qsf, *.as and *.js. + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = .. + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER ) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+ +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES, then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = NO + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output +# The default value is: YES. 
+ +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- +# defined cascading style sheet that is included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefor more robust against future updates. +# Doxygen will copy the style sheet file to the output directory. For an example +# see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. 
Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the stylesheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to NO can help when comparing the output of multiple runs. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = YES + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). 
To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = %HTML_HELP% + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler ( hhc.exe). If non-empty +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
+ +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# The GENERATE_CHI flag controls if a separate .chi index file is generated ( +# YES) or that it should be included in the master .chm file ( NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = YES + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated ( +# YES) or a normal table of contents ( NO) in the .chm file. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. 
If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +TREEVIEW_WIDTH = 250 + +# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# http://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using prerendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from http://www.mathjax.org before deployment. +# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. 
For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# , /