From 8ecf57e867ddc4de789ebce775300e65e3e2c0b8 Mon Sep 17 00:00:00 2001 From: L2501 Date: Sun, 16 Jul 2023 05:31:03 +0000 Subject: [PATCH] [script.module.simplejson] 3.19.1+matrix.1 --- script.module.simplejson/CHANGES.txt | 662 ---- script.module.simplejson/README.rst | 40 - script.module.simplejson/addon.xml | 18 +- .../lib/simplejson/__init__.py | 110 +- .../lib/simplejson/_speedups.c | 3384 ----------------- .../lib/simplejson/decoder.py | 80 +- .../lib/simplejson/encoder.py | 86 +- .../lib/simplejson/scanner.py | 6 +- .../lib/simplejson/tests/__init__.py | 74 - .../simplejson/tests/test_bigint_as_string.py | 67 - .../tests/test_bitsize_int_as_string.py | 73 - .../simplejson/tests/test_check_circular.py | 30 - .../lib/simplejson/tests/test_decimal.py | 71 - .../lib/simplejson/tests/test_decode.py | 119 - .../lib/simplejson/tests/test_default.py | 9 - .../lib/simplejson/tests/test_dump.py | 249 -- .../tests/test_encode_basestring_ascii.py | 47 - .../simplejson/tests/test_encode_for_html.py | 38 - .../lib/simplejson/tests/test_errors.py | 68 - .../lib/simplejson/tests/test_fail.py | 176 - .../lib/simplejson/tests/test_float.py | 35 - .../lib/simplejson/tests/test_for_json.py | 97 - .../lib/simplejson/tests/test_indent.py | 86 - .../simplejson/tests/test_item_sort_key.py | 27 - .../lib/simplejson/tests/test_iterable.py | 31 - .../lib/simplejson/tests/test_namedtuple.py | 122 - .../lib/simplejson/tests/test_pass1.py | 71 - .../lib/simplejson/tests/test_pass2.py | 14 - .../lib/simplejson/tests/test_pass3.py | 20 - .../lib/simplejson/tests/test_raw_json.py | 47 - .../lib/simplejson/tests/test_recursion.py | 67 - .../lib/simplejson/tests/test_scanstring.py | 196 - .../lib/simplejson/tests/test_separators.py | 42 - .../lib/simplejson/tests/test_speedups.py | 114 - .../lib/simplejson/tests/test_str_subclass.py | 21 - .../lib/simplejson/tests/test_subclass.py | 37 - .../lib/simplejson/tests/test_tool.py | 114 - .../lib/simplejson/tests/test_tuple.py | 47 - .../lib/simplejson/tests/test_unicode.py | 154 - .../lib/simplejson/tool.py | 42 - .../{ => resources}/icon.png | Bin 41 files changed, 153 insertions(+), 6638 deletions(-) delete mode 100644 script.module.simplejson/CHANGES.txt delete mode 100644 script.module.simplejson/README.rst delete mode 100644 script.module.simplejson/lib/simplejson/_speedups.c delete mode 100644 script.module.simplejson/lib/simplejson/tests/__init__.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_bigint_as_string.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_bitsize_int_as_string.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_check_circular.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_decimal.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_decode.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_default.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_dump.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_encode_basestring_ascii.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_encode_for_html.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_errors.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_fail.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_float.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_for_json.py delete mode 100644 
script.module.simplejson/lib/simplejson/tests/test_indent.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_item_sort_key.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_iterable.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_namedtuple.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_pass1.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_pass2.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_pass3.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_raw_json.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_recursion.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_scanstring.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_separators.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_speedups.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_str_subclass.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_subclass.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_tool.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_tuple.py delete mode 100644 script.module.simplejson/lib/simplejson/tests/test_unicode.py delete mode 100644 script.module.simplejson/lib/simplejson/tool.py rename script.module.simplejson/{ => resources}/icon.png (100%) diff --git a/script.module.simplejson/CHANGES.txt b/script.module.simplejson/CHANGES.txt deleted file mode 100644 index 40083a8aeb..0000000000 --- a/script.module.simplejson/CHANGES.txt +++ /dev/null @@ -1,662 +0,0 @@ -Version 3.16.1 released 2018-09-07 - -* Added examples for JSON lines use cases - https://github.com/simplejson/simplejson/pull/236 -* Add wheels for more Python versions and platforms - https://github.com/simplejson/simplejson/pull/234 - https://github.com/simplejson/simplejson/pull/233 - https://github.com/simplejson/simplejson/pull/231 - -Version 3.16.0 released 2018-06-28 - -* Restore old behavior with regard to the type of decoded empty - strings with speedups enabled on Python 2.x - https://github.com/simplejson/simplejson/pull/225 -* Add python_requires to setup.py to help pip - https://github.com/simplejson/simplejson/pull/224 -* Fix CSS in docs when built locally - https://github.com/simplejson/simplejson/pull/222 - -Version 3.15.0 released 2018-05-12 - -* Clean up the C code - https://github.com/simplejson/simplejson/pull/220 -* Bypass the decode() method in bytes subclasses - https://github.com/simplejson/simplejson/pull/219 -* Support builds without cStringIO - https://github.com/simplejson/simplejson/pull/217 -* Allow to disable serializing bytes by default in Python 3 - https://github.com/simplejson/simplejson/pull/216 -* Simplify the compatibility code - https://github.com/simplejson/simplejson/pull/215 -* Fix tests in Python 2.5 - https://github.com/simplejson/simplejson/pull/214 - -Version 3.14.0 released 2018-04-21 - -* Defer is_raw_json test (performance improvement) - https://github.com/simplejson/simplejson/pull/212 -* Avoid escaping U+2028 and U+2029 without ensure_ascii - https://github.com/simplejson/simplejson/pull/211 -* Fix an incorrect type test in Python 2, avoiding an unnecessary unicode copy. 
- https://github.com/simplejson/simplejson/pull/210 - -Version 3.13.2 released 2017-11-24 - -* Fix additional Python 2.x compilation issue on Windows - -Version 3.13.1 released 2017-11-24 - -* Improve CI to catch speedups build regressions -* Fix speedups build regression in Python 2.x - https://github.com/simplejson/simplejson/issues/193 - -Version 3.13.0 released 2017-11-23 - -* Workarounds for NamedTemporaryFile issues with Windows for tool tests -* Make TypeError messages contain type name instead of a repr. - https://github.com/simplejson/simplejson/pull/191 -* Ensure that encoding of text subtypes is consistent with or without speedups - https://github.com/simplejson/simplejson/issues/185 - -Version 3.12.1 released 2017-11-23 - -* Misc updates to build infrastructure -* Fix an assertion failure when make_encoder receives a bad encoder argument - https://github.com/simplejson/simplejson/pull/188 -* Fix potential crash during GC - https://github.com/simplejson/simplejson/pull/187 -* Fix a reference leak when sorting keys - https://github.com/simplejson/simplejson/pull/186 - -Version 3.12.0 released 2017-11-05 - -* Fix threaded import race condition - https://github.com/simplejson/simplejson/issues/184 -* Move RawJSON implementation to simplejson.raw_json module -* Move JSONDecodeError implementation to simplejson.errors module - -Version 3.11.1 released 2017-06-19 - -* Fix issue with item_sort_key when speedups are available, and add - auto-discovery to test suites to prevent similar regressions - https://github.com/simplejson/simplejson/issues/173 - -Version 3.11.0 released 2017-06-18 - -* docstring fix in JSONEncoder - https://github.com/simplejson/simplejson/pull/172 -* Call PyObject_IsTrue() only once for the strict argument of scanner - https://github.com/simplejson/simplejson/pull/170 -* Fix a crash with unencodable encoding in the encoder - https://github.com/simplejson/simplejson/pull/171 -* Remove unused imports - https://github.com/simplejson/simplejson/pull/162 -* Remove remnants of Python 2.4 support - https://github.com/simplejson/simplejson/pull/168 -* Fix argument checking errors in _speedups.c - https://github.com/simplejson/simplejson/pull/169 -* Remove the `__init__` methods in extension classes - https://github.com/simplejson/simplejson/pull/166 -* Fix typo in the doc for loads - https://github.com/simplejson/simplejson/issues/161 -* Add Python 3.6 to testing matrix and PyPI metadata - https://github.com/simplejson/simplejson/pull/153 - https://github.com/simplejson/simplejson/pull/152 - -Version 3.10.0 released 2016-10-28 - -* Add RawJSON class to allow a faster path for already encoded JSON. - https://github.com/simplejson/simplejson/pull/143 - -Version 3.9.0 released 2016-10-21 - -* Workaround for bad behavior in string subclasses - https://github.com/simplejson/simplejson/issues/144 -* Fix warnings flagged by -3 - https://github.com/simplejson/simplejson/pull/146 -* Update readthedocs documentation links - https://github.com/simplejson/simplejson/pull/137 -* Add build status badge to README - https://github.com/simplejson/simplejson/pull/134 - -Version 3.8.2 released 2016-02-14 - -* Fix implicit cast compiler warning in _speedups.c -* simplejson is now available as wheels for OS X and Windows thanks to Travis-CI - and AppVeyor respectively! Many thanks to @aebrahim for getting this party - started. 
- https://github.com/simplejson/simplejson/pull/130 - https://github.com/simplejson/simplejson/issues/122 - -Version 3.8.1 released 2015-10-27 - -* Fix issue with iterable_as_array and indent option - https://github.com/simplejson/simplejson/issues/128 -* Fix typo in keyword argument name introduced in 3.8.0 - https://github.com/simplejson/simplejson/pull/123 - -Version 3.8.0 released 2015-07-18 - -* New iterable_as_array encoder option to perform lazy serialization of - any iterable objects, without having to convert to tuple or list. - -Version 3.7.3 released 2015-05-31 - -* Fix typo introduced in 3.7.0 (behavior should be indistinguishable) - https://github.com/simplejson/simplejson/commit/e18cc09b688ea1f3305c27616fd3cadd2adc6d31#commitcomment-11443842 - -Version 3.7.2 released 2015-05-22 - -* Do not cache Decimal class in encoder, only reference the decimal module. - This may make reload work in more common scenarios. - -Version 3.7.1 released 2015-05-18 - -* Fix compilation with MSVC - https://github.com/simplejson/simplejson/pull/119 - -Version 3.7.0 released 2015-05-18 - -* simplejson no longer trusts custom str/repr methods for int, long, float - subclasses. These instances are now formatted as if they were exact - instances of those types. - https://github.com/simplejson/simplejson/issues/118 - -Version 3.6.5 released 2014-10-24 - -* Importing bug fix for reference leak when an error occurs during - dict encoding - https://github.com/simplejson/simplejson/issues/109 - -Version 3.6.4 released 2014-09-29 - -* Important bug fix for dump when only sort_keys is set - https://github.com/simplejson/simplejson/issues/106 - -Version 3.6.3 released 2014-08-18 - -* Documentation updates - https://github.com/simplejson/simplejson/issues/103 - -Version 3.6.2 released 2014-08-09 - -* Documentation updates - http://bugs.python.org/issue21514 - -Version 3.6.1 released 2014-08-09 - -* Documentation updates - https://github.com/simplejson/simplejson/issues/102 - -Version 3.6.0 released 2014-07-21 - -* Automatically strip any UTF-8 BOM from input to more closely - follow the latest specs - https://github.com/simplejson/simplejson/pull/101 - -Version 3.5.3 released 2014-06-24 - -* Fix lower bound checking in scan_once / raw_decode API - https://github.com/simplejson/simplejson/issues/98 - -Version 3.5.2 released 2014-05-22 - -* Fix Windows build with VS2008 - https://github.com/simplejson/simplejson/pull/97 - -Version 3.5.1 released 2014-05-21 - -* Consistently reject int_as_string_bitcount settings that are not - positive integers - -Version 3.5.0 released 2014-05-20 - -* Added int_as_string_bitcount encoder option - https://github.com/simplejson/pull/96 -* Fixed potential crash when encoder created with incorrect options - -Version 3.4.1 released 2014-04-30 - -* Fixed tests to run on Python 3.4 - -Version 3.4.0 released 2014-04-02 - -* Native setuptools support re-introduced - https://github.com/simplejson/simplejson/pull/92 - -Version 3.3.3 released 2014-02-14 - -* Improve test suite's Python 3.4 compatibility - https://github.com/simplejson/simplejson/issues/87 - -Version 3.3.2 released 2014-01-06 - -* Docstring fix for decoded string types - https://github.com/simplejson/simplejson/pull/82 - -Version 3.3.1 released 2013-10-05 - -* JSONDecodeError exceptions can now be pickled - https://github.com/simplejson/simplejson/pull/78 - -Version 3.3.0 released 2013-05-07 - -* Unpaired surrogates once again pass through the decoder, to match older - behavior and the RFC-4627 spec. 
- https://github.com/simplejson/simplejson/issues/62 - -Version 3.2.0 released 2013-05-01 - -* New ignore_nan kwarg in encoder that serializes out - of range floats (Infinity, -Infinity, NaN) as null for ECMA-262 - compliance. - https://github.com/simplejson/simplejson/pull/63 -* New for_json kwarg in encoder to make it possible to for - subclasses of dict and list to be specialized. - https://github.com/simplejson/simplejson/pull/69 - -Version 3.1.3 released 2013-04-06 - -* Updated documentation to discourage subclassing whenever possible. - default, object_hook, and object_pairs_hook provide almost all of - the functionality of subclassing. - -Version 3.1.2 released 2013-03-20 - -* Updated documentation to reflect separators behavior when indent is - not None - https://github.com/simplejson/simplejson/issues/59 -* Test suite should be compatible with debug builds of Python 2.x and 3.x - https://github.com/simplejson/simplejson/pull/65 - -Version 3.1.1 released 2013-02-21 - -* setup.py now has another workaround for Windows machines without - MSVC installed - http://bugs.python.org/issue7511 - -Version 3.1.0 released 2013-02-21 - -* Updated JSON conformance test suite - http://bugs.python.org/issue16559 -* simplejson.tool tests and bugfix for Python 3.x - http://bugs.python.org/issue16549 -* Improve error messages for certain kinds of truncated input - http://bugs.python.org/issue16009 -* Moved JSONDecodeError to json.scanner (still available for import - from json.decoder) -* Changed scanner to use JSONDecodeError directly rather than - StopIteration to improve error messages - -Version 3.0.9 released 2013-02-21 - -* Fix an off-by-one error in the colno property of JSONDecodeError - (when lineno == 1) - http://bugs.python.org/issue17225 - -Version 3.0.8 released 2013-02-19 - -* Fix a Python 2.x compiler warning for narrow unicode builds - https://github.com/simplejson/simplejson/issues/56 - -Version 3.0.7 released 2013-01-11 - -* NOTE: this release only changes the license. -* simplejson is now dual-licensed software, MIT or AFL v2.1. It is - also made explicit that this code is also licensed to the PSF under - a Contributor Agreement. - -Version 3.0.6 released 2013-01-11 - -* Fix for major Python 2.x ensure_ascii=False encoding regression - introduced in simplejson 3.0.0. If you use this setting, please - upgrade immediately. - https://github.com/simplejson/simplejson/issues/50 - -Version 3.0.5 released 2013-01-03 - -* NOTE: this release only changes the tests, it is - not essential to upgrade -* Tests now run with deprecation warnings printed -* Fixed Python 3 syntax error in simplejson.tool - https://github.com/simplejson/simplejson/issues/49 -* Fixed Python 3.3 deprecation warnings in test suite - https://github.com/simplejson/simplejson/issues/48 - -Version 3.0.4 released 2013-01-02 - -* MSVC compatibility for Python 3.3 - https://github.com/simplejson/simplejson/pull/47 - -Version 3.0.3 released 2013-01-01 - -* Fixes for bugs introduced in 3.0.2 -* Fixes for Python 2.5 compatibility -* MSVC compatibility for Python 2.x - https://github.com/simplejson/simplejson/pull/46 - -Version 3.0.2 released 2013-01-01 - -* THIS VERSION HAS BEEN REMOVED -* Missed a changeset to _speedups.c in the 3.0.1 branch cut - -Version 3.0.1 released 2013-01-01 - -* THIS VERSION HAS BEEN REMOVED -* Add accumulator optimization to encoder, equivalent to the usage of - `_Py_Accu` in the Python 3.3 json library. Only relevant if encoding - very large JSON documents. 
- -Version 3.0.0 released 2012-12-30 - -* Python 3.3 is now supported, thanks to Vinay Sajip - https://github.com/simplejson/simplejson/issues/8 -* `sort_keys`/`item_sort_key` now sort on the stringified version of the - key, rather than the original object. This ensures that the sort - only compares string types and makes the behavior consistent between - Python 2.x and Python 3.x. -* Like other number types, Decimal instances used as keys are now - coerced to strings when use_decimal is True. - -Version 2.6.2 released 2012-09-21 - -* JSONEncoderForHTML was not exported in the simplejson module - https://github.com/simplejson/simplejson/issues/41 - -Version 2.6.1 released 2012-07-27 - -* raw_decode() now skips whitespace before the object - https://github.com/simplejson/simplejson/pull/38 - -Version 2.6.0 released 2012-06-26 - -* Error messages changed to match proposal for Python 3.3.1 - http://bugs.python.org/issue5067 - -Version 2.5.2 released 2012-05-10 - -* Fix for regression introduced in 2.5.1 - https://github.com/simplejson/simplejson/issues/35 - -Version 2.5.1 released 2012-05-10 - -* Support for use_decimal=True in environments that use Python - sub-interpreters such as uWSGI - https://github.com/simplejson/simplejson/issues/34 - -Version 2.5.0 released 2012-03-29 - -* New item_sort_key option for encoder to allow fine grained control of sorted - output - -Version 2.4.0 released 2012-03-06 - -* New bigint_as_string option for encoder to trade JavaScript number precision - issues for type issues. - https://github.com/simplejson/simplejson/issues/31 - -Version 2.3.3 released 2012-02-27 - -* Allow unknown numerical types for indent parameter - https://github.com/simplejson/simplejson/pull/29 - -Version 2.3.2 released 2011-12-30 - -* Fix crashing regression in speedups introduced in 2.3.1 - -Version 2.3.1 released 2011-12-29 - -* namedtuple_as_object now checks _asdict to ensure that it - is callable. - https://github.com/simplejson/simplejson/issues/26 - -Version 2.3.0 released 2011-12-05 - -* Any objects with _asdict() methods are now considered for - namedtuple_as_object. - https://github.com/simplejson/simplejson/pull/22 - -Version 2.2.1 released 2011-09-06 - -* Fix MANIFEST.in issue when building a sdist from a sdist. - https://github.com/simplejson/simplejson/issues/16 - -Version 2.2.0 released 2011-09-04 - -* Remove setuptools requirement, reverted to pure distutils -* use_decimal default for encoding (dump, dumps, JSONEncoder) is now True -* tuple encoding as JSON objects can be turned off with new - tuple_as_array=False option. - https://github.com/simplejson/simplejson/pull/6 -* namedtuple (or other tuple subclasses with _asdict methods) are now - encoded as JSON objects rather than arrays by default. Can be disabled - and treated as a tuple with the new namedtuple_as_object=False option. - https://github.com/simplejson/simplejson/pull/6 -* JSONDecodeError is now raised instead of ValueError when a document - ends with an opening quote and the C speedups are in use. 
- https://github.com/simplejson/simplejson/issues/15 -* Updated documentation with information about JSONDecodeError -* Force unicode linebreak characters to be escaped (U+2028 and U+2029) - http://timelessrepo.com/json-isnt-a-javascript-subset -* Moved documentation from a git submodule to - https://simplejson.readthedocs.io/ - -Version 2.1.6 released 2011-05-08 - -* Prevent segfaults with deeply nested JSON documents - https://github.com/simplejson/simplejson/issues/11 -* Fix compatibility with Python 2.5 - https://github.com/simplejson/simplejson/issues/5 - -Version 2.1.5 released 2011-04-17 - -* Built sdist tarball with setuptools_git installed. Argh. - -Version 2.1.4 released 2011-04-17 - -* Does not try to build the extension when using PyPy -* Trailing whitespace after commas no longer emitted when indent is used -* Migrated to github http://github.com/simplejson/simplejson - -Version 2.1.3 released 2011-01-17 - -* Support the sort_keys option in C encoding speedups - http://code.google.com/p/simplejson/issues/detail?id=86 -* Allow use_decimal to work with dump() - http://code.google.com/p/simplejson/issues/detail?id=87 - -Version 2.1.2 released 2010-11-01 - -* Correct wrong end when object_pairs_hook is used - http://code.google.com/p/simplejson/issues/detail?id=85 -* Correct output for indent=0 - http://bugs.python.org/issue10019 -* Correctly raise TypeError when non-string keys are used with speedups - http://code.google.com/p/simplejson/issues/detail?id=82 -* Fix the endlineno, endcolno attributes of the JSONDecodeError exception. - http://code.google.com/p/simplejson/issues/detail?id=81 - -Version 2.1.1 released 2010-03-31 - -* Change how setup.py imports ez_setup.py to try and workaround old versions - of setuptools. - http://code.google.com/p/simplejson/issues/detail?id=75 -* Fix compilation on Windows platform (and other platforms with very - picky compilers) -* Corrected simplejson.__version__ and other minor doc changes. -* Do not fail speedups tests if speedups could not be built. - http://code.google.com/p/simplejson/issues/detail?id=73 - -Version 2.1.0 released 2010-03-10 - -* Decimal serialization officially supported for encoding with - use_decimal=True. For encoding this encodes Decimal objects and - for decoding it implies parse_float=Decimal -* Python 2.4 no longer supported (may still work, but no longer tested) -* Decoding performance and memory utilization enhancements - http://bugs.python.org/issue7451 -* JSONEncoderForHTML class for escaping &, <, > - http://code.google.com/p/simplejson/issues/detail?id=66 -* Memoization of object keys during encoding (when using speedups) -* Encoder changed to use PyIter_Next for list iteration to avoid - potential threading issues -* Encoder changed to use iteritems rather than PyDict_Next in order to - support dict subclasses that have a well defined ordering - http://bugs.python.org/issue6105 -* indent encoding parameter changed to be a string rather than an integer - (integer use still supported for backwards compatibility) - http://code.google.com/p/simplejson/issues/detail?id=56 -* Test suite (python setup.py test) now automatically runs with and without - speedups - http://code.google.com/p/simplejson/issues/detail?id=55 -* Fixed support for older versions of easy_install (e.g. 
stock Mac OS X config) - http://code.google.com/p/simplejson/issues/detail?id=54 -* Fixed str/unicode mismatches when using ensure_ascii=False - http://code.google.com/p/simplejson/issues/detail?id=48 -* Fixed error message when parsing an array with trailing comma with speedups - http://code.google.com/p/simplejson/issues/detail?id=46 -* Refactor decoder errors to raise JSONDecodeError instead of ValueError - http://code.google.com/p/simplejson/issues/detail?id=45 -* New ordered_pairs_hook feature in decoder which makes it possible to - preserve key order. http://bugs.python.org/issue5381 -* Fixed containerless unicode float decoding (same bug as 2.0.4, oops!) - http://code.google.com/p/simplejson/issues/detail?id=43 -* Share PosInf definition between encoder and decoder -* Minor reformatting to make it easier to backport simplejson changes - to Python 2.7/3.1 json module - -Version 2.0.9 released 2009-02-18 - -* Adds cyclic GC to the Encoder and Scanner speedups, which could've - caused uncollectible cycles in some cases when using custom parser - or encoder functions - -Version 2.0.8 released 2009-02-15 - -* Documentation fixes -* Fixes encoding True and False as keys -* Fixes checking for True and False by identity for several parameters - -Version 2.0.7 released 2009-01-04 - -* Documentation fixes -* C extension now always returns unicode strings when the input string is - unicode, even for empty strings - -Version 2.0.6 released 2008-12-19 - -* Windows build fixes - -Version 2.0.5 released 2008-11-23 - -* Fixes a segfault in the C extension when using check_circular=False and - encoding an invalid document - -Version 2.0.4 released 2008-10-24 - -* Fixes a parsing error in the C extension when the JSON document is (only) - a floating point number. It would consume one too few characters in that - case, and claim the document invalid. - -Version 2.0.3 released 2008-10-11 - -* Fixes reference leaks in the encoding speedups (sorry about that!) -* Fixes doctest suite for Python 2.6 -* More optimizations for the decoder - -Version 2.0.2 released 2008-10-06 - -* Fixes MSVC2003 build regression -* Fixes Python 2.4 compatibility in _speedups.c - -Version 2.0.1 released 2008-09-29 - -* Fixes long encoding regression introduced in 2.0.0 -* Fixes MinGW build regression introduced in 2.0.0 - -Version 2.0.0 released 2008-09-27 - -* optimized Python encoding path -* optimized Python decoding path -* optimized C encoding path -* optimized C decoding path -* switched to sphinx docs (nearly the same as the json module in python 2.6) - -Version 1.9.3 released 2008-09-23 - -* Decoding is significantly faster (for our internal benchmarks) -* Pretty-printing tool changed from simplejson to simplejson.tool for better - Python 2.6 comaptibility -* Misc. bug fixes - -Version 1.9 released 2008-05-03 - -* Rewrote test suite with unittest and doctest (no more nosetest dependency) -* Better PEP 7 and PEP 8 source compliance -* Removed simplejson.jsonfilter demo module -* simplejson.jsonfilter is no longer included - -Version 1.8.1 released 2008-03-24 - -* Optional C extension for accelerating the decoding of JSON strings -* Command line interface for pretty-printing JSON (via python -msimplejson) -* Decoding of integers and floats is now extensible (e.g. to use Decimal) via - parse_int, parse_float options. 
-* Subversion and issue tracker moved to google code: - http://code.google.com/p/simplejson/ -* "/" is no longer escaped, so if you're embedding JSON directly in HTML - you'll want to use .replace("/", "\\/") to prevent a close-tag attack. - -Version 1.7 released 2007-03-18 - -* Improves encoding performance with an optional C extension to speed up - str/unicode encoding (by 10-150x or so), which yields an overall speed - boost of 2x+ (JSON is string-heavy). -* Support for encoding unicode code points outside the BMP to UTF-16 - surrogate code pairs (specified by the Strings section of RFC 4627). - -Version 1.6 released 2007-03-03 - -* Improved str support for encoding. Previous versions of simplejson - integrated strings directly into the output stream, this version ensures - they're of a particular encoding (default is UTF-8) so that the output - stream is valid. - -Version 1.5 released 2007-01-18 - -* Better Python 2.5 compatibility -* Better Windows compatibility -* indent encoding parameter for pretty printing -* separators encoding parameter for generating optimally compact JSON - -Version 1.3 released 2006-04-01 - -* The optional object_hook function is called upon decoding of any JSON - object literal, and its return value is used instead of the dict that - would normally be used. This can be used to efficiently implement - features such as JSON-RPC class hinting, or other custom decodings of - JSON. See the documentation for more information. - -Version 1.1 released 2005-12-31 - -* Renamed from simple_json to simplejson to comply with PEP 8 module naming - guidelines -* Full set of documentation -* More tests -* The encoder and decoder have been extended to understand NaN, Infinity, and - -Infinity (but this can be turned off via allow_nan=False for strict JSON - compliance) -* The decoder's scanner has been fixed so that it no longer accepts invalid - JSON documents -* The decoder now reports line and column information as well as character - numbers for easier debugging -* The encoder now has a circular reference checker, which can be optionally - disabled with check_circular=False -* dump, dumps, load, loads now accept an optional cls kwarg to use an - alternate JSONEncoder or JSONDecoder class for convenience. -* The read/write compatibility shim for json-py now have deprecation warnings - -Version 1.0 released 2005-12-25 - - * Initial release diff --git a/script.module.simplejson/README.rst b/script.module.simplejson/README.rst deleted file mode 100644 index 048a30966e..0000000000 --- a/script.module.simplejson/README.rst +++ /dev/null @@ -1,40 +0,0 @@ -simplejson ----------- - -.. image:: https://travis-ci.org/simplejson/simplejson.svg?branch=master - :target: https://travis-ci.org/simplejson/simplejson - -.. image:: https://ci.appveyor.com/api/projects/status/3riqhss6vca680gi/branch/master?svg=true - :target: https://ci.appveyor.com/project/etrepum/simplejson/branch/master - -simplejson is a simple, fast, complete, correct and extensible -JSON encoder and decoder for Python 3.3+ -with legacy support for Python 2.5+. It is pure Python code -with no dependencies, but includes an optional C extension -for a serious speed boost. - -The latest documentation for simplejson can be read online here: -https://simplejson.readthedocs.io/ - -simplejson is the externally maintained development version of the -json library included with Python (since 2.6). 
This version is tested -with the latest Python 3.8 and maintains backwards compatibility -with Python 3.3+ and the legacy Python 2.5 - Python 2.7 releases. - -The encoder can be specialized to provide serialization in any kind of -situation, without any special support by the objects to be serialized -(somewhat like pickle). This is best done with the ``default`` kwarg -to dumps. - -The decoder can handle incoming JSON strings of any specified encoding -(UTF-8 by default). It can also be specialized to post-process JSON -objects with the ``object_hook`` or ``object_pairs_hook`` kwargs. This -is particularly useful for implementing protocols such as JSON-RPC -that have a richer type system than JSON itself. - -For those of you that have legacy systems to maintain, there is a -very old fork of simplejson in the `python2.2`_ branch that supports -Python 2.2. This is based off of a very old version of simplejson, -is not maintained, and should only be used as a last resort. - -.. _python2.2: https://github.com/simplejson/simplejson/tree/python2.2 diff --git a/script.module.simplejson/addon.xml b/script.module.simplejson/addon.xml index b82b081de7..a1846e9617 100644 --- a/script.module.simplejson/addon.xml +++ b/script.module.simplejson/addon.xml @@ -1,24 +1,18 @@ - + - + - all - Simple, fast, extensible JSON encoder/decoder for Python Simple, fast, extensible JSON encoder/decoder for Python - Code taken from https://pypi.org/project/simplejson/ - MIT License, Academic Free License v. 2.1 - https://pypi.org/project/simplejson/ + MIT + all + https://github.com/simplejson/simplejson https://github.com/simplejson/simplejson - icon.png + resources/icon.png diff --git a/script.module.simplejson/lib/simplejson/__init__.py b/script.module.simplejson/lib/simplejson/__init__.py index 7b5687c859..7e533a24ae 100644 --- a/script.module.simplejson/lib/simplejson/__init__.py +++ b/script.module.simplejson/lib/simplejson/__init__.py @@ -118,7 +118,7 @@ """ from __future__ import absolute_import -__version__ = '3.17.0' +__version__ = '3.19.1' __all__ = [ 'dump', 'dumps', 'load', 'loads', 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', @@ -149,28 +149,10 @@ def _import_c_make_encoder(): except ImportError: return None -_default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - use_decimal=True, - namedtuple_as_object=True, - tuple_as_array=True, - iterable_as_array=False, - bigint_as_string=False, - item_sort_key=None, - for_json=False, - ignore_nan=False, - int_as_string_bitcount=None, -) +_default_encoder = JSONEncoder() def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, + allow_nan=False, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, @@ -187,10 +169,10 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, contain non-ASCII characters, so long as they do not need to be escaped by JSON. When it is true, all non-ASCII characters are escaped. - If *allow_nan* is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) - in strict compliance of the original JSON specification, instead of using - the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). 
See + If *allow_nan* is true (default: ``False``), then out of range ``float`` + values (``nan``, ``inf``, ``-inf``) will be serialized to + their JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``) + instead of raising a ValueError. See *ignore_nan* for ECMA-262 compliant behavior. If *indent* is a string, then JSON array elements and object members @@ -258,7 +240,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, """ # cached encoder if (not skipkeys and ensure_ascii and - check_circular and allow_nan and + check_circular and not allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and use_decimal and namedtuple_as_object and tuple_as_array and not iterable_as_array @@ -292,7 +274,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, + allow_nan=False, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, @@ -300,7 +282,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, iterable_as_array=False, **kw): """Serialize ``obj`` to a JSON formatted ``str``. - If ``skipkeys`` is false then ``dict`` keys that are not basic types + If ``skipkeys`` is true then ``dict`` keys that are not basic types (``str``, ``int``, ``long``, ``float``, ``bool``, ``None``) will be skipped instead of raising a ``TypeError``. @@ -312,10 +294,11 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, for container types will be skipped and a circular reference will result in an ``OverflowError`` (or worse). - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in - strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + If *allow_nan* is true (default: ``False``), then out of range ``float`` + values (``nan``, ``inf``, ``-inf``) will be serialized to + their JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``) + instead of raising a ValueError. See + *ignore_nan* for ECMA-262 compliant behavior. If ``indent`` is a string, then JSON array elements and object members will be pretty-printed with a newline followed by that string repeated @@ -360,7 +343,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, If specified, *item_sort_key* is a callable used to sort the items in each dictionary. This is useful if you want to sort items other than - in alphabetical order by key. This option takes precendence over + in alphabetical order by key. This option takes precedence over *sort_keys*. 
If *sort_keys* is true (default: ``False``), the output of dictionaries @@ -383,7 +366,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, """ # cached encoder if (not skipkeys and ensure_ascii and - check_circular and allow_nan and + check_circular and not allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and use_decimal and namedtuple_as_object and tuple_as_array and not iterable_as_array @@ -412,14 +395,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, **kw).encode(obj) -_default_decoder = JSONDecoder(encoding=None, object_hook=None, - object_pairs_hook=None) +_default_decoder = JSONDecoder() def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, namedtuple_as_object=True, tuple_as_array=True, - **kw): + use_decimal=False, allow_nan=False, **kw): """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document as `str` or `bytes`) to a Python object. @@ -442,23 +423,27 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, takes priority. *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to + JSON float to be decoded. By default, this is equivalent to ``float(num_str)``. This can be used to use another datatype or parser for JSON floats (e.g. :class:`decimal.Decimal`). *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to + JSON int to be decoded. By default, this is equivalent to ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity`` + and enable the use of the deprecated *parse_constant*. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. @@ -468,12 +453,12 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, - use_decimal=use_decimal, **kw) + use_decimal=use_decimal, allow_nan=allow_nan, **kw) def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, **kw): + use_decimal=False, allow_nan=False, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. @@ -505,14 +490,18 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, ``int(num_str)``. 
This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity`` + and enable the use of the deprecated *parse_constant*. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. @@ -521,7 +510,7 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, if (cls is None and encoding is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and object_pairs_hook is None - and not use_decimal and not kw): + and not use_decimal and not allow_nan and not kw): return _default_decoder.decode(s) if cls is None: cls = JSONDecoder @@ -539,6 +528,8 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, if parse_float is not None: raise TypeError("use_decimal=True implies parse_float=Decimal") kw['parse_float'] = Decimal + if allow_nan: + kw['allow_nan'] = True return cls(encoding=encoding, **kw).decode(s) @@ -560,22 +551,9 @@ def _toggle_speedups(enabled): scan.make_scanner = scan.py_make_scanner dec.make_scanner = scan.make_scanner global _default_decoder - _default_decoder = JSONDecoder( - encoding=None, - object_hook=None, - object_pairs_hook=None, - ) + _default_decoder = JSONDecoder() global _default_encoder - _default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - ) + _default_encoder = JSONEncoder() def simple_first(kv): """Helper function to pass to item_sort_key to sort simple diff --git a/script.module.simplejson/lib/simplejson/_speedups.c b/script.module.simplejson/lib/simplejson/_speedups.c deleted file mode 100644 index e7101288d4..0000000000 --- a/script.module.simplejson/lib/simplejson/_speedups.c +++ /dev/null @@ -1,3384 +0,0 @@ -/* -*- mode: C; c-file-style: "python"; c-basic-offset: 4 -*- */ -#include "Python.h" -#include "structmember.h" - -#if PY_MAJOR_VERSION >= 3 -#define PyInt_FromSsize_t PyLong_FromSsize_t -#define PyInt_AsSsize_t PyLong_AsSsize_t -#define PyInt_Check(obj) 0 -#define PyInt_CheckExact(obj) 0 -#define JSON_UNICHR Py_UCS4 -#define JSON_InternFromString PyUnicode_InternFromString -#define PyString_GET_SIZE PyUnicode_GET_LENGTH -#define PY2_UNUSED -#define PY3_UNUSED UNUSED -#else /* PY_MAJOR_VERSION >= 3 */ -#define PY2_UNUSED UNUSED -#define PY3_UNUSED -#define PyBytes_Check PyString_Check -#define PyUnicode_READY(obj) 0 -#define PyUnicode_KIND(obj) (sizeof(Py_UNICODE)) -#define PyUnicode_DATA(obj) ((void *)(PyUnicode_AS_UNICODE(obj))) -#define PyUnicode_READ(kind, data, index) ((JSON_UNICHR)((const Py_UNICODE *)(data))[(index)]) -#define PyUnicode_GET_LENGTH PyUnicode_GET_SIZE -#define JSON_UNICHR 
Py_UNICODE -#define JSON_InternFromString PyString_InternFromString -#endif /* PY_MAJOR_VERSION < 3 */ - -#if PY_VERSION_HEX < 0x02070000 -#if !defined(PyOS_string_to_double) -#define PyOS_string_to_double json_PyOS_string_to_double -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception); -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) -{ - double x; - assert(endptr == NULL); - assert(overflow_exception == NULL); - PyFPE_START_PROTECT("json_PyOS_string_to_double", return -1.0;) - x = PyOS_ascii_atof(s); - PyFPE_END_PROTECT(x) - return x; -} -#endif -#endif /* PY_VERSION_HEX < 0x02070000 */ - -#if PY_VERSION_HEX < 0x02060000 -#if !defined(Py_TYPE) -#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) -#endif -#if !defined(Py_SIZE) -#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) -#endif -#if !defined(PyVarObject_HEAD_INIT) -#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size, -#endif -#endif /* PY_VERSION_HEX < 0x02060000 */ - -#ifdef __GNUC__ -#define UNUSED __attribute__((__unused__)) -#else -#define UNUSED -#endif - -#define DEFAULT_ENCODING "utf-8" - -#define PyScanner_Check(op) PyObject_TypeCheck(op, &PyScannerType) -#define PyScanner_CheckExact(op) (Py_TYPE(op) == &PyScannerType) -#define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType) -#define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType) - -#define JSON_ALLOW_NAN 1 -#define JSON_IGNORE_NAN 2 - -static PyObject *JSON_Infinity = NULL; -static PyObject *JSON_NegInfinity = NULL; -static PyObject *JSON_NaN = NULL; -static PyObject *JSON_EmptyUnicode = NULL; -#if PY_MAJOR_VERSION < 3 -static PyObject *JSON_EmptyStr = NULL; -#endif - -static PyTypeObject PyScannerType; -static PyTypeObject PyEncoderType; - -typedef struct { - PyObject *large_strings; /* A list of previously accumulated large strings */ - PyObject *small_strings; /* Pending small strings */ -} JSON_Accu; - -static int -JSON_Accu_Init(JSON_Accu *acc); -static int -JSON_Accu_Accumulate(JSON_Accu *acc, PyObject *unicode); -static PyObject * -JSON_Accu_FinishAsList(JSON_Accu *acc); -static void -JSON_Accu_Destroy(JSON_Accu *acc); - -#define ERR_EXPECTING_VALUE "Expecting value" -#define ERR_ARRAY_DELIMITER "Expecting ',' delimiter or ']'" -#define ERR_ARRAY_VALUE_FIRST "Expecting value or ']'" -#define ERR_OBJECT_DELIMITER "Expecting ',' delimiter or '}'" -#define ERR_OBJECT_PROPERTY "Expecting property name enclosed in double quotes" -#define ERR_OBJECT_PROPERTY_FIRST "Expecting property name enclosed in double quotes or '}'" -#define ERR_OBJECT_PROPERTY_DELIMITER "Expecting ':' delimiter" -#define ERR_STRING_UNTERMINATED "Unterminated string starting at" -#define ERR_STRING_CONTROL "Invalid control character %r at" -#define ERR_STRING_ESC1 "Invalid \\X escape sequence %r" -#define ERR_STRING_ESC4 "Invalid \\uXXXX escape sequence" - -typedef struct _PyScannerObject { - PyObject_HEAD - PyObject *encoding; - PyObject *strict_bool; - int strict; - PyObject *object_hook; - PyObject *pairs_hook; - PyObject *parse_float; - PyObject *parse_int; - PyObject *parse_constant; - PyObject *memo; -} PyScannerObject; - -static PyMemberDef scanner_members[] = { - {"encoding", T_OBJECT, offsetof(PyScannerObject, encoding), READONLY, "encoding"}, - {"strict", T_OBJECT, offsetof(PyScannerObject, strict_bool), READONLY, "strict"}, - {"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"}, - {"object_pairs_hook", 
T_OBJECT, offsetof(PyScannerObject, pairs_hook), READONLY, "object_pairs_hook"}, - {"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"}, - {"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"}, - {"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"}, - {NULL} -}; - -typedef struct _PyEncoderObject { - PyObject_HEAD - PyObject *markers; - PyObject *defaultfn; - PyObject *encoder; - PyObject *indent; - PyObject *key_separator; - PyObject *item_separator; - PyObject *sort_keys; - PyObject *key_memo; - PyObject *encoding; - PyObject *Decimal; - PyObject *skipkeys_bool; - int skipkeys; - int fast_encode; - /* 0, JSON_ALLOW_NAN, JSON_IGNORE_NAN */ - int allow_or_ignore_nan; - int use_decimal; - int namedtuple_as_object; - int tuple_as_array; - int iterable_as_array; - PyObject *max_long_size; - PyObject *min_long_size; - PyObject *item_sort_key; - PyObject *item_sort_kw; - int for_json; -} PyEncoderObject; - -static PyMemberDef encoder_members[] = { - {"markers", T_OBJECT, offsetof(PyEncoderObject, markers), READONLY, "markers"}, - {"default", T_OBJECT, offsetof(PyEncoderObject, defaultfn), READONLY, "default"}, - {"encoder", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoder"}, - {"encoding", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoding"}, - {"indent", T_OBJECT, offsetof(PyEncoderObject, indent), READONLY, "indent"}, - {"key_separator", T_OBJECT, offsetof(PyEncoderObject, key_separator), READONLY, "key_separator"}, - {"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"}, - {"sort_keys", T_OBJECT, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"}, - /* Python 2.5 does not support T_BOOl */ - {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys_bool), READONLY, "skipkeys"}, - {"key_memo", T_OBJECT, offsetof(PyEncoderObject, key_memo), READONLY, "key_memo"}, - {"item_sort_key", T_OBJECT, offsetof(PyEncoderObject, item_sort_key), READONLY, "item_sort_key"}, - {"max_long_size", T_OBJECT, offsetof(PyEncoderObject, max_long_size), READONLY, "max_long_size"}, - {"min_long_size", T_OBJECT, offsetof(PyEncoderObject, min_long_size), READONLY, "min_long_size"}, - {NULL} -}; - -static PyObject * -join_list_unicode(PyObject *lst); -static PyObject * -JSON_ParseEncoding(PyObject *encoding); -static PyObject * -maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj); -static Py_ssize_t -ascii_char_size(JSON_UNICHR c); -static Py_ssize_t -ascii_escape_char(JSON_UNICHR c, char *output, Py_ssize_t chars); -static PyObject * -ascii_escape_unicode(PyObject *pystr); -static PyObject * -ascii_escape_str(PyObject *pystr); -static PyObject * -py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr); -#if PY_MAJOR_VERSION < 3 -static PyObject * -join_list_string(PyObject *lst); -static PyObject * -scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr); -static PyObject * -scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr); -static PyObject * -_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr); -#endif -static PyObject * -scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr); -static PyObject * -scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr); -static PyObject * 
-_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx); -static PyObject * -scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds); -static void -scanner_dealloc(PyObject *self); -static int -scanner_clear(PyObject *self); -static PyObject * -encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds); -static void -encoder_dealloc(PyObject *self); -static int -encoder_clear(PyObject *self); -static int -is_raw_json(PyObject *obj); -static PyObject * -encoder_stringify_key(PyEncoderObject *s, PyObject *key); -static int -encoder_listencode_list(PyEncoderObject *s, JSON_Accu *rval, PyObject *seq, Py_ssize_t indent_level); -static int -encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ssize_t indent_level); -static int -encoder_listencode_dict(PyEncoderObject *s, JSON_Accu *rval, PyObject *dct, Py_ssize_t indent_level); -static PyObject * -_encoded_const(PyObject *obj); -static void -raise_errmsg(char *msg, PyObject *s, Py_ssize_t end); -static PyObject * -encoder_encode_string(PyEncoderObject *s, PyObject *obj); -static int -_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr); -static PyObject * -_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr); -static PyObject * -encoder_encode_float(PyEncoderObject *s, PyObject *obj); -static int -_is_namedtuple(PyObject *obj); -static int -_has_for_json_hook(PyObject *obj); -static PyObject * -moduleinit(void); - -#define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"') -#define IS_WHITESPACE(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\n') || ((c) == '\r')) - -#define MIN_EXPANSION 6 - -static PyObject* RawJSONType = NULL; -static int -is_raw_json(PyObject *obj) -{ - return PyObject_IsInstance(obj, RawJSONType) ? 1 : 0; -} - -static int -JSON_Accu_Init(JSON_Accu *acc) -{ - /* Lazily allocated */ - acc->large_strings = NULL; - acc->small_strings = PyList_New(0); - if (acc->small_strings == NULL) - return -1; - return 0; -} - -static int -flush_accumulator(JSON_Accu *acc) -{ - Py_ssize_t nsmall = PyList_GET_SIZE(acc->small_strings); - if (nsmall) { - int ret; - PyObject *joined; - if (acc->large_strings == NULL) { - acc->large_strings = PyList_New(0); - if (acc->large_strings == NULL) - return -1; - } -#if PY_MAJOR_VERSION >= 3 - joined = join_list_unicode(acc->small_strings); -#else /* PY_MAJOR_VERSION >= 3 */ - joined = join_list_string(acc->small_strings); -#endif /* PY_MAJOR_VERSION < 3 */ - if (joined == NULL) - return -1; - if (PyList_SetSlice(acc->small_strings, 0, nsmall, NULL)) { - Py_DECREF(joined); - return -1; - } - ret = PyList_Append(acc->large_strings, joined); - Py_DECREF(joined); - return ret; - } - return 0; -} - -static int -JSON_Accu_Accumulate(JSON_Accu *acc, PyObject *unicode) -{ - Py_ssize_t nsmall; -#if PY_MAJOR_VERSION >= 3 - assert(PyUnicode_Check(unicode)); -#else /* PY_MAJOR_VERSION >= 3 */ - assert(PyString_Check(unicode) || PyUnicode_Check(unicode)); -#endif /* PY_MAJOR_VERSION < 3 */ - - if (PyList_Append(acc->small_strings, unicode)) - return -1; - nsmall = PyList_GET_SIZE(acc->small_strings); - /* Each item in a list of unicode objects has an overhead (in 64-bit - * builds) of: - * - 8 bytes for the list slot - * - 56 bytes for the header of the unicode object - * that is, 64 bytes. 100000 such objects waste more than 6MB - * compared to a single concatenated string. 
- */ - if (nsmall < 100000) - return 0; - return flush_accumulator(acc); -} - -static PyObject * -JSON_Accu_FinishAsList(JSON_Accu *acc) -{ - int ret; - PyObject *res; - - ret = flush_accumulator(acc); - Py_CLEAR(acc->small_strings); - if (ret) { - Py_CLEAR(acc->large_strings); - return NULL; - } - res = acc->large_strings; - acc->large_strings = NULL; - if (res == NULL) - return PyList_New(0); - return res; -} - -static void -JSON_Accu_Destroy(JSON_Accu *acc) -{ - Py_CLEAR(acc->small_strings); - Py_CLEAR(acc->large_strings); -} - -static int -IS_DIGIT(JSON_UNICHR c) -{ - return c >= '0' && c <= '9'; -} - -static PyObject * -maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj) -{ - if (s->max_long_size != Py_None && s->min_long_size != Py_None) { - if (PyObject_RichCompareBool(obj, s->max_long_size, Py_GE) || - PyObject_RichCompareBool(obj, s->min_long_size, Py_LE)) { -#if PY_MAJOR_VERSION >= 3 - PyObject* quoted = PyUnicode_FromFormat("\"%U\"", encoded); -#else - PyObject* quoted = PyString_FromFormat("\"%s\"", - PyString_AsString(encoded)); -#endif - Py_DECREF(encoded); - encoded = quoted; - } - } - - return encoded; -} - -static int -_is_namedtuple(PyObject *obj) -{ - int rval = 0; - PyObject *_asdict = PyObject_GetAttrString(obj, "_asdict"); - if (_asdict == NULL) { - PyErr_Clear(); - return 0; - } - rval = PyCallable_Check(_asdict); - Py_DECREF(_asdict); - return rval; -} - -static int -_has_for_json_hook(PyObject *obj) -{ - int rval = 0; - PyObject *for_json = PyObject_GetAttrString(obj, "for_json"); - if (for_json == NULL) { - PyErr_Clear(); - return 0; - } - rval = PyCallable_Check(for_json); - Py_DECREF(for_json); - return rval; -} - -static int -_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr) -{ - /* PyObject to Py_ssize_t converter */ - *size_ptr = PyInt_AsSsize_t(o); - if (*size_ptr == -1 && PyErr_Occurred()) - return 0; - return 1; -} - -static PyObject * -_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr) -{ - /* Py_ssize_t to PyObject converter */ - return PyInt_FromSsize_t(*size_ptr); -} - -static Py_ssize_t -ascii_escape_char(JSON_UNICHR c, char *output, Py_ssize_t chars) -{ - /* Escape unicode code point c to ASCII escape sequences - in char *output. 
output must have at least 12 bytes unused to - accommodate an escaped surrogate pair "\uXXXX\uXXXX" */ - if (S_CHAR(c)) { - output[chars++] = (char)c; - } - else { - output[chars++] = '\\'; - switch (c) { - case '\\': output[chars++] = (char)c; break; - case '"': output[chars++] = (char)c; break; - case '\b': output[chars++] = 'b'; break; - case '\f': output[chars++] = 'f'; break; - case '\n': output[chars++] = 'n'; break; - case '\r': output[chars++] = 'r'; break; - case '\t': output[chars++] = 't'; break; - default: -#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE) - if (c >= 0x10000) { - /* UTF-16 surrogate pair */ - JSON_UNICHR v = c - 0x10000; - c = 0xd800 | ((v >> 10) & 0x3ff); - output[chars++] = 'u'; - output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf]; - output[chars++] = "0123456789abcdef"[(c ) & 0xf]; - c = 0xdc00 | (v & 0x3ff); - output[chars++] = '\\'; - } -#endif - output[chars++] = 'u'; - output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf]; - output[chars++] = "0123456789abcdef"[(c ) & 0xf]; - } - } - return chars; -} - -static Py_ssize_t -ascii_char_size(JSON_UNICHR c) -{ - if (S_CHAR(c)) { - return 1; - } - else if (c == '\\' || - c == '"' || - c == '\b' || - c == '\f' || - c == '\n' || - c == '\r' || - c == '\t') { - return 2; - } -#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE) - else if (c >= 0x10000U) { - return 2 * MIN_EXPANSION; - } -#endif - else { - return MIN_EXPANSION; - } -} - -static PyObject * -ascii_escape_unicode(PyObject *pystr) -{ - /* Take a PyUnicode pystr and return a new ASCII-only escaped PyString */ - Py_ssize_t i; - Py_ssize_t input_chars = PyUnicode_GET_LENGTH(pystr); - Py_ssize_t output_size = 2; - Py_ssize_t chars; - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - void *data = PyUnicode_DATA(pystr); - PyObject *rval; - char *output; - - output_size = 2; - for (i = 0; i < input_chars; i++) { - output_size += ascii_char_size(PyUnicode_READ(kind, data, i)); - } -#if PY_MAJOR_VERSION >= 3 - rval = PyUnicode_New(output_size, 127); - if (rval == NULL) { - return NULL; - } - assert(PyUnicode_KIND(rval) == PyUnicode_1BYTE_KIND); - output = (char *)PyUnicode_DATA(rval); -#else - rval = PyString_FromStringAndSize(NULL, output_size); - if (rval == NULL) { - return NULL; - } - output = PyString_AS_STRING(rval); -#endif - chars = 0; - output[chars++] = '"'; - for (i = 0; i < input_chars; i++) { - chars = ascii_escape_char(PyUnicode_READ(kind, data, i), output, chars); - } - output[chars++] = '"'; - assert(chars == output_size); - return rval; -} - -#if PY_MAJOR_VERSION >= 3 - -static PyObject * -ascii_escape_str(PyObject *pystr) -{ - PyObject *rval; - PyObject *input = PyUnicode_DecodeUTF8(PyBytes_AS_STRING(pystr), PyBytes_GET_SIZE(pystr), NULL); - if (input == NULL) - return NULL; - rval = ascii_escape_unicode(input); - Py_DECREF(input); - return rval; -} - -#else /* PY_MAJOR_VERSION >= 3 */ - -static PyObject * -ascii_escape_str(PyObject *pystr) -{ - /* Take a PyString pystr and return a new ASCII-only escaped PyString */ - Py_ssize_t i; - Py_ssize_t input_chars; - Py_ssize_t output_size; - Py_ssize_t chars; - PyObject *rval; - char *output; - char *input_str; - - input_chars = PyString_GET_SIZE(pystr); - input_str = PyString_AS_STRING(pystr); - output_size = 2; - - /* Fast path for a string that's already ASCII */ - for 
(i = 0; i < input_chars; i++) { - JSON_UNICHR c = (JSON_UNICHR)input_str[i]; - if (c > 0x7f) { - /* We hit a non-ASCII character, bail to unicode mode */ - PyObject *uni; - uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict"); - if (uni == NULL) { - return NULL; - } - rval = ascii_escape_unicode(uni); - Py_DECREF(uni); - return rval; - } - output_size += ascii_char_size(c); - } - - rval = PyString_FromStringAndSize(NULL, output_size); - if (rval == NULL) { - return NULL; - } - chars = 0; - output = PyString_AS_STRING(rval); - output[chars++] = '"'; - for (i = 0; i < input_chars; i++) { - chars = ascii_escape_char((JSON_UNICHR)input_str[i], output, chars); - } - output[chars++] = '"'; - assert(chars == output_size); - return rval; -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -encoder_stringify_key(PyEncoderObject *s, PyObject *key) -{ - if (PyUnicode_Check(key)) { - Py_INCREF(key); - return key; - } -#if PY_MAJOR_VERSION >= 3 - else if (PyBytes_Check(key) && s->encoding != NULL) { - const char *encoding = PyUnicode_AsUTF8(s->encoding); - if (encoding == NULL) - return NULL; - return PyUnicode_Decode( - PyBytes_AS_STRING(key), - PyBytes_GET_SIZE(key), - encoding, - NULL); - } -#else /* PY_MAJOR_VERSION >= 3 */ - else if (PyString_Check(key)) { - Py_INCREF(key); - return key; - } -#endif /* PY_MAJOR_VERSION < 3 */ - else if (PyFloat_Check(key)) { - return encoder_encode_float(s, key); - } - else if (key == Py_True || key == Py_False || key == Py_None) { - /* This must come before the PyInt_Check because - True and False are also 1 and 0.*/ - return _encoded_const(key); - } - else if (PyInt_Check(key) || PyLong_Check(key)) { - if (!(PyInt_CheckExact(key) || PyLong_CheckExact(key))) { - /* See #118, do not trust custom str/repr */ - PyObject *res; - PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, key, NULL); - if (tmp == NULL) { - return NULL; - } - res = PyObject_Str(tmp); - Py_DECREF(tmp); - return res; - } - else { - return PyObject_Str(key); - } - } - else if (s->use_decimal && PyObject_TypeCheck(key, (PyTypeObject *)s->Decimal)) { - return PyObject_Str(key); - } - if (s->skipkeys) { - Py_INCREF(Py_None); - return Py_None; - } - PyErr_Format(PyExc_TypeError, - "keys must be str, int, float, bool or None, " - "not %.100s", key->ob_type->tp_name); - return NULL; -} - -static PyObject * -encoder_dict_iteritems(PyEncoderObject *s, PyObject *dct) -{ - PyObject *items; - PyObject *iter = NULL; - PyObject *lst = NULL; - PyObject *item = NULL; - PyObject *kstr = NULL; - PyObject *sortfun = NULL; - PyObject *sortres; - static PyObject *sortargs = NULL; - - if (sortargs == NULL) { - sortargs = PyTuple_New(0); - if (sortargs == NULL) - return NULL; - } - - if (PyDict_CheckExact(dct)) - items = PyDict_Items(dct); - else - items = PyMapping_Items(dct); - if (items == NULL) - return NULL; - iter = PyObject_GetIter(items); - Py_DECREF(items); - if (iter == NULL) - return NULL; - if (s->item_sort_kw == Py_None) - return iter; - lst = PyList_New(0); - if (lst == NULL) - goto bail; - while ((item = PyIter_Next(iter))) { - PyObject *key, *value; - if (!PyTuple_Check(item) || Py_SIZE(item) != 2) { - PyErr_SetString(PyExc_ValueError, "items must return 2-tuples"); - goto bail; - } - key = PyTuple_GET_ITEM(item, 0); - if (key == NULL) - goto bail; -#if PY_MAJOR_VERSION < 3 - else if (PyString_Check(key)) { - /* item can be added as-is */ - } -#endif /* PY_MAJOR_VERSION < 3 */ - else if (PyUnicode_Check(key)) { - /* item can be added as-is */ - } - else { - PyObject 
*tpl; - kstr = encoder_stringify_key(s, key); - if (kstr == NULL) - goto bail; - else if (kstr == Py_None) { - /* skipkeys */ - Py_DECREF(kstr); - continue; - } - value = PyTuple_GET_ITEM(item, 1); - if (value == NULL) - goto bail; - tpl = PyTuple_Pack(2, kstr, value); - if (tpl == NULL) - goto bail; - Py_CLEAR(kstr); - Py_DECREF(item); - item = tpl; - } - if (PyList_Append(lst, item)) - goto bail; - Py_DECREF(item); - } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; - sortfun = PyObject_GetAttrString(lst, "sort"); - if (sortfun == NULL) - goto bail; - sortres = PyObject_Call(sortfun, sortargs, s->item_sort_kw); - if (!sortres) - goto bail; - Py_DECREF(sortres); - Py_CLEAR(sortfun); - iter = PyObject_GetIter(lst); - Py_CLEAR(lst); - return iter; -bail: - Py_XDECREF(sortfun); - Py_XDECREF(kstr); - Py_XDECREF(item); - Py_XDECREF(lst); - Py_XDECREF(iter); - return NULL; -} - -/* Use JSONDecodeError exception to raise a nice looking ValueError subclass */ -static PyObject *JSONDecodeError = NULL; -static void -raise_errmsg(char *msg, PyObject *s, Py_ssize_t end) -{ - PyObject *exc = PyObject_CallFunction(JSONDecodeError, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end); - if (exc) { - PyErr_SetObject(JSONDecodeError, exc); - Py_DECREF(exc); - } -} - -static PyObject * -join_list_unicode(PyObject *lst) -{ - /* return u''.join(lst) */ - return PyUnicode_Join(JSON_EmptyUnicode, lst); -} - -#if PY_MAJOR_VERSION >= 3 -#define join_list_string join_list_unicode -#else /* PY_MAJOR_VERSION >= 3 */ -static PyObject * -join_list_string(PyObject *lst) -{ - /* return ''.join(lst) */ - static PyObject *joinfn = NULL; - if (joinfn == NULL) { - joinfn = PyObject_GetAttrString(JSON_EmptyStr, "join"); - if (joinfn == NULL) - return NULL; - } - return PyObject_CallFunctionObjArgs(joinfn, lst, NULL); -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) -{ - /* return (rval, idx) tuple, stealing reference to rval */ - PyObject *tpl; - PyObject *pyidx; - /* - steal a reference to rval, returns (rval, idx) - */ - if (rval == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - pyidx = PyInt_FromSsize_t(idx); - if (pyidx == NULL) { - Py_DECREF(rval); - return NULL; - } - tpl = PyTuple_New(2); - if (tpl == NULL) { - Py_DECREF(pyidx); - Py_DECREF(rval); - return NULL; - } - PyTuple_SET_ITEM(tpl, 0, rval); - PyTuple_SET_ITEM(tpl, 1, pyidx); - return tpl; -} - -#define APPEND_OLD_CHUNK \ - if (chunk != NULL) { \ - if (chunks == NULL) { \ - chunks = PyList_New(0); \ - if (chunks == NULL) { \ - goto bail; \ - } \ - } \ - if (PyList_Append(chunks, chunk)) { \ - goto bail; \ - } \ - Py_CLEAR(chunk); \ - } - -#if PY_MAJOR_VERSION < 3 -static PyObject * -scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr) -{ - /* Read the JSON string from PyString pystr. - end is the index of the first character after the quote. 
- encoding is the encoding of pystr (must be an ASCII superset) - if strict is zero then literal control characters are allowed - *next_end_ptr is a return-by-reference index of the character - after the end quote - - Return value is a new PyString (if ASCII-only) or PyUnicode - */ - PyObject *rval; - Py_ssize_t len = PyString_GET_SIZE(pystr); - Py_ssize_t begin = end - 1; - Py_ssize_t next = begin; - int has_unicode = 0; - char *buf = PyString_AS_STRING(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - PyObject *strchunk = NULL; - - if (len == end) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - else if (end < 0 || len < end) { - PyErr_SetString(PyExc_ValueError, "end is out of bounds"); - goto bail; - } - while (1) { - /* Find the end of the string or the next escape */ - Py_UNICODE c = 0; - for (next = end; next < len; next++) { - c = (unsigned char)buf[next]; - if (c == '"' || c == '\\') { - break; - } - else if (strict && c <= 0x1f) { - raise_errmsg(ERR_STRING_CONTROL, pystr, next); - goto bail; - } - else if (c > 0x7f) { - has_unicode = 1; - } - } - if (!(c == '"' || c == '\\')) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - /* Pick up this chunk if it's not zero length */ - if (next != end) { - APPEND_OLD_CHUNK - strchunk = PyString_FromStringAndSize(&buf[end], next - end); - if (strchunk == NULL) { - goto bail; - } - if (has_unicode) { - chunk = PyUnicode_FromEncodedObject(strchunk, encoding, NULL); - Py_DECREF(strchunk); - if (chunk == NULL) { - goto bail; - } - } - else { - chunk = strchunk; - } - } - next++; - if (c == '"') { - end = next; - break; - } - if (next == len) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - c = buf[next]; - if (c != 'u') { - /* Non-unicode backslash escapes */ - end = next + 1; - switch (c) { - case '"': break; - case '\\': break; - case '/': break; - case 'b': c = '\b'; break; - case 'f': c = '\f'; break; - case 'n': c = '\n'; break; - case 'r': c = '\r'; break; - case 't': c = '\t'; break; - default: c = 0; - } - if (c == 0) { - raise_errmsg(ERR_STRING_ESC1, pystr, end - 2); - goto bail; - } - } - else { - c = 0; - next++; - end = next + 4; - if (end >= len) { - raise_errmsg(ERR_STRING_ESC4, pystr, next - 1); - goto bail; - } - /* Decode 4 hex digits */ - for (; next < end; next++) { - JSON_UNICHR digit = (JSON_UNICHR)buf[next]; - c <<= 4; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c |= (digit - 'A' + 10); break; - default: - raise_errmsg(ERR_STRING_ESC4, pystr, end - 5); - goto bail; - } - } -#if defined(Py_UNICODE_WIDE) - /* Surrogate pair */ - if ((c & 0xfc00) == 0xd800) { - if (end + 6 < len && buf[next] == '\\' && buf[next+1] == 'u') { - JSON_UNICHR c2 = 0; - end += 6; - /* Decode 4 hex digits */ - for (next += 2; next < end; next++) { - c2 <<= 4; - JSON_UNICHR digit = buf[next]; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c2 |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c2 |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c2 |= (digit - 'A' + 10); break; - default: - raise_errmsg(ERR_STRING_ESC4, pystr, end - 
5); - goto bail; - } - } - if ((c2 & 0xfc00) != 0xdc00) { - /* not a low surrogate, rewind */ - end -= 6; - next = end; - } - else { - c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); - } - } - } -#endif /* Py_UNICODE_WIDE */ - } - if (c > 0x7f) { - has_unicode = 1; - } - APPEND_OLD_CHUNK - if (has_unicode) { - chunk = PyUnicode_FromOrdinal(c); - if (chunk == NULL) { - goto bail; - } - } - else { - char c_char = Py_CHARMASK(c); - chunk = PyString_FromStringAndSize(&c_char, 1); - if (chunk == NULL) { - goto bail; - } - } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else { - rval = JSON_EmptyStr; - Py_INCREF(rval); - } - } - else { - APPEND_OLD_CHUNK - rval = join_list_string(chunks); - if (rval == NULL) { - goto bail; - } - Py_CLEAR(chunks); - } - - *next_end_ptr = end; - return rval; -bail: - *next_end_ptr = -1; - Py_XDECREF(chunk); - Py_XDECREF(chunks); - return NULL; -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr) -{ - /* Read the JSON string from PyUnicode pystr. - end is the index of the first character after the quote. - if strict is zero then literal control characters are allowed - *next_end_ptr is a return-by-reference index of the character - after the end quote - - Return value is a new PyUnicode - */ - PyObject *rval; - Py_ssize_t begin = end - 1; - Py_ssize_t next = begin; - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - Py_ssize_t len = PyUnicode_GET_LENGTH(pystr); - void *buf = PyUnicode_DATA(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - - if (len == end) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - else if (end < 0 || len < end) { - PyErr_SetString(PyExc_ValueError, "end is out of bounds"); - goto bail; - } - while (1) { - /* Find the end of the string or the next escape */ - JSON_UNICHR c = 0; - for (next = end; next < len; next++) { - c = PyUnicode_READ(kind, buf, next); - if (c == '"' || c == '\\') { - break; - } - else if (strict && c <= 0x1f) { - raise_errmsg(ERR_STRING_CONTROL, pystr, next); - goto bail; - } - } - if (!(c == '"' || c == '\\')) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - /* Pick up this chunk if it's not zero length */ - if (next != end) { - APPEND_OLD_CHUNK -#if PY_MAJOR_VERSION < 3 - chunk = PyUnicode_FromUnicode(&((const Py_UNICODE *)buf)[end], next - end); -#else - chunk = PyUnicode_Substring(pystr, end, next); -#endif - if (chunk == NULL) { - goto bail; - } - } - next++; - if (c == '"') { - end = next; - break; - } - if (next == len) { - raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin); - goto bail; - } - c = PyUnicode_READ(kind, buf, next); - if (c != 'u') { - /* Non-unicode backslash escapes */ - end = next + 1; - switch (c) { - case '"': break; - case '\\': break; - case '/': break; - case 'b': c = '\b'; break; - case 'f': c = '\f'; break; - case 'n': c = '\n'; break; - case 'r': c = '\r'; break; - case 't': c = '\t'; break; - default: c = 0; - } - if (c == 0) { - raise_errmsg(ERR_STRING_ESC1, pystr, end - 2); - goto bail; - } - } - else { - c = 0; - next++; - end = next + 4; - if (end >= len) { - raise_errmsg(ERR_STRING_ESC4, pystr, next - 1); - goto bail; - } - /* Decode 4 hex digits */ - for (; next < end; next++) { - JSON_UNICHR digit = PyUnicode_READ(kind, buf, next); - c <<= 4; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c |= (digit - '0'); break; - 
case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c |= (digit - 'A' + 10); break; - default: - raise_errmsg(ERR_STRING_ESC4, pystr, end - 5); - goto bail; - } - } -#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE) - /* Surrogate pair */ - if ((c & 0xfc00) == 0xd800) { - JSON_UNICHR c2 = 0; - if (end + 6 < len && - PyUnicode_READ(kind, buf, next) == '\\' && - PyUnicode_READ(kind, buf, next + 1) == 'u') { - end += 6; - /* Decode 4 hex digits */ - for (next += 2; next < end; next++) { - JSON_UNICHR digit = PyUnicode_READ(kind, buf, next); - c2 <<= 4; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c2 |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c2 |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c2 |= (digit - 'A' + 10); break; - default: - raise_errmsg(ERR_STRING_ESC4, pystr, end - 5); - goto bail; - } - } - if ((c2 & 0xfc00) != 0xdc00) { - /* not a low surrogate, rewind */ - end -= 6; - next = end; - } - else { - c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); - } - } - } -#endif - } - APPEND_OLD_CHUNK - chunk = PyUnicode_FromOrdinal(c); - if (chunk == NULL) { - goto bail; - } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else { - rval = JSON_EmptyUnicode; - Py_INCREF(rval); - } - } - else { - APPEND_OLD_CHUNK - rval = join_list_unicode(chunks); - if (rval == NULL) { - goto bail; - } - Py_CLEAR(chunks); - } - *next_end_ptr = end; - return rval; -bail: - *next_end_ptr = -1; - Py_XDECREF(chunk); - Py_XDECREF(chunks); - return NULL; -} - -PyDoc_STRVAR(pydoc_scanstring, - "scanstring(basestring, end, encoding, strict=True) -> (str, end)\n" - "\n" - "Scan the string s for a JSON string. End is the index of the\n" - "character in s after the quote that started the JSON string.\n" - "Unescapes all valid JSON string escape sequences and raises ValueError\n" - "on attempt to decode an invalid string. If strict is False then literal\n" - "control characters are allowed in the string.\n" - "\n" - "Returns a tuple of the decoded string and the index of the character in s\n" - "after the end quote." -); - -static PyObject * -py_scanstring(PyObject* self UNUSED, PyObject *args) -{ - PyObject *pystr; - PyObject *rval; - Py_ssize_t end; - Py_ssize_t next_end = -1; - char *encoding = NULL; - int strict = 1; - if (!PyArg_ParseTuple(args, "OO&|zi:scanstring", &pystr, _convertPyInt_AsSsize_t, &end, &encoding, &strict)) { - return NULL; - } - if (encoding == NULL) { - encoding = DEFAULT_ENCODING; - } - if (PyUnicode_Check(pystr)) { - if (PyUnicode_READY(pystr)) - return NULL; - rval = scanstring_unicode(pystr, end, strict, &next_end); - } -#if PY_MAJOR_VERSION < 3 - /* Using a bytes input is unsupported for scanning in Python 3. - It is coerced to str in the decoder before it gets here. 
*/ - else if (PyString_Check(pystr)) { - rval = scanstring_str(pystr, end, encoding, strict, &next_end); - } -#endif - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } - return _build_rval_index_tuple(rval, next_end); -} - -PyDoc_STRVAR(pydoc_encode_basestring_ascii, - "encode_basestring_ascii(basestring) -> str\n" - "\n" - "Return an ASCII-only JSON representation of a Python string" -); - -static PyObject * -py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr) -{ - /* Return an ASCII-only JSON representation of a Python string */ - /* METH_O */ - if (PyBytes_Check(pystr)) { - return ascii_escape_str(pystr); - } - else if (PyUnicode_Check(pystr)) { - if (PyUnicode_READY(pystr)) - return NULL; - return ascii_escape_unicode(pystr); - } - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } -} - -static void -scanner_dealloc(PyObject *self) -{ - /* bpo-31095: UnTrack is needed before calling any callbacks */ - PyObject_GC_UnTrack(self); - scanner_clear(self); - Py_TYPE(self)->tp_free(self); -} - -static int -scanner_traverse(PyObject *self, visitproc visit, void *arg) -{ - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - Py_VISIT(s->encoding); - Py_VISIT(s->strict_bool); - Py_VISIT(s->object_hook); - Py_VISIT(s->pairs_hook); - Py_VISIT(s->parse_float); - Py_VISIT(s->parse_int); - Py_VISIT(s->parse_constant); - Py_VISIT(s->memo); - return 0; -} - -static int -scanner_clear(PyObject *self) -{ - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - Py_CLEAR(s->encoding); - Py_CLEAR(s->strict_bool); - Py_CLEAR(s->object_hook); - Py_CLEAR(s->pairs_hook); - Py_CLEAR(s->parse_float); - Py_CLEAR(s->parse_int); - Py_CLEAR(s->parse_constant); - Py_CLEAR(s->memo); - return 0; -} - -#if PY_MAJOR_VERSION < 3 -static PyObject * -_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON object from PyString pystr. - idx is the index of the first character after the opening curly brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing curly brace. 
- - Returns a new PyObject (usually a dict, but object_hook or - object_pairs_hook can change that) - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; - PyObject *key = NULL; - PyObject *val = NULL; - char *encoding = PyString_AS_STRING(s->encoding); - int has_pairs_hook = (s->pairs_hook != Py_None); - int did_parse = 0; - Py_ssize_t next_idx; - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } - - /* skip whitespace after { */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the object is non-empty */ - if (idx <= end_idx && str[idx] != '}') { - int trailing_delimiter = 0; - while (idx <= end_idx) { - PyObject *memokey; - trailing_delimiter = 0; - - /* read key */ - if (str[idx] != '"') { - raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx); - goto bail; - } - key = scanstring_str(pystr, idx + 1, encoding, s->strict, &next_idx); - if (key == NULL) - goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } - idx = next_idx; - - /* skip whitespace between key and : delimiter, read :, skip whitespace */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - if (idx > end_idx || str[idx] != ':') { - raise_errmsg(ERR_OBJECT_PROPERTY_DELIMITER, pystr, idx); - goto bail; - } - idx++; - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* read any JSON data type */ - val = scan_once_str(s, pystr, idx, &next_idx); - if (val == NULL) - goto bail; - - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } - idx = next_idx; - - /* skip whitespace before } or , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the object is closed or we didn't get the , delimiter */ - did_parse = 1; - if (idx > end_idx) break; - if (str[idx] == '}') { - break; - } - else if (str[idx] != ',') { - raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , delimiter */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - trailing_delimiter = 1; - } - if (trailing_delimiter) { - raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx); - goto bail; - } - } - /* verify that idx < end_idx, str[idx] should be '}' */ - if (idx > end_idx || str[idx] != '}') { - if (did_parse) { - raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx); - } else { - raise_errmsg(ERR_OBJECT_PROPERTY_FIRST, pystr, idx); - } - goto bail; - } - - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - - /* if object_hook is not None: rval = object_hook(rval) */ - if (s->object_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); - if (val == NULL) - goto bail; - Py_DECREF(rval); - rval = val; - val = NULL; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - 
Py_XDECREF(rval); - Py_XDECREF(key); - Py_XDECREF(val); - Py_XDECREF(pairs); - return NULL; -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -_parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON object from PyUnicode pystr. - idx is the index of the first character after the opening curly brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing curly brace. - - Returns a new PyObject (usually a dict, but object_hook can change that) - */ - void *str = PyUnicode_DATA(pystr); - Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1; - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; - PyObject *key = NULL; - PyObject *val = NULL; - int has_pairs_hook = (s->pairs_hook != Py_None); - int did_parse = 0; - Py_ssize_t next_idx; - - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } - - /* skip whitespace after { */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - - /* only loop if the object is non-empty */ - if (idx <= end_idx && PyUnicode_READ(kind, str, idx) != '}') { - int trailing_delimiter = 0; - while (idx <= end_idx) { - PyObject *memokey; - trailing_delimiter = 0; - - /* read key */ - if (PyUnicode_READ(kind, str, idx) != '"') { - raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx); - goto bail; - } - key = scanstring_unicode(pystr, idx + 1, s->strict, &next_idx); - if (key == NULL) - goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } - idx = next_idx; - - /* skip whitespace between key and : delimiter, read :, skip - whitespace */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ':') { - raise_errmsg(ERR_OBJECT_PROPERTY_DELIMITER, pystr, idx); - goto bail; - } - idx++; - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - - /* read any JSON term */ - val = scan_once_unicode(s, pystr, idx, &next_idx); - if (val == NULL) - goto bail; - - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } - idx = next_idx; - - /* skip whitespace before } or , */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - - /* bail if the object is closed or we didn't get the , - delimiter */ - did_parse = 1; - if (idx > end_idx) break; - if (PyUnicode_READ(kind, str, idx) == '}') { - break; - } - else if (PyUnicode_READ(kind, str, idx) != ',') { - raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , delimiter */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - trailing_delimiter = 1; - } - if (trailing_delimiter) { - raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx); - goto bail; - } - } - - /* verify that idx < end_idx, str[idx] should be '}' */ - if (idx > end_idx || PyUnicode_READ(kind, str, idx) != '}') { - if (did_parse) { - 
raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx); - } else { - raise_errmsg(ERR_OBJECT_PROPERTY_FIRST, pystr, idx); - } - goto bail; - } - - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - - /* if object_hook is not None: rval = object_hook(rval) */ - if (s->object_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); - if (val == NULL) - goto bail; - Py_DECREF(rval); - rval = val; - val = NULL; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(rval); - Py_XDECREF(key); - Py_XDECREF(val); - Py_XDECREF(pairs); - return NULL; -} - -#if PY_MAJOR_VERSION < 3 -static PyObject * -_parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON array from PyString pystr. - idx is the index of the first character after the opening brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing brace. - - Returns a new PyList - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - PyObject *val = NULL; - PyObject *rval = PyList_New(0); - Py_ssize_t next_idx; - if (rval == NULL) - return NULL; - - /* skip whitespace after [ */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the array is non-empty */ - if (idx <= end_idx && str[idx] != ']') { - int trailing_delimiter = 0; - while (idx <= end_idx) { - trailing_delimiter = 0; - /* read any JSON term and de-tuplefy the (rval, idx) */ - val = scan_once_str(s, pystr, idx, &next_idx); - if (val == NULL) { - goto bail; - } - - if (PyList_Append(rval, val) == -1) - goto bail; - - Py_CLEAR(val); - idx = next_idx; - - /* skip whitespace between term and , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the array is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (str[idx] == ']') { - break; - } - else if (str[idx] != ',') { - raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - trailing_delimiter = 1; - } - if (trailing_delimiter) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - goto bail; - } - } - - /* verify that idx < end_idx, str[idx] should be ']' */ - if (idx > end_idx || str[idx] != ']') { - if (PyList_GET_SIZE(rval)) { - raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx); - } else { - raise_errmsg(ERR_ARRAY_VALUE_FIRST, pystr, idx); - } - goto bail; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(val); - Py_DECREF(rval); - return NULL; -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -_parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON array from PyString pystr. - idx is the index of the first character after the opening brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing brace. 
- - Returns a new PyList - */ - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - void *str = PyUnicode_DATA(pystr); - Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1; - PyObject *val = NULL; - PyObject *rval = PyList_New(0); - Py_ssize_t next_idx; - if (rval == NULL) - return NULL; - - /* skip whitespace after [ */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - - /* only loop if the array is non-empty */ - if (idx <= end_idx && PyUnicode_READ(kind, str, idx) != ']') { - int trailing_delimiter = 0; - while (idx <= end_idx) { - trailing_delimiter = 0; - /* read any JSON term */ - val = scan_once_unicode(s, pystr, idx, &next_idx); - if (val == NULL) { - goto bail; - } - - if (PyList_Append(rval, val) == -1) - goto bail; - - Py_CLEAR(val); - idx = next_idx; - - /* skip whitespace between term and , */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - - /* bail if the array is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (PyUnicode_READ(kind, str, idx) == ']') { - break; - } - else if (PyUnicode_READ(kind, str, idx) != ',') { - raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , */ - while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; - trailing_delimiter = 1; - } - if (trailing_delimiter) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - goto bail; - } - } - - /* verify that idx < end_idx, str[idx] should be ']' */ - if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ']') { - if (PyList_GET_SIZE(rval)) { - raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx); - } else { - raise_errmsg(ERR_ARRAY_VALUE_FIRST, pystr, idx); - } - goto bail; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(val); - Py_DECREF(rval); - return NULL; -} - -static PyObject * -_parse_constant(PyScannerObject *s, PyObject *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON constant from PyString pystr. - constant is the Python string that was found - ("NaN", "Infinity", "-Infinity"). - idx is the index of the first character of the constant - *next_idx_ptr is a return-by-reference index to the first character after - the constant. - - Returns the result of parse_constant - */ - PyObject *rval; - - /* rval = parse_constant(constant) */ - rval = PyObject_CallFunctionObjArgs(s->parse_constant, constant, NULL); - idx += PyString_GET_SIZE(constant); - *next_idx_ptr = idx; - return rval; -} - -#if PY_MAJOR_VERSION < 3 -static PyObject * -_match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON number from PyString pystr. - idx is the index of the first character of the number - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of that number: - PyInt, PyLong, or PyFloat. 
- May return other types if parse_int or parse_float are set - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - Py_ssize_t idx = start; - int is_float = 0; - PyObject *rval; - PyObject *numstr; - - /* read a sign if it's there, make sure it's not the end of the string */ - if (str[idx] == '-') { - if (idx >= end_idx) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - idx++; - } - - /* read as many integer digits as we find as long as it doesn't start with 0 */ - if (str[idx] >= '1' && str[idx] <= '9') { - idx++; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - /* if it starts with 0 we only expect one integer digit */ - else if (str[idx] == '0') { - idx++; - } - /* no integer digits, error */ - else { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - - /* if the next char is '.' followed by a digit then read all float digits */ - if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') { - is_float = 1; - idx += 2; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - - /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ - if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) { - - /* save the index of the 'e' or 'E' just in case we need to backtrack */ - Py_ssize_t e_start = idx; - idx++; - - /* read an exponent sign if present */ - if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++; - - /* read all digits */ - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - - /* if we got a digit, then parse as float. if not, backtrack */ - if (str[idx - 1] >= '0' && str[idx - 1] <= '9') { - is_float = 1; - } - else { - idx = e_start; - } - } - - /* copy the section we determined to be a number */ - numstr = PyString_FromStringAndSize(&str[start], idx - start); - if (numstr == NULL) - return NULL; - if (is_float) { - /* parse as a float using a fast path if available, otherwise call user defined method */ - if (s->parse_float != (PyObject *)&PyFloat_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); - } - else { - /* rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); */ - double d = PyOS_string_to_double(PyString_AS_STRING(numstr), - NULL, NULL); - if (d == -1.0 && PyErr_Occurred()) - return NULL; - rval = PyFloat_FromDouble(d); - } - } - else { - /* parse as an int using a fast path if available, otherwise call user defined method */ - if (s->parse_int != (PyObject *)&PyInt_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL); - } - else { - rval = PyInt_FromString(PyString_AS_STRING(numstr), NULL, 10); - } - } - Py_DECREF(numstr); - *next_idx_ptr = idx; - return rval; -} -#endif /* PY_MAJOR_VERSION < 3 */ - -static PyObject * -_match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) -{ - /* Read a JSON number from PyUnicode pystr. - idx is the index of the first character of the number - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of that number: - PyInt, PyLong, or PyFloat. 
- May return other types if parse_int or parse_float are set - */ - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - void *str = PyUnicode_DATA(pystr); - Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1; - Py_ssize_t idx = start; - int is_float = 0; - JSON_UNICHR c; - PyObject *rval; - PyObject *numstr; - - /* read a sign if it's there, make sure it's not the end of the string */ - if (PyUnicode_READ(kind, str, idx) == '-') { - if (idx >= end_idx) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - idx++; - } - - /* read as many integer digits as we find as long as it doesn't start with 0 */ - c = PyUnicode_READ(kind, str, idx); - if (c == '0') { - /* if it starts with 0 we only expect one integer digit */ - idx++; - } - else if (IS_DIGIT(c)) { - idx++; - while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) { - idx++; - } - } - else { - /* no integer digits, error */ - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - - /* if the next char is '.' followed by a digit then read all float digits */ - if (idx < end_idx && - PyUnicode_READ(kind, str, idx) == '.' && - IS_DIGIT(PyUnicode_READ(kind, str, idx + 1))) { - is_float = 1; - idx += 2; - while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) idx++; - } - - /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ - if (idx < end_idx && - (PyUnicode_READ(kind, str, idx) == 'e' || - PyUnicode_READ(kind, str, idx) == 'E')) { - Py_ssize_t e_start = idx; - idx++; - - /* read an exponent sign if present */ - if (idx < end_idx && - (PyUnicode_READ(kind, str, idx) == '-' || - PyUnicode_READ(kind, str, idx) == '+')) idx++; - - /* read all digits */ - while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) idx++; - - /* if we got a digit, then parse as float. if not, backtrack */ - if (IS_DIGIT(PyUnicode_READ(kind, str, idx - 1))) { - is_float = 1; - } - else { - idx = e_start; - } - } - - /* copy the section we determined to be a number */ -#if PY_MAJOR_VERSION >= 3 - numstr = PyUnicode_Substring(pystr, start, idx); -#else - numstr = PyUnicode_FromUnicode(&((Py_UNICODE *)str)[start], idx - start); -#endif - if (numstr == NULL) - return NULL; - if (is_float) { - /* parse as a float using a fast path if available, otherwise call user defined method */ - if (s->parse_float != (PyObject *)&PyFloat_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); - } - else { -#if PY_MAJOR_VERSION >= 3 - rval = PyFloat_FromString(numstr); -#else - rval = PyFloat_FromString(numstr, NULL); -#endif - } - } - else { - /* no fast path for unicode -> int, just call */ - rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL); - } - Py_DECREF(numstr); - *next_idx_ptr = idx; - return rval; -} - -#if PY_MAJOR_VERSION < 3 -static PyObject * -scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read one JSON term (of any kind) from PyString pystr. - idx is the index of the first character of the term - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of the term. 
- */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t length = PyString_GET_SIZE(pystr); - PyObject *rval = NULL; - int fallthrough = 0; - if (idx < 0 || idx >= length) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - switch (str[idx]) { - case '"': - /* string */ - rval = scanstring_str(pystr, idx + 1, - PyString_AS_STRING(s->encoding), - s->strict, - next_idx_ptr); - break; - case '{': - /* object */ - if (Py_EnterRecursiveCall(" while decoding a JSON object " - "from a string")) - return NULL; - rval = _parse_object_str(s, pystr, idx + 1, next_idx_ptr); - Py_LeaveRecursiveCall(); - break; - case '[': - /* array */ - if (Py_EnterRecursiveCall(" while decoding a JSON array " - "from a string")) - return NULL; - rval = _parse_array_str(s, pystr, idx + 1, next_idx_ptr); - Py_LeaveRecursiveCall(); - break; - case 'n': - /* null */ - if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') { - Py_INCREF(Py_None); - *next_idx_ptr = idx + 4; - rval = Py_None; - } - else - fallthrough = 1; - break; - case 't': - /* true */ - if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') { - Py_INCREF(Py_True); - *next_idx_ptr = idx + 4; - rval = Py_True; - } - else - fallthrough = 1; - break; - case 'f': - /* false */ - if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') { - Py_INCREF(Py_False); - *next_idx_ptr = idx + 5; - rval = Py_False; - } - else - fallthrough = 1; - break; - case 'N': - /* NaN */ - if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') { - rval = _parse_constant(s, JSON_NaN, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - case 'I': - /* Infinity */ - if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') { - rval = _parse_constant(s, JSON_Infinity, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - case '-': - /* -Infinity */ - if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') { - rval = _parse_constant(s, JSON_NegInfinity, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - default: - fallthrough = 1; - } - /* Didn't find a string, object, array, or named constant. Look for a number. */ - if (fallthrough) - rval = _match_number_str(s, pystr, idx, next_idx_ptr); - return rval; -} -#endif /* PY_MAJOR_VERSION < 3 */ - - -static PyObject * -scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read one JSON term (of any kind) from PyUnicode pystr. - idx is the index of the first character of the term - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of the term. 
- */ - PY2_UNUSED int kind = PyUnicode_KIND(pystr); - void *str = PyUnicode_DATA(pystr); - Py_ssize_t length = PyUnicode_GET_LENGTH(pystr); - PyObject *rval = NULL; - int fallthrough = 0; - if (idx < 0 || idx >= length) { - raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx); - return NULL; - } - switch (PyUnicode_READ(kind, str, idx)) { - case '"': - /* string */ - rval = scanstring_unicode(pystr, idx + 1, - s->strict, - next_idx_ptr); - break; - case '{': - /* object */ - if (Py_EnterRecursiveCall(" while decoding a JSON object " - "from a unicode string")) - return NULL; - rval = _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr); - Py_LeaveRecursiveCall(); - break; - case '[': - /* array */ - if (Py_EnterRecursiveCall(" while decoding a JSON array " - "from a unicode string")) - return NULL; - rval = _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr); - Py_LeaveRecursiveCall(); - break; - case 'n': - /* null */ - if ((idx + 3 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'u' && - PyUnicode_READ(kind, str, idx + 2) == 'l' && - PyUnicode_READ(kind, str, idx + 3) == 'l') { - Py_INCREF(Py_None); - *next_idx_ptr = idx + 4; - rval = Py_None; - } - else - fallthrough = 1; - break; - case 't': - /* true */ - if ((idx + 3 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'r' && - PyUnicode_READ(kind, str, idx + 2) == 'u' && - PyUnicode_READ(kind, str, idx + 3) == 'e') { - Py_INCREF(Py_True); - *next_idx_ptr = idx + 4; - rval = Py_True; - } - else - fallthrough = 1; - break; - case 'f': - /* false */ - if ((idx + 4 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'a' && - PyUnicode_READ(kind, str, idx + 2) == 'l' && - PyUnicode_READ(kind, str, idx + 3) == 's' && - PyUnicode_READ(kind, str, idx + 4) == 'e') { - Py_INCREF(Py_False); - *next_idx_ptr = idx + 5; - rval = Py_False; - } - else - fallthrough = 1; - break; - case 'N': - /* NaN */ - if ((idx + 2 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'a' && - PyUnicode_READ(kind, str, idx + 2) == 'N') { - rval = _parse_constant(s, JSON_NaN, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - case 'I': - /* Infinity */ - if ((idx + 7 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'n' && - PyUnicode_READ(kind, str, idx + 2) == 'f' && - PyUnicode_READ(kind, str, idx + 3) == 'i' && - PyUnicode_READ(kind, str, idx + 4) == 'n' && - PyUnicode_READ(kind, str, idx + 5) == 'i' && - PyUnicode_READ(kind, str, idx + 6) == 't' && - PyUnicode_READ(kind, str, idx + 7) == 'y') { - rval = _parse_constant(s, JSON_Infinity, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - case '-': - /* -Infinity */ - if ((idx + 8 < length) && - PyUnicode_READ(kind, str, idx + 1) == 'I' && - PyUnicode_READ(kind, str, idx + 2) == 'n' && - PyUnicode_READ(kind, str, idx + 3) == 'f' && - PyUnicode_READ(kind, str, idx + 4) == 'i' && - PyUnicode_READ(kind, str, idx + 5) == 'n' && - PyUnicode_READ(kind, str, idx + 6) == 'i' && - PyUnicode_READ(kind, str, idx + 7) == 't' && - PyUnicode_READ(kind, str, idx + 8) == 'y') { - rval = _parse_constant(s, JSON_NegInfinity, idx, next_idx_ptr); - } - else - fallthrough = 1; - break; - default: - fallthrough = 1; - } - /* Didn't find a string, object, array, or named constant. Look for a number. 
*/ - if (fallthrough) - rval = _match_number_unicode(s, pystr, idx, next_idx_ptr); - return rval; -} - -static PyObject * -scanner_call(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* Python callable interface to scan_once_{str,unicode} */ - PyObject *pystr; - PyObject *rval; - Py_ssize_t idx; - Py_ssize_t next_idx = -1; - static char *kwlist[] = {"string", "idx", NULL}; - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:scan_once", kwlist, &pystr, _convertPyInt_AsSsize_t, &idx)) - return NULL; - - if (PyUnicode_Check(pystr)) { - if (PyUnicode_READY(pystr)) - return NULL; - rval = scan_once_unicode(s, pystr, idx, &next_idx); - } -#if PY_MAJOR_VERSION < 3 - else if (PyString_Check(pystr)) { - rval = scan_once_str(s, pystr, idx, &next_idx); - } -#endif /* PY_MAJOR_VERSION < 3 */ - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } - PyDict_Clear(s->memo); - return _build_rval_index_tuple(rval, next_idx); -} - -static PyObject * -JSON_ParseEncoding(PyObject *encoding) -{ - if (encoding == Py_None) - return JSON_InternFromString(DEFAULT_ENCODING); -#if PY_MAJOR_VERSION >= 3 - if (PyUnicode_Check(encoding)) { - if (PyUnicode_AsUTF8(encoding) == NULL) { - return NULL; - } - Py_INCREF(encoding); - return encoding; - } -#else /* PY_MAJOR_VERSION >= 3 */ - if (PyString_Check(encoding)) { - Py_INCREF(encoding); - return encoding; - } - if (PyUnicode_Check(encoding)) - return PyUnicode_AsEncodedString(encoding, NULL, NULL); -#endif /* PY_MAJOR_VERSION >= 3 */ - PyErr_SetString(PyExc_TypeError, "encoding must be a string"); - return NULL; -} - -static PyObject * -scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - /* Initialize Scanner object */ - PyObject *ctx; - static char *kwlist[] = {"context", NULL}; - PyScannerObject *s; - PyObject *encoding; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:make_scanner", kwlist, &ctx)) - return NULL; - - s = (PyScannerObject *)type->tp_alloc(type, 0); - if (s == NULL) - return NULL; - - if (s->memo == NULL) { - s->memo = PyDict_New(); - if (s->memo == NULL) - goto bail; - } - - encoding = PyObject_GetAttrString(ctx, "encoding"); - if (encoding == NULL) - goto bail; - s->encoding = JSON_ParseEncoding(encoding); - Py_XDECREF(encoding); - if (s->encoding == NULL) - goto bail; - - /* All of these will fail "gracefully" so we don't need to verify them */ - s->strict_bool = PyObject_GetAttrString(ctx, "strict"); - if (s->strict_bool == NULL) - goto bail; - s->strict = PyObject_IsTrue(s->strict_bool); - if (s->strict < 0) - goto bail; - s->object_hook = PyObject_GetAttrString(ctx, "object_hook"); - if (s->object_hook == NULL) - goto bail; - s->pairs_hook = PyObject_GetAttrString(ctx, "object_pairs_hook"); - if (s->pairs_hook == NULL) - goto bail; - s->parse_float = PyObject_GetAttrString(ctx, "parse_float"); - if (s->parse_float == NULL) - goto bail; - s->parse_int = PyObject_GetAttrString(ctx, "parse_int"); - if (s->parse_int == NULL) - goto bail; - s->parse_constant = PyObject_GetAttrString(ctx, "parse_constant"); - if (s->parse_constant == NULL) - goto bail; - - return (PyObject *)s; - -bail: - Py_DECREF(s); - return NULL; -} - -PyDoc_STRVAR(scanner_doc, "JSON scanner object"); - -static -PyTypeObject PyScannerType = { - PyVarObject_HEAD_INIT(NULL, 0) - "simplejson._speedups.Scanner", /* tp_name */ - sizeof(PyScannerObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 
scanner_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - scanner_call, /* tp_call */ - 0, /* tp_str */ - 0,/* PyObject_GenericGetAttr, */ /* tp_getattro */ - 0,/* PyObject_GenericSetAttr, */ /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - scanner_doc, /* tp_doc */ - scanner_traverse, /* tp_traverse */ - scanner_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - scanner_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0,/* PyType_GenericAlloc, */ /* tp_alloc */ - scanner_new, /* tp_new */ - 0,/* PyObject_GC_Del, */ /* tp_free */ -}; - -static PyObject * -encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = { - "markers", - "default", - "encoder", - "indent", - "key_separator", - "item_separator", - "sort_keys", - "skipkeys", - "allow_nan", - "key_memo", - "use_decimal", - "namedtuple_as_object", - "tuple_as_array", - "int_as_string_bitcount", - "item_sort_key", - "encoding", - "for_json", - "ignore_nan", - "Decimal", - "iterable_as_array", - NULL}; - - PyEncoderObject *s; - PyObject *markers, *defaultfn, *encoder, *indent, *key_separator; - PyObject *item_separator, *sort_keys, *skipkeys, *allow_nan, *key_memo; - PyObject *use_decimal, *namedtuple_as_object, *tuple_as_array, *iterable_as_array; - PyObject *int_as_string_bitcount, *item_sort_key, *encoding, *for_json; - PyObject *ignore_nan, *Decimal; - int is_true; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOOOOOOOOOOOOO:make_encoder", kwlist, - &markers, &defaultfn, &encoder, &indent, &key_separator, &item_separator, - &sort_keys, &skipkeys, &allow_nan, &key_memo, &use_decimal, - &namedtuple_as_object, &tuple_as_array, - &int_as_string_bitcount, &item_sort_key, &encoding, &for_json, - &ignore_nan, &Decimal, &iterable_as_array)) - return NULL; - - s = (PyEncoderObject *)type->tp_alloc(type, 0); - if (s == NULL) - return NULL; - - Py_INCREF(markers); - s->markers = markers; - Py_INCREF(defaultfn); - s->defaultfn = defaultfn; - Py_INCREF(encoder); - s->encoder = encoder; -#if PY_MAJOR_VERSION >= 3 - if (encoding == Py_None) { - s->encoding = NULL; - } - else -#endif /* PY_MAJOR_VERSION >= 3 */ - { - s->encoding = JSON_ParseEncoding(encoding); - if (s->encoding == NULL) - goto bail; - } - Py_INCREF(indent); - s->indent = indent; - Py_INCREF(key_separator); - s->key_separator = key_separator; - Py_INCREF(item_separator); - s->item_separator = item_separator; - Py_INCREF(skipkeys); - s->skipkeys_bool = skipkeys; - s->skipkeys = PyObject_IsTrue(skipkeys); - if (s->skipkeys < 0) - goto bail; - Py_INCREF(key_memo); - s->key_memo = key_memo; - s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii); - is_true = PyObject_IsTrue(ignore_nan); - if (is_true < 0) - goto bail; - s->allow_or_ignore_nan = is_true ? JSON_IGNORE_NAN : 0; - is_true = PyObject_IsTrue(allow_nan); - if (is_true < 0) - goto bail; - s->allow_or_ignore_nan |= is_true ? 
JSON_ALLOW_NAN : 0; - s->use_decimal = PyObject_IsTrue(use_decimal); - if (s->use_decimal < 0) - goto bail; - s->namedtuple_as_object = PyObject_IsTrue(namedtuple_as_object); - if (s->namedtuple_as_object < 0) - goto bail; - s->tuple_as_array = PyObject_IsTrue(tuple_as_array); - if (s->tuple_as_array < 0) - goto bail; - s->iterable_as_array = PyObject_IsTrue(iterable_as_array); - if (s->iterable_as_array < 0) - goto bail; - if (PyInt_Check(int_as_string_bitcount) || PyLong_Check(int_as_string_bitcount)) { - static const unsigned long long_long_bitsize = SIZEOF_LONG_LONG * 8; - long int_as_string_bitcount_val = PyLong_AsLong(int_as_string_bitcount); - if (int_as_string_bitcount_val > 0 && int_as_string_bitcount_val < (long)long_long_bitsize) { - s->max_long_size = PyLong_FromUnsignedLongLong(1ULL << (int)int_as_string_bitcount_val); - s->min_long_size = PyLong_FromLongLong(-1LL << (int)int_as_string_bitcount_val); - if (s->min_long_size == NULL || s->max_long_size == NULL) { - goto bail; - } - } - else { - PyErr_Format(PyExc_TypeError, - "int_as_string_bitcount (%ld) must be greater than 0 and less than the number of bits of a `long long` type (%lu bits)", - int_as_string_bitcount_val, long_long_bitsize); - goto bail; - } - } - else if (int_as_string_bitcount == Py_None) { - Py_INCREF(Py_None); - s->max_long_size = Py_None; - Py_INCREF(Py_None); - s->min_long_size = Py_None; - } - else { - PyErr_SetString(PyExc_TypeError, "int_as_string_bitcount must be None or an integer"); - goto bail; - } - if (item_sort_key != Py_None) { - if (!PyCallable_Check(item_sort_key)) { - PyErr_SetString(PyExc_TypeError, "item_sort_key must be None or callable"); - goto bail; - } - } - else { - is_true = PyObject_IsTrue(sort_keys); - if (is_true < 0) - goto bail; - if (is_true) { - static PyObject *itemgetter0 = NULL; - if (!itemgetter0) { - PyObject *operator = PyImport_ImportModule("operator"); - if (!operator) - goto bail; - itemgetter0 = PyObject_CallMethod(operator, "itemgetter", "i", 0); - Py_DECREF(operator); - } - item_sort_key = itemgetter0; - if (!item_sort_key) - goto bail; - } - } - if (item_sort_key == Py_None) { - Py_INCREF(Py_None); - s->item_sort_kw = Py_None; - } - else { - s->item_sort_kw = PyDict_New(); - if (s->item_sort_kw == NULL) - goto bail; - if (PyDict_SetItemString(s->item_sort_kw, "key", item_sort_key)) - goto bail; - } - Py_INCREF(sort_keys); - s->sort_keys = sort_keys; - Py_INCREF(item_sort_key); - s->item_sort_key = item_sort_key; - Py_INCREF(Decimal); - s->Decimal = Decimal; - s->for_json = PyObject_IsTrue(for_json); - if (s->for_json < 0) - goto bail; - - return (PyObject *)s; - -bail: - Py_DECREF(s); - return NULL; -} - -static PyObject * -encoder_call(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* Python callable interface to encode_listencode_obj */ - static char *kwlist[] = {"obj", "_current_indent_level", NULL}; - PyObject *obj; - Py_ssize_t indent_level; - PyEncoderObject *s; - JSON_Accu rval; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:_iterencode", kwlist, - &obj, _convertPyInt_AsSsize_t, &indent_level)) - return NULL; - if (JSON_Accu_Init(&rval)) - return NULL; - if (encoder_listencode_obj(s, &rval, obj, indent_level)) { - JSON_Accu_Destroy(&rval); - return NULL; - } - return JSON_Accu_FinishAsList(&rval); -} - -static PyObject * -_encoded_const(PyObject *obj) -{ - /* Return the JSON string representation of None, True, False */ - if (obj == Py_None) { - static PyObject *s_null = NULL; - if 
(s_null == NULL) { - s_null = JSON_InternFromString("null"); - } - Py_INCREF(s_null); - return s_null; - } - else if (obj == Py_True) { - static PyObject *s_true = NULL; - if (s_true == NULL) { - s_true = JSON_InternFromString("true"); - } - Py_INCREF(s_true); - return s_true; - } - else if (obj == Py_False) { - static PyObject *s_false = NULL; - if (s_false == NULL) { - s_false = JSON_InternFromString("false"); - } - Py_INCREF(s_false); - return s_false; - } - else { - PyErr_SetString(PyExc_ValueError, "not a const"); - return NULL; - } -} - -static PyObject * -encoder_encode_float(PyEncoderObject *s, PyObject *obj) -{ - /* Return the JSON representation of a PyFloat */ - double i = PyFloat_AS_DOUBLE(obj); - if (!Py_IS_FINITE(i)) { - if (!s->allow_or_ignore_nan) { - PyErr_SetString(PyExc_ValueError, "Out of range float values are not JSON compliant"); - return NULL; - } - if (s->allow_or_ignore_nan & JSON_IGNORE_NAN) { - return _encoded_const(Py_None); - } - /* JSON_ALLOW_NAN is set */ - else if (i > 0) { - Py_INCREF(JSON_Infinity); - return JSON_Infinity; - } - else if (i < 0) { - Py_INCREF(JSON_NegInfinity); - return JSON_NegInfinity; - } - else { - Py_INCREF(JSON_NaN); - return JSON_NaN; - } - } - /* Use a better float format here? */ - if (PyFloat_CheckExact(obj)) { - return PyObject_Repr(obj); - } - else { - /* See #118, do not trust custom str/repr */ - PyObject *res; - PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyFloat_Type, obj, NULL); - if (tmp == NULL) { - return NULL; - } - res = PyObject_Repr(tmp); - Py_DECREF(tmp); - return res; - } -} - -static PyObject * -encoder_encode_string(PyEncoderObject *s, PyObject *obj) -{ - /* Return the JSON representation of a string */ - PyObject *encoded; - - if (s->fast_encode) { - return py_encode_basestring_ascii(NULL, obj); - } - encoded = PyObject_CallFunctionObjArgs(s->encoder, obj, NULL); - if (encoded != NULL && -#if PY_MAJOR_VERSION < 3 - !PyString_Check(encoded) && -#endif /* PY_MAJOR_VERSION < 3 */ - !PyUnicode_Check(encoded)) - { - PyErr_Format(PyExc_TypeError, - "encoder() must return a string, not %.80s", - Py_TYPE(encoded)->tp_name); - Py_DECREF(encoded); - return NULL; - } - return encoded; -} - -static int -_steal_accumulate(JSON_Accu *accu, PyObject *stolen) -{ - /* Append stolen and then decrement its reference count */ - int rval = JSON_Accu_Accumulate(accu, stolen); - Py_DECREF(stolen); - return rval; -} - -static int -encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ssize_t indent_level) -{ - /* Encode Python object obj to a JSON term, rval is a PyList */ - int rv = -1; - do { - if (obj == Py_None || obj == Py_True || obj == Py_False) { - PyObject *cstr = _encoded_const(obj); - if (cstr != NULL) - rv = _steal_accumulate(rval, cstr); - } - else if ((PyBytes_Check(obj) && s->encoding != NULL) || - PyUnicode_Check(obj)) - { - PyObject *encoded = encoder_encode_string(s, obj); - if (encoded != NULL) - rv = _steal_accumulate(rval, encoded); - } - else if (PyInt_Check(obj) || PyLong_Check(obj)) { - PyObject *encoded; - if (PyInt_CheckExact(obj) || PyLong_CheckExact(obj)) { - encoded = PyObject_Str(obj); - } - else { - /* See #118, do not trust custom str/repr */ - PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, obj, NULL); - if (tmp == NULL) { - encoded = NULL; - } - else { - encoded = PyObject_Str(tmp); - Py_DECREF(tmp); - } - } - if (encoded != NULL) { - encoded = maybe_quote_bigint(s, encoded, obj); - if (encoded == NULL) - break; - rv = 
_steal_accumulate(rval, encoded); - } - } - else if (PyFloat_Check(obj)) { - PyObject *encoded = encoder_encode_float(s, obj); - if (encoded != NULL) - rv = _steal_accumulate(rval, encoded); - } - else if (s->for_json && _has_for_json_hook(obj)) { - PyObject *newobj; - if (Py_EnterRecursiveCall(" while encoding a JSON object")) - return rv; - newobj = PyObject_CallMethod(obj, "for_json", NULL); - if (newobj != NULL) { - rv = encoder_listencode_obj(s, rval, newobj, indent_level); - Py_DECREF(newobj); - } - Py_LeaveRecursiveCall(); - } - else if (s->namedtuple_as_object && _is_namedtuple(obj)) { - PyObject *newobj; - if (Py_EnterRecursiveCall(" while encoding a JSON object")) - return rv; - newobj = PyObject_CallMethod(obj, "_asdict", NULL); - if (newobj != NULL) { - rv = encoder_listencode_dict(s, rval, newobj, indent_level); - Py_DECREF(newobj); - } - Py_LeaveRecursiveCall(); - } - else if (PyList_Check(obj) || (s->tuple_as_array && PyTuple_Check(obj))) { - if (Py_EnterRecursiveCall(" while encoding a JSON object")) - return rv; - rv = encoder_listencode_list(s, rval, obj, indent_level); - Py_LeaveRecursiveCall(); - } - else if (PyDict_Check(obj)) { - if (Py_EnterRecursiveCall(" while encoding a JSON object")) - return rv; - rv = encoder_listencode_dict(s, rval, obj, indent_level); - Py_LeaveRecursiveCall(); - } - else if (s->use_decimal && PyObject_TypeCheck(obj, (PyTypeObject *)s->Decimal)) { - PyObject *encoded = PyObject_Str(obj); - if (encoded != NULL) - rv = _steal_accumulate(rval, encoded); - } - else if (is_raw_json(obj)) - { - PyObject *encoded = PyObject_GetAttrString(obj, "encoded_json"); - if (encoded != NULL) - rv = _steal_accumulate(rval, encoded); - } - else { - PyObject *ident = NULL; - PyObject *newobj; - if (s->iterable_as_array) { - newobj = PyObject_GetIter(obj); - if (newobj == NULL) - PyErr_Clear(); - else { - rv = encoder_listencode_list(s, rval, newobj, indent_level); - Py_DECREF(newobj); - break; - } - } - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(obj); - if (ident == NULL) - break; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - Py_DECREF(ident); - break; - } - if (PyDict_SetItem(s->markers, ident, obj)) { - Py_DECREF(ident); - break; - } - } - if (Py_EnterRecursiveCall(" while encoding a JSON object")) - return rv; - newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL); - if (newobj == NULL) { - Py_XDECREF(ident); - Py_LeaveRecursiveCall(); - break; - } - rv = encoder_listencode_obj(s, rval, newobj, indent_level); - Py_LeaveRecursiveCall(); - Py_DECREF(newobj); - if (rv) { - Py_XDECREF(ident); - rv = -1; - } - else if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) { - Py_XDECREF(ident); - rv = -1; - } - Py_XDECREF(ident); - } - } - } while (0); - return rv; -} - -static int -encoder_listencode_dict(PyEncoderObject *s, JSON_Accu *rval, PyObject *dct, Py_ssize_t indent_level) -{ - /* Encode Python dict dct a JSON term */ - static PyObject *open_dict = NULL; - static PyObject *close_dict = NULL; - static PyObject *empty_dict = NULL; - PyObject *kstr = NULL; - PyObject *ident = NULL; - PyObject *iter = NULL; - PyObject *item = NULL; - PyObject *items = NULL; - PyObject *encoded = NULL; - Py_ssize_t idx; - - if (open_dict == NULL || close_dict == NULL || empty_dict == NULL) { - open_dict = JSON_InternFromString("{"); - close_dict = JSON_InternFromString("}"); - empty_dict = JSON_InternFromString("{}"); - if 
(open_dict == NULL || close_dict == NULL || empty_dict == NULL) - return -1; - } - if (PyDict_Size(dct) == 0) - return JSON_Accu_Accumulate(rval, empty_dict); - - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(dct); - if (ident == NULL) - goto bail; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - goto bail; - } - if (PyDict_SetItem(s->markers, ident, dct)) { - goto bail; - } - } - - if (JSON_Accu_Accumulate(rval, open_dict)) - goto bail; - - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level += 1; - /* - newline_indent = '\n' + (_indent * _current_indent_level) - separator = _item_separator + newline_indent - buf += newline_indent - */ - } - - iter = encoder_dict_iteritems(s, dct); - if (iter == NULL) - goto bail; - - idx = 0; - while ((item = PyIter_Next(iter))) { - PyObject *encoded, *key, *value; - if (!PyTuple_Check(item) || Py_SIZE(item) != 2) { - PyErr_SetString(PyExc_ValueError, "items must return 2-tuples"); - goto bail; - } - key = PyTuple_GET_ITEM(item, 0); - if (key == NULL) - goto bail; - value = PyTuple_GET_ITEM(item, 1); - if (value == NULL) - goto bail; - - encoded = PyDict_GetItem(s->key_memo, key); - if (encoded != NULL) { - Py_INCREF(encoded); - } else { - kstr = encoder_stringify_key(s, key); - if (kstr == NULL) - goto bail; - else if (kstr == Py_None) { - /* skipkeys */ - Py_DECREF(item); - Py_DECREF(kstr); - continue; - } - } - if (idx) { - if (JSON_Accu_Accumulate(rval, s->item_separator)) - goto bail; - } - if (encoded == NULL) { - encoded = encoder_encode_string(s, kstr); - Py_CLEAR(kstr); - if (encoded == NULL) - goto bail; - if (PyDict_SetItem(s->key_memo, key, encoded)) - goto bail; - } - if (JSON_Accu_Accumulate(rval, encoded)) { - goto bail; - } - Py_CLEAR(encoded); - if (JSON_Accu_Accumulate(rval, s->key_separator)) - goto bail; - if (encoder_listencode_obj(s, rval, value, indent_level)) - goto bail; - Py_CLEAR(item); - idx += 1; - } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; - if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) - goto bail; - Py_CLEAR(ident); - } - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level -= 1; - /* - yield '\n' + (_indent * _current_indent_level) - */ - } - if (JSON_Accu_Accumulate(rval, close_dict)) - goto bail; - return 0; - -bail: - Py_XDECREF(encoded); - Py_XDECREF(items); - Py_XDECREF(item); - Py_XDECREF(iter); - Py_XDECREF(kstr); - Py_XDECREF(ident); - return -1; -} - - -static int -encoder_listencode_list(PyEncoderObject *s, JSON_Accu *rval, PyObject *seq, Py_ssize_t indent_level) -{ - /* Encode Python list seq to a JSON term */ - static PyObject *open_array = NULL; - static PyObject *close_array = NULL; - static PyObject *empty_array = NULL; - PyObject *ident = NULL; - PyObject *iter = NULL; - PyObject *obj = NULL; - int is_true; - int i = 0; - - if (open_array == NULL || close_array == NULL || empty_array == NULL) { - open_array = JSON_InternFromString("["); - close_array = JSON_InternFromString("]"); - empty_array = JSON_InternFromString("[]"); - if (open_array == NULL || close_array == NULL || empty_array == NULL) - return -1; - } - ident = NULL; - is_true = PyObject_IsTrue(seq); - if (is_true == -1) - return -1; - else if (is_true == 0) - return JSON_Accu_Accumulate(rval, empty_array); - - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(seq); - if (ident == NULL) - goto bail; - has_key = 
PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - goto bail; - } - if (PyDict_SetItem(s->markers, ident, seq)) { - goto bail; - } - } - - iter = PyObject_GetIter(seq); - if (iter == NULL) - goto bail; - - if (JSON_Accu_Accumulate(rval, open_array)) - goto bail; - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level += 1; - /* - newline_indent = '\n' + (_indent * _current_indent_level) - separator = _item_separator + newline_indent - buf += newline_indent - */ - } - while ((obj = PyIter_Next(iter))) { - if (i) { - if (JSON_Accu_Accumulate(rval, s->item_separator)) - goto bail; - } - if (encoder_listencode_obj(s, rval, obj, indent_level)) - goto bail; - i++; - Py_CLEAR(obj); - } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; - if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) - goto bail; - Py_CLEAR(ident); - } - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level -= 1; - /* - yield '\n' + (_indent * _current_indent_level) - */ - } - if (JSON_Accu_Accumulate(rval, close_array)) - goto bail; - return 0; - -bail: - Py_XDECREF(obj); - Py_XDECREF(iter); - Py_XDECREF(ident); - return -1; -} - -static void -encoder_dealloc(PyObject *self) -{ - /* bpo-31095: UnTrack is needed before calling any callbacks */ - PyObject_GC_UnTrack(self); - encoder_clear(self); - Py_TYPE(self)->tp_free(self); -} - -static int -encoder_traverse(PyObject *self, visitproc visit, void *arg) -{ - PyEncoderObject *s; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - Py_VISIT(s->markers); - Py_VISIT(s->defaultfn); - Py_VISIT(s->encoder); - Py_VISIT(s->encoding); - Py_VISIT(s->indent); - Py_VISIT(s->key_separator); - Py_VISIT(s->item_separator); - Py_VISIT(s->key_memo); - Py_VISIT(s->sort_keys); - Py_VISIT(s->item_sort_kw); - Py_VISIT(s->item_sort_key); - Py_VISIT(s->max_long_size); - Py_VISIT(s->min_long_size); - Py_VISIT(s->Decimal); - return 0; -} - -static int -encoder_clear(PyObject *self) -{ - /* Deallocate Encoder */ - PyEncoderObject *s; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - Py_CLEAR(s->markers); - Py_CLEAR(s->defaultfn); - Py_CLEAR(s->encoder); - Py_CLEAR(s->encoding); - Py_CLEAR(s->indent); - Py_CLEAR(s->key_separator); - Py_CLEAR(s->item_separator); - Py_CLEAR(s->key_memo); - Py_CLEAR(s->skipkeys_bool); - Py_CLEAR(s->sort_keys); - Py_CLEAR(s->item_sort_kw); - Py_CLEAR(s->item_sort_key); - Py_CLEAR(s->max_long_size); - Py_CLEAR(s->min_long_size); - Py_CLEAR(s->Decimal); - return 0; -} - -PyDoc_STRVAR(encoder_doc, "_iterencode(obj, _current_indent_level) -> iterable"); - -static -PyTypeObject PyEncoderType = { - PyVarObject_HEAD_INIT(NULL, 0) - "simplejson._speedups.Encoder", /* tp_name */ - sizeof(PyEncoderObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - encoder_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - encoder_call, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - encoder_doc, /* tp_doc */ - encoder_traverse, /* tp_traverse */ - encoder_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - encoder_members, /* tp_members */ - 0, /* tp_getset 
*/ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - encoder_new, /* tp_new */ - 0, /* tp_free */ -}; - -static PyMethodDef speedups_methods[] = { - {"encode_basestring_ascii", - (PyCFunction)py_encode_basestring_ascii, - METH_O, - pydoc_encode_basestring_ascii}, - {"scanstring", - (PyCFunction)py_scanstring, - METH_VARARGS, - pydoc_scanstring}, - {NULL, NULL, 0, NULL} -}; - -PyDoc_STRVAR(module_doc, -"simplejson speedups\n"); - -#if PY_MAJOR_VERSION >= 3 -static struct PyModuleDef moduledef = { - PyModuleDef_HEAD_INIT, - "_speedups", /* m_name */ - module_doc, /* m_doc */ - -1, /* m_size */ - speedups_methods, /* m_methods */ - NULL, /* m_reload */ - NULL, /* m_traverse */ - NULL, /* m_clear*/ - NULL, /* m_free */ -}; -#endif - -PyObject * -import_dependency(char *module_name, char *attr_name) -{ - PyObject *rval; - PyObject *module = PyImport_ImportModule(module_name); - if (module == NULL) - return NULL; - rval = PyObject_GetAttrString(module, attr_name); - Py_DECREF(module); - return rval; -} - -static int -init_constants(void) -{ - JSON_NaN = JSON_InternFromString("NaN"); - if (JSON_NaN == NULL) - return 0; - JSON_Infinity = JSON_InternFromString("Infinity"); - if (JSON_Infinity == NULL) - return 0; - JSON_NegInfinity = JSON_InternFromString("-Infinity"); - if (JSON_NegInfinity == NULL) - return 0; -#if PY_MAJOR_VERSION >= 3 - JSON_EmptyUnicode = PyUnicode_New(0, 127); -#else /* PY_MAJOR_VERSION >= 3 */ - JSON_EmptyStr = PyString_FromString(""); - if (JSON_EmptyStr == NULL) - return 0; - JSON_EmptyUnicode = PyUnicode_FromUnicode(NULL, 0); -#endif /* PY_MAJOR_VERSION >= 3 */ - if (JSON_EmptyUnicode == NULL) - return 0; - - return 1; -} - -static PyObject * -moduleinit(void) -{ - PyObject *m; - if (PyType_Ready(&PyScannerType) < 0) - return NULL; - if (PyType_Ready(&PyEncoderType) < 0) - return NULL; - if (!init_constants()) - return NULL; - -#if PY_MAJOR_VERSION >= 3 - m = PyModule_Create(&moduledef); -#else - m = Py_InitModule3("_speedups", speedups_methods, module_doc); -#endif - Py_INCREF((PyObject*)&PyScannerType); - PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType); - Py_INCREF((PyObject*)&PyEncoderType); - PyModule_AddObject(m, "make_encoder", (PyObject*)&PyEncoderType); - RawJSONType = import_dependency("simplejson.raw_json", "RawJSON"); - if (RawJSONType == NULL) - return NULL; - JSONDecodeError = import_dependency("simplejson.errors", "JSONDecodeError"); - if (JSONDecodeError == NULL) - return NULL; - return m; -} - -#if PY_MAJOR_VERSION >= 3 -PyMODINIT_FUNC -PyInit__speedups(void) -{ - return moduleinit(); -} -#else -void -init_speedups(void) -{ - moduleinit(); -} -#endif diff --git a/script.module.simplejson/lib/simplejson/decoder.py b/script.module.simplejson/lib/simplejson/decoder.py index 7f0b0568fd..c99a976d84 100644 --- a/script.module.simplejson/lib/simplejson/decoder.py +++ b/script.module.simplejson/lib/simplejson/decoder.py @@ -46,9 +46,35 @@ def _floatconstants(): DEFAULT_ENCODING = "utf-8" +if hasattr(sys, 'get_int_max_str_digits'): + bounded_int = int +else: + def bounded_int(s, INT_MAX_STR_DIGITS=4300): + """Backport of the integer string length conversion limitation + + https://docs.python.org/3/library/stdtypes.html#int-max-str-digits + """ + if len(s) > INT_MAX_STR_DIGITS: + raise ValueError("Exceeds the limit (%s) for integer string conversion: value has %s digits" % (INT_MAX_STR_DIGITS, len(s))) + return int(s) + + +def scan_four_digit_hex(s, 
end, _m=re.compile(r'^[0-9a-fA-F]{4}$').match): + """Scan a four digit hex number from s[end:end + 4] + """ + msg = "Invalid \\uXXXX escape sequence" + esc = s[end:end + 4] + if not _m(esc): + raise JSONDecodeError(msg, s, end - 2) + try: + return int(esc, 16), end + 4 + except ValueError: + raise JSONDecodeError(msg, s, end - 2) + def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u''.join, - _PY3=PY3, _maxunicode=sys.maxunicode): + _PY3=PY3, _maxunicode=sys.maxunicode, + _scan_four_digit_hex=scan_four_digit_hex): """Scan the string s for a JSON string. End is the index of the character in s after the quote that started the JSON string. Unescapes all valid JSON string escape sequences and raises ValueError @@ -67,6 +93,7 @@ def py_scanstring(s, end, encoding=None, strict=True, if chunk is None: raise JSONDecodeError( "Unterminated string starting at", s, begin) + prev_end = end end = chunk.end() content, terminator = chunk.groups() # Content is contains zero or more unescaped string characters @@ -81,7 +108,7 @@ def py_scanstring(s, end, encoding=None, strict=True, elif terminator != '\\': if strict: msg = "Invalid control character %r at" - raise JSONDecodeError(msg, s, end) + raise JSONDecodeError(msg, s, prev_end) else: _append(terminator) continue @@ -100,33 +127,18 @@ def py_scanstring(s, end, encoding=None, strict=True, end += 1 else: # Unicode escape sequence - msg = "Invalid \\uXXXX escape sequence" - esc = s[end + 1:end + 5] - escX = esc[1:2] - if len(esc) != 4 or escX == 'x' or escX == 'X': - raise JSONDecodeError(msg, s, end - 1) - try: - uni = int(esc, 16) - except ValueError: - raise JSONDecodeError(msg, s, end - 1) - end += 5 + uni, end = _scan_four_digit_hex(s, end + 1) # Check for surrogate pair on UCS-4 systems # Note that this will join high/low surrogate pairs # but will also pass unpaired surrogates through if (_maxunicode > 65535 and uni & 0xfc00 == 0xd800 and s[end:end + 2] == '\\u'): - esc2 = s[end + 2:end + 6] - escX = esc2[1:2] - if len(esc2) == 4 and not (escX == 'x' or escX == 'X'): - try: - uni2 = int(esc2, 16) - except ValueError: - raise JSONDecodeError(msg, s, end) - if uni2 & 0xfc00 == 0xdc00: - uni = 0x10000 + (((uni - 0xd800) << 10) | - (uni2 - 0xdc00)) - end += 6 + uni2, end2 = _scan_four_digit_hex(s, end + 2) + if uni2 & 0xfc00 == 0xdc00: + uni = 0x10000 + (((uni - 0xd800) << 10) | + (uni2 - 0xdc00)) + end = end2 char = unichr(uni) # Append the unescaped character _append(char) @@ -167,7 +179,7 @@ def JSONObject(state, encoding, strict, scan_once, object_hook, return pairs, end + 1 elif nextchar != '"': raise JSONDecodeError( - "Expecting property name enclosed in double quotes", + "Expecting property name enclosed in double quotes or '}'", s, end) end += 1 while True: @@ -294,14 +306,15 @@ class JSONDecoder(object): | null | None | +---------------+-------------------+ - It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as + When allow_nan=True, it also understands + ``NaN``, ``Infinity``, and ``-Infinity`` as their corresponding ``float`` values, which is outside the JSON spec. 
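A minimal sketch of the string-scanning behaviour the hunk above introduces, assuming the patched pure-Python simplejson is importable (illustrative only, not part of the patch): the new scan_four_digit_hex helper validates every \uXXXX escape with a strict four-hex-digit match and reports failures at the escape itself, while py_scanstring keeps joining UTF-16 surrogate pairs through the same helper.

    import simplejson

    # A well-formed surrogate pair still decodes to a single code point.
    assert simplejson.loads('"\\ud834\\udd20"') == '\U0001d120'

    # A malformed escape (non-hex digit) is rejected with a decode error
    # raised by scan_four_digit_hex.
    try:
        simplejson.loads('"\\u12G4"')
    except simplejson.JSONDecodeError as exc:
        print(exc.msg)  # e.g. "Invalid \uXXXX escape sequence"
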
""" def __init__(self, encoding=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, strict=True, - object_pairs_hook=None): + object_pairs_hook=None, allow_nan=False): """ *encoding* determines the encoding used to interpret any :class:`str` objects decoded by this instance (``'utf-8'`` by @@ -334,10 +347,13 @@ def __init__(self, encoding=None, object_hook=None, parse_float=None, ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity``. + + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. *strict* controls the parser's behavior when it encounters an invalid control character in a string. The default setting of @@ -351,8 +367,8 @@ def __init__(self, encoding=None, object_hook=None, parse_float=None, self.object_hook = object_hook self.object_pairs_hook = object_pairs_hook self.parse_float = parse_float or float - self.parse_int = parse_int or int - self.parse_constant = parse_constant or _CONSTANTS.__getitem__ + self.parse_int = parse_int or bounded_int + self.parse_constant = parse_constant or (allow_nan and _CONSTANTS.__getitem__ or None) self.strict = strict self.parse_object = JSONObject self.parse_array = JSONArray diff --git a/script.module.simplejson/lib/simplejson/encoder.py b/script.module.simplejson/lib/simplejson/encoder.py index 7ea172e7d2..661ff361bf 100644 --- a/script.module.simplejson/lib/simplejson/encoder.py +++ b/script.module.simplejson/lib/simplejson/encoder.py @@ -5,7 +5,7 @@ from operator import itemgetter # Do not import Decimal directly to avoid reload issues import decimal -from .compat import unichr, binary_type, text_type, string_types, integer_types, PY3 +from .compat import binary_type, text_type, string_types, integer_types, PY3 def _import_speedups(): try: from . import _speedups @@ -32,6 +32,7 @@ def _import_speedups(): for i in range(0x20): #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) +del i FLOAT_REPR = repr @@ -139,7 +140,7 @@ class JSONEncoder(object): key_separator = ': ' def __init__(self, skipkeys=False, ensure_ascii=True, - check_circular=True, allow_nan=True, sort_keys=False, + check_circular=True, allow_nan=False, sort_keys=False, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, @@ -160,10 +161,11 @@ def __init__(self, skipkeys=False, ensure_ascii=True, prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place. - If allow_nan is true, then NaN, Infinity, and -Infinity will be - encoded as such. This behavior is not JSON specification compliant, - but is consistent with most JavaScript based encoders and decoders. - Otherwise, it will be a ValueError to encode such floats. 
+ If allow_nan is true (default: False), then out of range float + values (nan, inf, -inf) will be serialized to + their JavaScript equivalents (NaN, Infinity, -Infinity) + instead of raising a ValueError. See + ignore_nan for ECMA-262 compliant behavior. If sort_keys is true, then the output of dictionaries will be sorted by key; this is useful for regression tests to ensure @@ -293,7 +295,7 @@ def encode(self, o): # This doesn't pass the iterator directly to ''.join() because the # exceptions aren't as detailed. The list call should be roughly # equivalent to the PySequence_Fast that ''.join() would do. - chunks = self.iterencode(o, _one_shot=True) + chunks = self.iterencode(o) if not isinstance(chunks, (list, tuple)): chunks = list(chunks) if self.ensure_ascii: @@ -301,7 +303,7 @@ def encode(self, o): else: return u''.join(chunks) - def iterencode(self, o, _one_shot=False): + def iterencode(self, o): """Encode the given object and yield each string representation as available. @@ -355,8 +357,7 @@ def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan, key_memo = {} int_as_string_bitcount = ( 53 if self.bigint_as_string else self.int_as_string_bitcount) - if (_one_shot and c_make_encoder is not None - and self.indent is None): + if (c_make_encoder is not None and self.indent is None): _iterencode = c_make_encoder( markers, self.default, _encoder, self.indent, self.key_separator, self.item_separator, self.sort_keys, @@ -369,7 +370,7 @@ def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan, _iterencode = _make_iterencode( markers, self.default, _encoder, self.indent, floatstr, self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, _one_shot, self.use_decimal, + self.skipkeys, self.use_decimal, self.namedtuple_as_object, self.tuple_as_array, int_as_string_bitcount, self.item_sort_key, self.encoding, self.for_json, @@ -397,14 +398,14 @@ class JSONEncoderForHTML(JSONEncoder): def encode(self, o): # Override JSONEncoder.encode because it has hacks for # performance that make things more complicated. 
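A matching sketch for the encoder side, again assuming the patched module (illustrative only): allow_nan now defaults to False, so out-of-range floats raise ValueError unless explicitly allowed or ignored, and iterencode() no longer takes the private _one_shot flag, so the C encoder is used whenever it is available and indent is None.

    from simplejson.encoder import JSONEncoder

    nan = float('nan')
    try:
        JSONEncoder().encode(nan)       # allow_nan defaults to False here
    except ValueError:
        pass
    print(JSONEncoder(allow_nan=True).encode(nan))   # NaN  (non-standard)
    print(JSONEncoder(ignore_nan=True).encode(nan))  # null (ECMA-262 style)
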
- chunks = self.iterencode(o, True) + chunks = self.iterencode(o) if self.ensure_ascii: return ''.join(chunks) else: return u''.join(chunks) - def iterencode(self, o, _one_shot=False): - chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot) + def iterencode(self, o): + chunks = super(JSONEncoderForHTML, self).iterencode(o) for chunk in chunks: chunk = chunk.replace('&', '\\u0026') chunk = chunk.replace('<', '\\u003c') @@ -418,7 +419,7 @@ def iterencode(self, o, _one_shot=False): def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, - _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, + _key_separator, _item_separator, _sort_keys, _skipkeys, _use_decimal, _namedtuple_as_object, _tuple_as_array, _int_as_string_bitcount, _item_sort_key, _encoding,_for_json, @@ -450,6 +451,15 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, not isinstance(_int_as_string_bitcount, integer_types))): raise TypeError("int_as_string_bitcount must be a positive integer") + def call_method(obj, method_name): + method = getattr(obj, method_name, None) + if callable(method): + try: + return (method(),) + except TypeError: + pass + return None + def _encode_int(value): skip_quoting = ( _int_as_string_bitcount is None @@ -512,15 +522,18 @@ def _iterencode_list(lst, _current_indent_level): yield buf + str(value) else: yield buf - for_json = _for_json and getattr(value, 'for_json', None) - if for_json and callable(for_json): - chunks = _iterencode(for_json(), _current_indent_level) + for_json = _for_json and call_method(value, 'for_json') + if for_json: + chunks = _iterencode(for_json[0], _current_indent_level) elif isinstance(value, list): chunks = _iterencode_list(value, _current_indent_level) else: - _asdict = _namedtuple_as_object and getattr(value, '_asdict', None) - if _asdict and callable(_asdict): - chunks = _iterencode_dict(_asdict(), + _asdict = _namedtuple_as_object and call_method(value, '_asdict') + if _asdict: + dct = _asdict[0] + if not isinstance(dct, dict): + raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,)) + chunks = _iterencode_dict(dct, _current_indent_level) elif _tuple_as_array and isinstance(value, tuple): chunks = _iterencode_list(value, _current_indent_level) @@ -633,15 +646,18 @@ def _iterencode_dict(dct, _current_indent_level): elif _use_decimal and isinstance(value, Decimal): yield str(value) else: - for_json = _for_json and getattr(value, 'for_json', None) - if for_json and callable(for_json): - chunks = _iterencode(for_json(), _current_indent_level) + for_json = _for_json and call_method(value, 'for_json') + if for_json: + chunks = _iterencode(for_json[0], _current_indent_level) elif isinstance(value, list): chunks = _iterencode_list(value, _current_indent_level) else: - _asdict = _namedtuple_as_object and getattr(value, '_asdict', None) - if _asdict and callable(_asdict): - chunks = _iterencode_dict(_asdict(), + _asdict = _namedtuple_as_object and call_method(value, '_asdict') + if _asdict: + dct = _asdict[0] + if not isinstance(dct, dict): + raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,)) + chunks = _iterencode_dict(dct, _current_indent_level) elif _tuple_as_array and isinstance(value, tuple): chunks = _iterencode_list(value, _current_indent_level) @@ -676,18 +692,20 @@ def _iterencode(o, _current_indent_level): elif isinstance(o, float): yield _floatstr(o) else: - for_json = _for_json and getattr(o, 'for_json', None) - if for_json and callable(for_json): - for 
chunk in _iterencode(for_json(), _current_indent_level): + for_json = _for_json and call_method(o, 'for_json') + if for_json: + for chunk in _iterencode(for_json[0], _current_indent_level): yield chunk elif isinstance(o, list): for chunk in _iterencode_list(o, _current_indent_level): yield chunk else: - _asdict = _namedtuple_as_object and getattr(o, '_asdict', None) - if _asdict and callable(_asdict): - for chunk in _iterencode_dict(_asdict(), - _current_indent_level): + _asdict = _namedtuple_as_object and call_method(o, '_asdict') + if _asdict: + dct = _asdict[0] + if not isinstance(dct, dict): + raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,)) + for chunk in _iterencode_dict(dct, _current_indent_level): yield chunk elif (_tuple_as_array and isinstance(o, tuple)): for chunk in _iterencode_list(o, _current_indent_level): diff --git a/script.module.simplejson/lib/simplejson/scanner.py b/script.module.simplejson/lib/simplejson/scanner.py index 85e385e147..34710d68c6 100644 --- a/script.module.simplejson/lib/simplejson/scanner.py +++ b/script.module.simplejson/lib/simplejson/scanner.py @@ -60,11 +60,11 @@ def _scan_once(string, idx): else: res = parse_int(integer) return res, m.end() - elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': + elif parse_constant and nextchar == 'N' and string[idx:idx + 3] == 'NaN': return parse_constant('NaN'), idx + 3 - elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': + elif parse_constant and nextchar == 'I' and string[idx:idx + 8] == 'Infinity': return parse_constant('Infinity'), idx + 8 - elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': + elif parse_constant and nextchar == '-' and string[idx:idx + 9] == '-Infinity': return parse_constant('-Infinity'), idx + 9 else: raise JSONDecodeError(errmsg, string, idx) diff --git a/script.module.simplejson/lib/simplejson/tests/__init__.py b/script.module.simplejson/lib/simplejson/tests/__init__.py deleted file mode 100644 index 25d3305b3d..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -from __future__ import absolute_import -import unittest -import sys -import os - - -class NoExtensionTestSuite(unittest.TestSuite): - def run(self, result): - import simplejson - simplejson._toggle_speedups(False) - result = unittest.TestSuite.run(self, result) - simplejson._toggle_speedups(True) - return result - - -class TestMissingSpeedups(unittest.TestCase): - def runTest(self): - if hasattr(sys, 'pypy_translation_info'): - "PyPy doesn't need speedups! 
:)" - elif hasattr(self, 'skipTest'): - self.skipTest('_speedups.so is missing!') - - -def additional_tests(suite=None): - import simplejson - import simplejson.encoder - import simplejson.decoder - if suite is None: - suite = unittest.TestSuite() - try: - import doctest - except ImportError: - if sys.version_info < (2, 7): - # doctests in 2.6 depends on cStringIO - return suite - raise - for mod in (simplejson, simplejson.encoder, simplejson.decoder): - suite.addTest(doctest.DocTestSuite(mod)) - suite.addTest(doctest.DocFileSuite('../../index.rst')) - return suite - - -def all_tests_suite(): - def get_suite(): - suite_names = [ - 'simplejson.tests.%s' % (os.path.splitext(f)[0],) - for f in os.listdir(os.path.dirname(__file__)) - if f.startswith('test_') and f.endswith('.py') - ] - return additional_tests( - unittest.TestLoader().loadTestsFromNames(suite_names)) - suite = get_suite() - import simplejson - if simplejson._import_c_make_encoder() is None: - suite.addTest(TestMissingSpeedups()) - else: - suite = unittest.TestSuite([ - suite, - NoExtensionTestSuite([get_suite()]), - ]) - return suite - - -def main(): - runner = unittest.TextTestRunner(verbosity=1 + sys.argv.count('-v')) - suite = all_tests_suite() - raise SystemExit(not runner.run(suite).wasSuccessful()) - - -if __name__ == '__main__': - import os - import sys - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) - main() diff --git a/script.module.simplejson/lib/simplejson/tests/test_bigint_as_string.py b/script.module.simplejson/lib/simplejson/tests/test_bigint_as_string.py deleted file mode 100644 index 2cf2cc2966..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_bigint_as_string.py +++ /dev/null @@ -1,67 +0,0 @@ -from unittest import TestCase - -import simplejson as json - - -class TestBigintAsString(TestCase): - # Python 2.5, at least the one that ships on Mac OS X, calculates - # 2 ** 53 as 0! It manages to calculate 1 << 53 correctly. 
- values = [(200, 200), - ((1 << 53) - 1, 9007199254740991), - ((1 << 53), '9007199254740992'), - ((1 << 53) + 1, '9007199254740993'), - (-100, -100), - ((-1 << 53), '-9007199254740992'), - ((-1 << 53) - 1, '-9007199254740993'), - ((-1 << 53) + 1, -9007199254740991)] - - options = ( - {"bigint_as_string": True}, - {"int_as_string_bitcount": 53} - ) - - def test_ints(self): - for opts in self.options: - for val, expect in self.values: - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, **opts))) - - def test_lists(self): - for opts in self.options: - for val, expect in self.values: - val = [val, val] - expect = [expect, expect] - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, **opts))) - - def test_dicts(self): - for opts in self.options: - for val, expect in self.values: - val = {'k': val} - expect = {'k': expect} - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, **opts))) - - def test_dict_keys(self): - for opts in self.options: - for val, _ in self.values: - expect = {str(val): 'value'} - val = {val: 'value'} - self.assertEqual( - expect, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, **opts))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_bitsize_int_as_string.py b/script.module.simplejson/lib/simplejson/tests/test_bitsize_int_as_string.py deleted file mode 100644 index fd7d103a6d..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_bitsize_int_as_string.py +++ /dev/null @@ -1,73 +0,0 @@ -from unittest import TestCase - -import simplejson as json - - -class TestBitSizeIntAsString(TestCase): - # Python 2.5, at least the one that ships on Mac OS X, calculates - # 2 ** 31 as 0! It manages to calculate 1 << 31 correctly. 
- values = [ - (200, 200), - ((1 << 31) - 1, (1 << 31) - 1), - ((1 << 31), str(1 << 31)), - ((1 << 31) + 1, str((1 << 31) + 1)), - (-100, -100), - ((-1 << 31), str(-1 << 31)), - ((-1 << 31) - 1, str((-1 << 31) - 1)), - ((-1 << 31) + 1, (-1 << 31) + 1), - ] - - def test_invalid_counts(self): - for n in ['foo', -1, 0, 1.0]: - self.assertRaises( - TypeError, - json.dumps, 0, int_as_string_bitcount=n) - - def test_ints_outside_range_fails(self): - self.assertNotEqual( - str(1 << 15), - json.loads(json.dumps(1 << 15, int_as_string_bitcount=16)), - ) - - def test_ints(self): - for val, expect in self.values: - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, int_as_string_bitcount=31)), - ) - - def test_lists(self): - for val, expect in self.values: - val = [val, val] - expect = [expect, expect] - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, int_as_string_bitcount=31))) - - def test_dicts(self): - for val, expect in self.values: - val = {'k': val} - expect = {'k': expect} - self.assertEqual( - val, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, int_as_string_bitcount=31))) - - def test_dict_keys(self): - for val, _ in self.values: - expect = {str(val): 'value'} - val = {val: 'value'} - self.assertEqual( - expect, - json.loads(json.dumps(val))) - self.assertEqual( - expect, - json.loads(json.dumps(val, int_as_string_bitcount=31))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_check_circular.py b/script.module.simplejson/lib/simplejson/tests/test_check_circular.py deleted file mode 100644 index af6463d6d8..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_check_circular.py +++ /dev/null @@ -1,30 +0,0 @@ -from unittest import TestCase -import simplejson as json - -def default_iterable(obj): - return list(obj) - -class TestCheckCircular(TestCase): - def test_circular_dict(self): - dct = {} - dct['a'] = dct - self.assertRaises(ValueError, json.dumps, dct) - - def test_circular_list(self): - lst = [] - lst.append(lst) - self.assertRaises(ValueError, json.dumps, lst) - - def test_circular_composite(self): - dct2 = {} - dct2['a'] = [] - dct2['a'].append(dct2) - self.assertRaises(ValueError, json.dumps, dct2) - - def test_circular_default(self): - json.dumps([set()], default=default_iterable) - self.assertRaises(TypeError, json.dumps, [set()]) - - def test_circular_off_default(self): - json.dumps([set()], default=default_iterable, check_circular=False) - self.assertRaises(TypeError, json.dumps, [set()], check_circular=False) diff --git a/script.module.simplejson/lib/simplejson/tests/test_decimal.py b/script.module.simplejson/lib/simplejson/tests/test_decimal.py deleted file mode 100644 index 2b0940b152..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_decimal.py +++ /dev/null @@ -1,71 +0,0 @@ -import decimal -from decimal import Decimal -from unittest import TestCase -from simplejson.compat import StringIO, reload_module - -import simplejson as json - -class TestDecimal(TestCase): - NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500" - def dumps(self, obj, **kw): - sio = StringIO() - json.dump(obj, sio, **kw) - res = json.dumps(obj, **kw) - self.assertEqual(res, sio.getvalue()) - return res - - def loads(self, s, **kw): - sio = StringIO(s) - res = json.loads(s, **kw) - self.assertEqual(res, json.load(sio, **kw)) - return res - - def test_decimal_encode(self): - for 
d in map(Decimal, self.NUMS): - self.assertEqual(self.dumps(d, use_decimal=True), str(d)) - - def test_decimal_decode(self): - for s in self.NUMS: - self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s)) - - def test_stringify_key(self): - for d in map(Decimal, self.NUMS): - v = {d: d} - self.assertEqual( - self.loads( - self.dumps(v, use_decimal=True), parse_float=Decimal), - {str(d): d}) - - def test_decimal_roundtrip(self): - for d in map(Decimal, self.NUMS): - # The type might not be the same (int and Decimal) but they - # should still compare equal. - for v in [d, [d], {'': d}]: - self.assertEqual( - self.loads( - self.dumps(v, use_decimal=True), parse_float=Decimal), - v) - - def test_decimal_defaults(self): - d = Decimal('1.1') - # use_decimal=True is the default - self.assertRaises(TypeError, json.dumps, d, use_decimal=False) - self.assertEqual('1.1', json.dumps(d)) - self.assertEqual('1.1', json.dumps(d, use_decimal=True)) - self.assertRaises(TypeError, json.dump, d, StringIO(), - use_decimal=False) - sio = StringIO() - json.dump(d, sio) - self.assertEqual('1.1', sio.getvalue()) - sio = StringIO() - json.dump(d, sio, use_decimal=True) - self.assertEqual('1.1', sio.getvalue()) - - def test_decimal_reload(self): - # Simulate a subinterpreter that reloads the Python modules but not - # the C code https://github.com/simplejson/simplejson/issues/34 - global Decimal - Decimal = reload_module(decimal).Decimal - import simplejson.encoder - simplejson.encoder.Decimal = Decimal - self.test_decimal_roundtrip() diff --git a/script.module.simplejson/lib/simplejson/tests/test_decode.py b/script.module.simplejson/lib/simplejson/tests/test_decode.py deleted file mode 100644 index 6960ee58ba..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_decode.py +++ /dev/null @@ -1,119 +0,0 @@ -from __future__ import absolute_import -import decimal -from unittest import TestCase - -import simplejson as json -from simplejson.compat import StringIO, b, binary_type -from simplejson import OrderedDict - -class MisbehavingBytesSubtype(binary_type): - def decode(self, encoding=None): - return "bad decode" - def __str__(self): - return "bad __str__" - def __bytes__(self): - return b("bad __bytes__") - -class TestDecode(TestCase): - if not hasattr(TestCase, 'assertIs'): - def assertIs(self, a, b): - self.assertTrue(a is b, '%r is %r' % (a, b)) - - def test_decimal(self): - rval = json.loads('1.1', parse_float=decimal.Decimal) - self.assertTrue(isinstance(rval, decimal.Decimal)) - self.assertEqual(rval, decimal.Decimal('1.1')) - - def test_float(self): - rval = json.loads('1', parse_int=float) - self.assertTrue(isinstance(rval, float)) - self.assertEqual(rval, 1.0) - - def test_decoder_optimizations(self): - # Several optimizations were made that skip over calls to - # the whitespace regex, so this test is designed to try and - # exercise the uncommon cases. The array cases are already covered. 
- rval = json.loads('{ "key" : "value" , "k":"v" }') - self.assertEqual(rval, {"key":"value", "k":"v"}) - - def test_empty_objects(self): - s = '{}' - self.assertEqual(json.loads(s), eval(s)) - s = '[]' - self.assertEqual(json.loads(s), eval(s)) - s = '""' - self.assertEqual(json.loads(s), eval(s)) - - def test_object_pairs_hook(self): - s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}' - p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4), - ("qrt", 5), ("pad", 6), ("hoy", 7)] - self.assertEqual(json.loads(s), eval(s)) - self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p) - self.assertEqual(json.load(StringIO(s), - object_pairs_hook=lambda x: x), p) - od = json.loads(s, object_pairs_hook=OrderedDict) - self.assertEqual(od, OrderedDict(p)) - self.assertEqual(type(od), OrderedDict) - # the object_pairs_hook takes priority over the object_hook - self.assertEqual(json.loads(s, - object_pairs_hook=OrderedDict, - object_hook=lambda x: None), - OrderedDict(p)) - - def check_keys_reuse(self, source, loads): - rval = loads(source) - (a, b), (c, d) = sorted(rval[0]), sorted(rval[1]) - self.assertIs(a, c) - self.assertIs(b, d) - - def test_keys_reuse_str(self): - s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'.encode('utf8') - self.check_keys_reuse(s, json.loads) - - def test_keys_reuse_unicode(self): - s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]' - self.check_keys_reuse(s, json.loads) - - def test_empty_strings(self): - self.assertEqual(json.loads('""'), "") - self.assertEqual(json.loads(u'""'), u"") - self.assertEqual(json.loads('[""]'), [""]) - self.assertEqual(json.loads(u'[""]'), [u""]) - - def test_raw_decode(self): - cls = json.decoder.JSONDecoder - self.assertEqual( - ({'a': {}}, 9), - cls().raw_decode("{\"a\": {}}")) - # http://code.google.com/p/simplejson/issues/detail?id=85 - self.assertEqual( - ({'a': {}}, 9), - cls(object_pairs_hook=dict).raw_decode("{\"a\": {}}")) - # https://github.com/simplejson/simplejson/pull/38 - self.assertEqual( - ({'a': {}}, 11), - cls().raw_decode(" \n{\"a\": {}}")) - - def test_bytes_decode(self): - cls = json.decoder.JSONDecoder - data = b('"\xe2\x82\xac"') - self.assertEqual(cls().decode(data), u'\u20ac') - self.assertEqual(cls(encoding='latin1').decode(data), u'\xe2\x82\xac') - self.assertEqual(cls(encoding=None).decode(data), u'\u20ac') - - data = MisbehavingBytesSubtype(b('"\xe2\x82\xac"')) - self.assertEqual(cls().decode(data), u'\u20ac') - self.assertEqual(cls(encoding='latin1').decode(data), u'\xe2\x82\xac') - self.assertEqual(cls(encoding=None).decode(data), u'\u20ac') - - def test_bounds_checking(self): - # https://github.com/simplejson/simplejson/issues/98 - j = json.decoder.JSONDecoder() - for i in [4, 5, 6, -1, -2, -3, -4, -5, -6]: - self.assertRaises(ValueError, j.scan_once, '1234', i) - self.assertRaises(ValueError, j.raw_decode, '1234', i) - x, y = sorted(['128931233', '472389423'], key=id) - diff = id(x) - id(y) - self.assertRaises(ValueError, j.scan_once, y, diff) - self.assertRaises(ValueError, j.raw_decode, y, i) diff --git a/script.module.simplejson/lib/simplejson/tests/test_default.py b/script.module.simplejson/lib/simplejson/tests/test_default.py deleted file mode 100644 index d1eacb8c4c..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_default.py +++ /dev/null @@ -1,9 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -class TestDefault(TestCase): - def test_default(self): - self.assertEqual( - json.dumps(type, default=repr), - 
json.dumps(repr(type))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_dump.py b/script.module.simplejson/lib/simplejson/tests/test_dump.py deleted file mode 100644 index eff24c299c..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_dump.py +++ /dev/null @@ -1,249 +0,0 @@ -from unittest import TestCase -from simplejson.compat import StringIO, long_type, b, binary_type, text_type, PY3 -import simplejson as json - -class MisbehavingTextSubtype(text_type): - def __str__(self): - return "FAIL!" - -class MisbehavingBytesSubtype(binary_type): - def decode(self, encoding=None): - return "bad decode" - def __str__(self): - return "bad __str__" - def __bytes__(self): - return b("bad __bytes__") - -def as_text_type(s): - if PY3 and isinstance(s, bytes): - return s.decode('ascii') - return s - -def decode_iso_8859_15(b): - return b.decode('iso-8859-15') - -class TestDump(TestCase): - def test_dump(self): - sio = StringIO() - json.dump({}, sio) - self.assertEqual(sio.getvalue(), '{}') - - def test_constants(self): - for c in [None, True, False]: - self.assertTrue(json.loads(json.dumps(c)) is c) - self.assertTrue(json.loads(json.dumps([c]))[0] is c) - self.assertTrue(json.loads(json.dumps({'a': c}))['a'] is c) - - def test_stringify_key(self): - items = [(b('bytes'), 'bytes'), - (1.0, '1.0'), - (10, '10'), - (True, 'true'), - (False, 'false'), - (None, 'null'), - (long_type(100), '100')] - for k, expect in items: - self.assertEqual( - json.loads(json.dumps({k: expect})), - {expect: expect}) - self.assertEqual( - json.loads(json.dumps({k: expect}, sort_keys=True)), - {expect: expect}) - self.assertRaises(TypeError, json.dumps, {json: 1}) - for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]: - for sort_keys in [False, True]: - v0 = dict(v) - v0[json] = 1 - v1 = dict((as_text_type(key), val) for (key, val) in v.items()) - self.assertEqual( - json.loads(json.dumps(v0, skipkeys=True, sort_keys=sort_keys)), - v1) - self.assertEqual( - json.loads(json.dumps({'': v0}, skipkeys=True, sort_keys=sort_keys)), - {'': v1}) - self.assertEqual( - json.loads(json.dumps([v0], skipkeys=True, sort_keys=sort_keys)), - [v1]) - - def test_dumps(self): - self.assertEqual(json.dumps({}), '{}') - - def test_encode_truefalse(self): - self.assertEqual(json.dumps( - {True: False, False: True}, sort_keys=True), - '{"false": true, "true": false}') - self.assertEqual( - json.dumps( - {2: 3.0, - 4.0: long_type(5), - False: 1, - long_type(6): True, - "7": 0}, - sort_keys=True), - '{"2": 3.0, "4.0": 5, "6": true, "7": 0, "false": 1}') - - def test_ordered_dict(self): - # http://bugs.python.org/issue6105 - items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)] - s = json.dumps(json.OrderedDict(items)) - self.assertEqual( - s, - '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}') - - def test_indent_unknown_type_acceptance(self): - """ - A test against the regression mentioned at `github issue 29`_. - - The indent parameter should accept any type which pretends to be - an instance of int or long when it comes to being multiplied by - strings, even if it is not actually an int or long, for - backwards compatibility. - - .. _github issue 29: - http://github.com/simplejson/simplejson/issue/29 - """ - - class AwesomeInt(object): - """An awesome reimplementation of integers""" - - def __init__(self, *args, **kwargs): - if len(args) > 0: - # [construct from literals, objects, etc.] - # ... 
- - # Finally, if args[0] is an integer, store it - if isinstance(args[0], int): - self._int = args[0] - - # [various methods] - - def __mul__(self, other): - # [various ways to multiply AwesomeInt objects] - # ... finally, if the right-hand operand is not awesome enough, - # try to do a normal integer multiplication - if hasattr(self, '_int'): - return self._int * other - else: - raise NotImplementedError("To do non-awesome things with" - " this object, please construct it from an integer!") - - s = json.dumps([0, 1, 2], indent=AwesomeInt(3)) - self.assertEqual(s, '[\n 0,\n 1,\n 2\n]') - - def test_accumulator(self): - # the C API uses an accumulator that collects after 100,000 appends - lst = [0] * 100000 - self.assertEqual(json.loads(json.dumps(lst)), lst) - - def test_sort_keys(self): - # https://github.com/simplejson/simplejson/issues/106 - for num_keys in range(2, 32): - p = dict((str(x), x) for x in range(num_keys)) - sio = StringIO() - json.dump(p, sio, sort_keys=True) - self.assertEqual(sio.getvalue(), json.dumps(p, sort_keys=True)) - self.assertEqual(json.loads(sio.getvalue()), p) - - def test_misbehaving_text_subtype(self): - # https://github.com/simplejson/simplejson/issues/185 - text = "this is some text" - self.assertEqual( - json.dumps(MisbehavingTextSubtype(text)), - json.dumps(text) - ) - self.assertEqual( - json.dumps([MisbehavingTextSubtype(text)]), - json.dumps([text]) - ) - self.assertEqual( - json.dumps({MisbehavingTextSubtype(text): 42}), - json.dumps({text: 42}) - ) - - def test_misbehaving_bytes_subtype(self): - data = b("this is some data \xe2\x82\xac") - self.assertEqual( - json.dumps(MisbehavingBytesSubtype(data)), - json.dumps(data) - ) - self.assertEqual( - json.dumps([MisbehavingBytesSubtype(data)]), - json.dumps([data]) - ) - self.assertEqual( - json.dumps({MisbehavingBytesSubtype(data): 42}), - json.dumps({data: 42}) - ) - - def test_bytes_toplevel(self): - self.assertEqual(json.dumps(b('\xe2\x82\xac')), r'"\u20ac"') - self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4')) - self.assertEqual(json.dumps(b('\xa4'), encoding='iso-8859-1'), - r'"\u00a4"') - self.assertEqual(json.dumps(b('\xa4'), encoding='iso-8859-15'), - r'"\u20ac"') - if PY3: - self.assertRaises(TypeError, json.dumps, b('\xe2\x82\xac'), - encoding=None) - self.assertRaises(TypeError, json.dumps, b('\xa4'), - encoding=None) - self.assertEqual(json.dumps(b('\xa4'), encoding=None, - default=decode_iso_8859_15), - r'"\u20ac"') - else: - self.assertEqual(json.dumps(b('\xe2\x82\xac'), encoding=None), - r'"\u20ac"') - self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4'), - encoding=None) - self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4'), - encoding=None, default=decode_iso_8859_15) - - def test_bytes_nested(self): - self.assertEqual(json.dumps([b('\xe2\x82\xac')]), r'["\u20ac"]') - self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')]) - self.assertEqual(json.dumps([b('\xa4')], encoding='iso-8859-1'), - r'["\u00a4"]') - self.assertEqual(json.dumps([b('\xa4')], encoding='iso-8859-15'), - r'["\u20ac"]') - if PY3: - self.assertRaises(TypeError, json.dumps, [b('\xe2\x82\xac')], - encoding=None) - self.assertRaises(TypeError, json.dumps, [b('\xa4')], - encoding=None) - self.assertEqual(json.dumps([b('\xa4')], encoding=None, - default=decode_iso_8859_15), - r'["\u20ac"]') - else: - self.assertEqual(json.dumps([b('\xe2\x82\xac')], encoding=None), - r'["\u20ac"]') - self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')], - encoding=None) - 
self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')], - encoding=None, default=decode_iso_8859_15) - - def test_bytes_key(self): - self.assertEqual(json.dumps({b('\xe2\x82\xac'): 42}), r'{"\u20ac": 42}') - self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42}) - self.assertEqual(json.dumps({b('\xa4'): 42}, encoding='iso-8859-1'), - r'{"\u00a4": 42}') - self.assertEqual(json.dumps({b('\xa4'): 42}, encoding='iso-8859-15'), - r'{"\u20ac": 42}') - if PY3: - self.assertRaises(TypeError, json.dumps, {b('\xe2\x82\xac'): 42}, - encoding=None) - self.assertRaises(TypeError, json.dumps, {b('\xa4'): 42}, - encoding=None) - self.assertRaises(TypeError, json.dumps, {b('\xa4'): 42}, - encoding=None, default=decode_iso_8859_15) - self.assertEqual(json.dumps({b('\xa4'): 42}, encoding=None, - skipkeys=True), - r'{}') - else: - self.assertEqual(json.dumps({b('\xe2\x82\xac'): 42}, encoding=None), - r'{"\u20ac": 42}') - self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42}, - encoding=None) - self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42}, - encoding=None, default=decode_iso_8859_15) - self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42}, - encoding=None, skipkeys=True) diff --git a/script.module.simplejson/lib/simplejson/tests/test_encode_basestring_ascii.py b/script.module.simplejson/lib/simplejson/tests/test_encode_basestring_ascii.py deleted file mode 100644 index 49706bfd97..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_encode_basestring_ascii.py +++ /dev/null @@ -1,47 +0,0 @@ -from unittest import TestCase - -import simplejson.encoder -from simplejson.compat import b - -CASES = [ - (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'), - (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'), - (u'controls', '"controls"'), - (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'), - (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'), - (u' s p a c e d ', '" s p a c e d "'), - (u'\U0001d120', '"\\ud834\\udd20"'), - (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), - (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'), - (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), - (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'), - (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), - (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), - (u"`1~!@#$%^&*()_+-={':[,]}|;.?", '"`1~!@#$%^&*()_+-={\':[,]}|;.?"'), - (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'), - (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'), -] - -class TestEncodeBaseStringAscii(TestCase): - def test_py_encode_basestring_ascii(self): - self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii) - - def test_c_encode_basestring_ascii(self): - if not simplejson.encoder.c_encode_basestring_ascii: - return - self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii) - - def _test_encode_basestring_ascii(self, encode_basestring_ascii): - fname = encode_basestring_ascii.__name__ - for input_string, expect in CASES: - result = encode_basestring_ascii(input_string) - #self.assertEqual(result, expect, - # '{0!r} != {1!r} for {2}({3!r})'.format( - # result, expect, fname, input_string)) - self.assertEqual(result, expect, - '%r != %r for %s(%r)' % (result, expect, fname, input_string)) - - def test_sorted_dict(self): - items = 
[('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)] - s = simplejson.dumps(dict(items), sort_keys=True) - self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}') diff --git a/script.module.simplejson/lib/simplejson/tests/test_encode_for_html.py b/script.module.simplejson/lib/simplejson/tests/test_encode_for_html.py deleted file mode 100644 index 3a840aa48c..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_encode_for_html.py +++ /dev/null @@ -1,38 +0,0 @@ -import unittest - -import simplejson as json - -class TestEncodeForHTML(unittest.TestCase): - - def setUp(self): - self.decoder = json.JSONDecoder() - self.encoder = json.JSONEncoderForHTML() - self.non_ascii_encoder = json.JSONEncoderForHTML(ensure_ascii=False) - - def test_basic_encode(self): - self.assertEqual(r'"\u0026"', self.encoder.encode('&')) - self.assertEqual(r'"\u003c"', self.encoder.encode('<')) - self.assertEqual(r'"\u003e"', self.encoder.encode('>')) - self.assertEqual(r'"\u2028"', self.encoder.encode(u'\u2028')) - - def test_non_ascii_basic_encode(self): - self.assertEqual(r'"\u0026"', self.non_ascii_encoder.encode('&')) - self.assertEqual(r'"\u003c"', self.non_ascii_encoder.encode('<')) - self.assertEqual(r'"\u003e"', self.non_ascii_encoder.encode('>')) - self.assertEqual(r'"\u2028"', self.non_ascii_encoder.encode(u'\u2028')) - - def test_basic_roundtrip(self): - for char in '&<>': - self.assertEqual( - char, self.decoder.decode( - self.encoder.encode(char))) - - def test_prevent_script_breakout(self): - bad_string = '' - self.assertEqual( - r'"\u003c/script\u003e\u003cscript\u003e' - r'alert(\"gotcha\")\u003c/script\u003e"', - self.encoder.encode(bad_string)) - self.assertEqual( - bad_string, self.decoder.decode( - self.encoder.encode(bad_string))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_errors.py b/script.module.simplejson/lib/simplejson/tests/test_errors.py deleted file mode 100644 index d573825454..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_errors.py +++ /dev/null @@ -1,68 +0,0 @@ -import sys, pickle -from unittest import TestCase - -import simplejson as json -from simplejson.compat import text_type, b - -class TestErrors(TestCase): - def test_string_keys_error(self): - data = [{'a': 'A', 'b': (2, 4), 'c': 3.0, ('d',): 'D tuple'}] - try: - json.dumps(data) - except TypeError: - err = sys.exc_info()[1] - else: - self.fail('Expected TypeError') - self.assertEqual(str(err), - 'keys must be str, int, float, bool or None, not tuple') - - def test_not_serializable(self): - try: - json.dumps(json) - except TypeError: - err = sys.exc_info()[1] - else: - self.fail('Expected TypeError') - self.assertEqual(str(err), - 'Object of type module is not JSON serializable') - - def test_decode_error(self): - err = None - try: - json.loads('{}\na\nb') - except json.JSONDecodeError: - err = sys.exc_info()[1] - else: - self.fail('Expected JSONDecodeError') - self.assertEqual(err.lineno, 2) - self.assertEqual(err.colno, 1) - self.assertEqual(err.endlineno, 3) - self.assertEqual(err.endcolno, 2) - - def test_scan_error(self): - err = None - for t in (text_type, b): - try: - json.loads(t('{"asdf": "')) - except json.JSONDecodeError: - err = sys.exc_info()[1] - else: - self.fail('Expected JSONDecodeError') - self.assertEqual(err.lineno, 1) - self.assertEqual(err.colno, 10) - - def test_error_is_pickable(self): - err = None - try: - json.loads('{}\na\nb') - except json.JSONDecodeError: - err = sys.exc_info()[1] - else: - self.fail('Expected 
JSONDecodeError') - s = pickle.dumps(err) - e = pickle.loads(s) - - self.assertEqual(err.msg, e.msg) - self.assertEqual(err.doc, e.doc) - self.assertEqual(err.pos, e.pos) - self.assertEqual(err.end, e.end) diff --git a/script.module.simplejson/lib/simplejson/tests/test_fail.py b/script.module.simplejson/lib/simplejson/tests/test_fail.py deleted file mode 100644 index 788f3a525b..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_fail.py +++ /dev/null @@ -1,176 +0,0 @@ -import sys -from unittest import TestCase - -import simplejson as json - -# 2007-10-05 -JSONDOCS = [ - # http://json.org/JSON_checker/test/fail1.json - '"A JSON payload should be an object or array, not a string."', - # http://json.org/JSON_checker/test/fail2.json - '["Unclosed array"', - # http://json.org/JSON_checker/test/fail3.json - '{unquoted_key: "keys must be quoted"}', - # http://json.org/JSON_checker/test/fail4.json - '["extra comma",]', - # http://json.org/JSON_checker/test/fail5.json - '["double extra comma",,]', - # http://json.org/JSON_checker/test/fail6.json - '[ , "<-- missing value"]', - # http://json.org/JSON_checker/test/fail7.json - '["Comma after the close"],', - # http://json.org/JSON_checker/test/fail8.json - '["Extra close"]]', - # http://json.org/JSON_checker/test/fail9.json - '{"Extra comma": true,}', - # http://json.org/JSON_checker/test/fail10.json - '{"Extra value after close": true} "misplaced quoted value"', - # http://json.org/JSON_checker/test/fail11.json - '{"Illegal expression": 1 + 2}', - # http://json.org/JSON_checker/test/fail12.json - '{"Illegal invocation": alert()}', - # http://json.org/JSON_checker/test/fail13.json - '{"Numbers cannot have leading zeroes": 013}', - # http://json.org/JSON_checker/test/fail14.json - '{"Numbers cannot be hex": 0x14}', - # http://json.org/JSON_checker/test/fail15.json - '["Illegal backslash escape: \\x15"]', - # http://json.org/JSON_checker/test/fail16.json - '[\\naked]', - # http://json.org/JSON_checker/test/fail17.json - '["Illegal backslash escape: \\017"]', - # http://json.org/JSON_checker/test/fail18.json - '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', - # http://json.org/JSON_checker/test/fail19.json - '{"Missing colon" null}', - # http://json.org/JSON_checker/test/fail20.json - '{"Double colon":: null}', - # http://json.org/JSON_checker/test/fail21.json - '{"Comma instead of colon", null}', - # http://json.org/JSON_checker/test/fail22.json - '["Colon instead of comma": false]', - # http://json.org/JSON_checker/test/fail23.json - '["Bad value", truth]', - # http://json.org/JSON_checker/test/fail24.json - "['single quote']", - # http://json.org/JSON_checker/test/fail25.json - '["\ttab\tcharacter\tin\tstring\t"]', - # http://json.org/JSON_checker/test/fail26.json - '["tab\\ character\\ in\\ string\\ "]', - # http://json.org/JSON_checker/test/fail27.json - '["line\nbreak"]', - # http://json.org/JSON_checker/test/fail28.json - '["line\\\nbreak"]', - # http://json.org/JSON_checker/test/fail29.json - '[0e]', - # http://json.org/JSON_checker/test/fail30.json - '[0e+]', - # http://json.org/JSON_checker/test/fail31.json - '[0e+-1]', - # http://json.org/JSON_checker/test/fail32.json - '{"Comma instead if closing brace": true,', - # http://json.org/JSON_checker/test/fail33.json - '["mismatch"}', - # http://code.google.com/p/simplejson/issues/detail?id=3 - u'["A\u001FZ control characters in string"]', - # misc based on coverage - '{', - '{]', - '{"foo": "bar"]', - '{"foo": "bar"', - 'nul', - 'nulx', - '-', - '-x', - '-e', - '-e0', - 
'-Infinite', - '-Inf', - 'Infinit', - 'Infinite', - 'NaM', - 'NuN', - 'falsy', - 'fal', - 'trug', - 'tru', - '1e', - '1ex', - '1e-', - '1e-x', -] - -SKIPS = { - 1: "why not have a string payload?", - 18: "spec doesn't specify any nesting limitations", -} - -class TestFail(TestCase): - def test_failures(self): - for idx, doc in enumerate(JSONDOCS): - idx = idx + 1 - if idx in SKIPS: - json.loads(doc) - continue - try: - json.loads(doc) - except json.JSONDecodeError: - pass - else: - self.fail("Expected failure for fail%d.json: %r" % (idx, doc)) - - def test_array_decoder_issue46(self): - # http://code.google.com/p/simplejson/issues/detail?id=46 - for doc in [u'[,]', '[,]']: - try: - json.loads(doc) - except json.JSONDecodeError: - e = sys.exc_info()[1] - self.assertEqual(e.pos, 1) - self.assertEqual(e.lineno, 1) - self.assertEqual(e.colno, 2) - except Exception: - e = sys.exc_info()[1] - self.fail("Unexpected exception raised %r %s" % (e, e)) - else: - self.fail("Unexpected success parsing '[,]'") - - def test_truncated_input(self): - test_cases = [ - ('', 'Expecting value', 0), - ('[', "Expecting value or ']'", 1), - ('[42', "Expecting ',' delimiter", 3), - ('[42,', 'Expecting value', 4), - ('["', 'Unterminated string starting at', 1), - ('["spam', 'Unterminated string starting at', 1), - ('["spam"', "Expecting ',' delimiter", 7), - ('["spam",', 'Expecting value', 8), - ('{', 'Expecting property name enclosed in double quotes', 1), - ('{"', 'Unterminated string starting at', 1), - ('{"spam', 'Unterminated string starting at', 1), - ('{"spam"', "Expecting ':' delimiter", 7), - ('{"spam":', 'Expecting value', 8), - ('{"spam":42', "Expecting ',' delimiter", 10), - ('{"spam":42,', 'Expecting property name enclosed in double quotes', - 11), - ('"', 'Unterminated string starting at', 0), - ('"spam', 'Unterminated string starting at', 0), - ('[,', "Expecting value", 1), - ] - for data, msg, idx in test_cases: - try: - json.loads(data) - except json.JSONDecodeError: - e = sys.exc_info()[1] - self.assertEqual( - e.msg[:len(msg)], - msg, - "%r doesn't start with %r for %r" % (e.msg, msg, data)) - self.assertEqual( - e.pos, idx, - "pos %r != %r for %r" % (e.pos, idx, data)) - except Exception: - e = sys.exc_info()[1] - self.fail("Unexpected exception raised %r %s" % (e, e)) - else: - self.fail("Unexpected success parsing '%r'" % (data,)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_float.py b/script.module.simplejson/lib/simplejson/tests/test_float.py deleted file mode 100644 index e382ec21ab..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_float.py +++ /dev/null @@ -1,35 +0,0 @@ -import math -from unittest import TestCase -from simplejson.compat import long_type, text_type -import simplejson as json -from simplejson.decoder import NaN, PosInf, NegInf - -class TestFloat(TestCase): - def test_degenerates_allow(self): - for inf in (PosInf, NegInf): - self.assertEqual(json.loads(json.dumps(inf)), inf) - # Python 2.5 doesn't have math.isnan - nan = json.loads(json.dumps(NaN)) - self.assertTrue((0 + nan) != nan) - - def test_degenerates_ignore(self): - for f in (PosInf, NegInf, NaN): - self.assertEqual(json.loads(json.dumps(f, ignore_nan=True)), None) - - def test_degenerates_deny(self): - for f in (PosInf, NegInf, NaN): - self.assertRaises(ValueError, json.dumps, f, allow_nan=False) - - def test_floats(self): - for num in [1617161771.7650001, math.pi, math.pi**100, - math.pi**-100, 3.1]: - self.assertEqual(float(json.dumps(num)), num) - 
self.assertEqual(json.loads(json.dumps(num)), num) - self.assertEqual(json.loads(text_type(json.dumps(num))), num) - - def test_ints(self): - for num in [1, long_type(1), 1<<32, 1<<64]: - self.assertEqual(json.dumps(num), str(num)) - self.assertEqual(int(json.dumps(num)), num) - self.assertEqual(json.loads(json.dumps(num)), num) - self.assertEqual(json.loads(text_type(json.dumps(num))), num) diff --git a/script.module.simplejson/lib/simplejson/tests/test_for_json.py b/script.module.simplejson/lib/simplejson/tests/test_for_json.py deleted file mode 100644 index b791b883b0..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_for_json.py +++ /dev/null @@ -1,97 +0,0 @@ -import unittest -import simplejson as json - - -class ForJson(object): - def for_json(self): - return {'for_json': 1} - - -class NestedForJson(object): - def for_json(self): - return {'nested': ForJson()} - - -class ForJsonList(object): - def for_json(self): - return ['list'] - - -class DictForJson(dict): - def for_json(self): - return {'alpha': 1} - - -class ListForJson(list): - def for_json(self): - return ['list'] - - -class TestForJson(unittest.TestCase): - def assertRoundTrip(self, obj, other, for_json=True): - if for_json is None: - # None will use the default - s = json.dumps(obj) - else: - s = json.dumps(obj, for_json=for_json) - self.assertEqual( - json.loads(s), - other) - - def test_for_json_encodes_stand_alone_object(self): - self.assertRoundTrip( - ForJson(), - ForJson().for_json()) - - def test_for_json_encodes_object_nested_in_dict(self): - self.assertRoundTrip( - {'hooray': ForJson()}, - {'hooray': ForJson().for_json()}) - - def test_for_json_encodes_object_nested_in_list_within_dict(self): - self.assertRoundTrip( - {'list': [0, ForJson(), 2, 3]}, - {'list': [0, ForJson().for_json(), 2, 3]}) - - def test_for_json_encodes_object_nested_within_object(self): - self.assertRoundTrip( - NestedForJson(), - {'nested': {'for_json': 1}}) - - def test_for_json_encodes_list(self): - self.assertRoundTrip( - ForJsonList(), - ForJsonList().for_json()) - - def test_for_json_encodes_list_within_object(self): - self.assertRoundTrip( - {'nested': ForJsonList()}, - {'nested': ForJsonList().for_json()}) - - def test_for_json_encodes_dict_subclass(self): - self.assertRoundTrip( - DictForJson(a=1), - DictForJson(a=1).for_json()) - - def test_for_json_encodes_list_subclass(self): - self.assertRoundTrip( - ListForJson(['l']), - ListForJson(['l']).for_json()) - - def test_for_json_ignored_if_not_true_with_dict_subclass(self): - for for_json in (None, False): - self.assertRoundTrip( - DictForJson(a=1), - {'a': 1}, - for_json=for_json) - - def test_for_json_ignored_if_not_true_with_list_subclass(self): - for for_json in (None, False): - self.assertRoundTrip( - ListForJson(['l']), - ['l'], - for_json=for_json) - - def test_raises_typeerror_if_for_json_not_true_with_object(self): - self.assertRaises(TypeError, json.dumps, ForJson()) - self.assertRaises(TypeError, json.dumps, ForJson(), for_json=False) diff --git a/script.module.simplejson/lib/simplejson/tests/test_indent.py b/script.module.simplejson/lib/simplejson/tests/test_indent.py deleted file mode 100644 index cea25a575e..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_indent.py +++ /dev/null @@ -1,86 +0,0 @@ -from unittest import TestCase -import textwrap - -import simplejson as json -from simplejson.compat import StringIO - -class TestIndent(TestCase): - def test_indent(self): - h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', - 
'i-vhbjkhnth', - {'nifty': 87}, {'field': 'yes', 'morefield': False} ] - - expect = textwrap.dedent("""\ - [ - \t[ - \t\t"blorpie" - \t], - \t[ - \t\t"whoops" - \t], - \t[], - \t"d-shtaeou", - \t"d-nthiouh", - \t"i-vhbjkhnth", - \t{ - \t\t"nifty": 87 - \t}, - \t{ - \t\t"field": "yes", - \t\t"morefield": false - \t} - ]""") - - - d1 = json.dumps(h) - d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': ')) - d3 = json.dumps(h, indent=' ', sort_keys=True, separators=(',', ': ')) - d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': ')) - - h1 = json.loads(d1) - h2 = json.loads(d2) - h3 = json.loads(d3) - h4 = json.loads(d4) - - self.assertEqual(h1, h) - self.assertEqual(h2, h) - self.assertEqual(h3, h) - self.assertEqual(h4, h) - self.assertEqual(d3, expect.replace('\t', ' ')) - self.assertEqual(d4, expect.replace('\t', ' ')) - # NOTE: Python 2.4 textwrap.dedent converts tabs to spaces, - # so the following is expected to fail. Python 2.4 is not a - # supported platform in simplejson 2.1.0+. - self.assertEqual(d2, expect) - - def test_indent0(self): - h = {3: 1} - def check(indent, expected): - d1 = json.dumps(h, indent=indent) - self.assertEqual(d1, expected) - - sio = StringIO() - json.dump(h, sio, indent=indent) - self.assertEqual(sio.getvalue(), expected) - - # indent=0 should emit newlines - check(0, '{\n"3": 1\n}') - # indent=None is more compact - check(None, '{"3": 1}') - - def test_separators(self): - lst = [1,2,3,4] - expect = '[\n1,\n2,\n3,\n4\n]' - expect_spaces = '[\n1, \n2, \n3, \n4\n]' - # Ensure that separators still works - self.assertEqual( - expect_spaces, - json.dumps(lst, indent=0, separators=(', ', ': '))) - # Force the new defaults - self.assertEqual( - expect, - json.dumps(lst, indent=0, separators=(',', ': '))) - # Added in 2.1.4 - self.assertEqual( - expect, - json.dumps(lst, indent=0)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_item_sort_key.py b/script.module.simplejson/lib/simplejson/tests/test_item_sort_key.py deleted file mode 100644 index 98971b86ac..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_item_sort_key.py +++ /dev/null @@ -1,27 +0,0 @@ -from unittest import TestCase - -import simplejson as json -from operator import itemgetter - -class TestItemSortKey(TestCase): - def test_simple_first(self): - a = {'a': 1, 'c': 5, 'jack': 'jill', 'pick': 'axe', 'array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'} - self.assertEqual( - '{"a": 1, "c": 5, "crate": "dog", "jack": "jill", "pick": "axe", "zeak": "oh", "array": [1, 5, 6, 9], "tuple": [83, 12, 3]}', - json.dumps(a, item_sort_key=json.simple_first)) - - def test_case(self): - a = {'a': 1, 'c': 5, 'Jack': 'jill', 'pick': 'axe', 'Array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'} - self.assertEqual( - '{"Array": [1, 5, 6, 9], "Jack": "jill", "a": 1, "c": 5, "crate": "dog", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}', - json.dumps(a, item_sort_key=itemgetter(0))) - self.assertEqual( - '{"a": 1, "Array": [1, 5, 6, 9], "c": 5, "crate": "dog", "Jack": "jill", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}', - json.dumps(a, item_sort_key=lambda kv: kv[0].lower())) - - def test_item_sort_key_value(self): - # https://github.com/simplejson/simplejson/issues/173 - a = {'a': 1, 'b': 0} - self.assertEqual( - '{"b": 0, "a": 1}', - json.dumps(a, item_sort_key=lambda kv: kv[1])) diff --git a/script.module.simplejson/lib/simplejson/tests/test_iterable.py 
b/script.module.simplejson/lib/simplejson/tests/test_iterable.py deleted file mode 100644 index 35d3e75911..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_iterable.py +++ /dev/null @@ -1,31 +0,0 @@ -import unittest -from simplejson.compat import StringIO - -import simplejson as json - -def iter_dumps(obj, **kw): - return ''.join(json.JSONEncoder(**kw).iterencode(obj)) - -def sio_dump(obj, **kw): - sio = StringIO() - json.dumps(obj, **kw) - return sio.getvalue() - -class TestIterable(unittest.TestCase): - def test_iterable(self): - for l in ([], [1], [1, 2], [1, 2, 3]): - for opts in [{}, {'indent': 2}]: - for dumps in (json.dumps, iter_dumps, sio_dump): - expect = dumps(l, **opts) - default_expect = dumps(sum(l), **opts) - # Default is False - self.assertRaises(TypeError, dumps, iter(l), **opts) - self.assertRaises(TypeError, dumps, iter(l), iterable_as_array=False, **opts) - self.assertEqual(expect, dumps(iter(l), iterable_as_array=True, **opts)) - # Ensure that the "default" gets called - self.assertEqual(default_expect, dumps(iter(l), default=sum, **opts)) - self.assertEqual(default_expect, dumps(iter(l), iterable_as_array=False, default=sum, **opts)) - # Ensure that the "default" does not get called - self.assertEqual( - expect, - dumps(iter(l), iterable_as_array=True, default=sum, **opts)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_namedtuple.py b/script.module.simplejson/lib/simplejson/tests/test_namedtuple.py deleted file mode 100644 index 438789405d..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_namedtuple.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import absolute_import -import unittest -import simplejson as json -from simplejson.compat import StringIO - -try: - from collections import namedtuple -except ImportError: - class Value(tuple): - def __new__(cls, *args): - return tuple.__new__(cls, args) - - def _asdict(self): - return {'value': self[0]} - class Point(tuple): - def __new__(cls, *args): - return tuple.__new__(cls, args) - - def _asdict(self): - return {'x': self[0], 'y': self[1]} -else: - Value = namedtuple('Value', ['value']) - Point = namedtuple('Point', ['x', 'y']) - -class DuckValue(object): - def __init__(self, *args): - self.value = Value(*args) - - def _asdict(self): - return self.value._asdict() - -class DuckPoint(object): - def __init__(self, *args): - self.point = Point(*args) - - def _asdict(self): - return self.point._asdict() - -class DeadDuck(object): - _asdict = None - -class DeadDict(dict): - _asdict = None - -CONSTRUCTORS = [ - lambda v: v, - lambda v: [v], - lambda v: [{'key': v}], -] - -class TestNamedTuple(unittest.TestCase): - def test_namedtuple_dumps(self): - for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]: - d = v._asdict() - self.assertEqual(d, json.loads(json.dumps(v))) - self.assertEqual( - d, - json.loads(json.dumps(v, namedtuple_as_object=True))) - self.assertEqual(d, json.loads(json.dumps(v, tuple_as_array=False))) - self.assertEqual( - d, - json.loads(json.dumps(v, namedtuple_as_object=True, - tuple_as_array=False))) - - def test_namedtuple_dumps_false(self): - for v in [Value(1), Point(1, 2)]: - l = list(v) - self.assertEqual( - l, - json.loads(json.dumps(v, namedtuple_as_object=False))) - self.assertRaises(TypeError, json.dumps, v, - tuple_as_array=False, namedtuple_as_object=False) - - def test_namedtuple_dump(self): - for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]: - d = v._asdict() - sio = StringIO() - json.dump(v, sio) - 
self.assertEqual(d, json.loads(sio.getvalue())) - sio = StringIO() - json.dump(v, sio, namedtuple_as_object=True) - self.assertEqual( - d, - json.loads(sio.getvalue())) - sio = StringIO() - json.dump(v, sio, tuple_as_array=False) - self.assertEqual(d, json.loads(sio.getvalue())) - sio = StringIO() - json.dump(v, sio, namedtuple_as_object=True, - tuple_as_array=False) - self.assertEqual( - d, - json.loads(sio.getvalue())) - - def test_namedtuple_dump_false(self): - for v in [Value(1), Point(1, 2)]: - l = list(v) - sio = StringIO() - json.dump(v, sio, namedtuple_as_object=False) - self.assertEqual( - l, - json.loads(sio.getvalue())) - self.assertRaises(TypeError, json.dump, v, StringIO(), - tuple_as_array=False, namedtuple_as_object=False) - - def test_asdict_not_callable_dump(self): - for f in CONSTRUCTORS: - self.assertRaises(TypeError, - json.dump, f(DeadDuck()), StringIO(), namedtuple_as_object=True) - sio = StringIO() - json.dump(f(DeadDict()), sio, namedtuple_as_object=True) - self.assertEqual( - json.dumps(f({})), - sio.getvalue()) - - def test_asdict_not_callable_dumps(self): - for f in CONSTRUCTORS: - self.assertRaises(TypeError, - json.dumps, f(DeadDuck()), namedtuple_as_object=True) - self.assertEqual( - json.dumps(f({})), - json.dumps(f(DeadDict()), namedtuple_as_object=True)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_pass1.py b/script.module.simplejson/lib/simplejson/tests/test_pass1.py deleted file mode 100644 index f0b5b10e76..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_pass1.py +++ /dev/null @@ -1,71 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -# from http://json.org/JSON_checker/test/pass1.json -JSON = r''' -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* <!-- --", - "# -- --> */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact": [1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "&#34; \u0022 %22 0x22 034 &#34;", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
-: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] -''' - -class TestPass1(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEqual(res, json.loads(out)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_pass2.py b/script.module.simplejson/lib/simplejson/tests/test_pass2.py deleted file mode 100644 index 5d812b3bbe..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_pass2.py +++ /dev/null @@ -1,14 +0,0 @@ -from unittest import TestCase -import simplejson as json - -# from http://json.org/JSON_checker/test/pass2.json -JSON = r''' -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] -''' - -class TestPass2(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEqual(res, json.loads(out)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_pass3.py b/script.module.simplejson/lib/simplejson/tests/test_pass3.py deleted file mode 100644 index 821d60b22c..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_pass3.py +++ /dev/null @@ -1,20 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -# from http://json.org/JSON_checker/test/pass3.json -JSON = r''' -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} -''' - -class TestPass3(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEqual(res, json.loads(out)) diff --git a/script.module.simplejson/lib/simplejson/tests/test_raw_json.py b/script.module.simplejson/lib/simplejson/tests/test_raw_json.py deleted file mode 100644 index 1dfcc2c5f9..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_raw_json.py +++ /dev/null @@ -1,47 +0,0 @@ -import unittest -import simplejson as json - -dct1 = { - 'key1': 'value1' -} - -dct2 = { - 'key2': 'value2', - 'd1': dct1 -} - -dct3 = { - 'key2': 'value2', - 'd1': json.dumps(dct1) -} - -dct4 = { - 'key2': 'value2', - 'd1': json.RawJSON(json.dumps(dct1)) -} - - -class TestRawJson(unittest.TestCase): - - def test_normal_str(self): - self.assertNotEqual(json.dumps(dct2), json.dumps(dct3)) - - def test_raw_json_str(self): - self.assertEqual(json.dumps(dct2), json.dumps(dct4)) - self.assertEqual(dct2, json.loads(json.dumps(dct4))) - - def test_list(self): - self.assertEqual( - json.dumps([dct2]), - json.dumps([json.RawJSON(json.dumps(dct2))])) - self.assertEqual( - [dct2], - json.loads(json.dumps([json.RawJSON(json.dumps(dct2))]))) - - def test_direct(self): - self.assertEqual( - json.dumps(dct2), - json.dumps(json.RawJSON(json.dumps(dct2)))) - self.assertEqual( - dct2, - json.loads(json.dumps(json.RawJSON(json.dumps(dct2))))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_recursion.py b/script.module.simplejson/lib/simplejson/tests/test_recursion.py deleted file mode 100644 index 662eb667ec..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_recursion.py +++ /dev/null @@ -1,67 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -class JSONTestObject: - pass - - -class RecursiveJSONEncoder(json.JSONEncoder): - recurse = False - def default(self, o): - if o is JSONTestObject: - if self.recurse: - return [JSONTestObject] - else: - return 'JSONTestObject' - return 
json.JSONEncoder.default(o) - - -class TestRecursion(TestCase): - def test_listrecursion(self): - x = [] - x.append(x) - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on list recursion") - x = [] - y = [x] - x.append(y) - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on alternating list recursion") - y = [] - x = [y, y] - # ensure that the marker is cleared - json.dumps(x) - - def test_dictrecursion(self): - x = {} - x["test"] = x - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on dict recursion") - x = {} - y = {"a": x, "b": x} - # ensure that the marker is cleared - json.dumps(y) - - def test_defaultrecursion(self): - enc = RecursiveJSONEncoder() - self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"') - enc.recurse = True - try: - enc.encode(JSONTestObject) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on default recursion") diff --git a/script.module.simplejson/lib/simplejson/tests/test_scanstring.py b/script.module.simplejson/lib/simplejson/tests/test_scanstring.py deleted file mode 100644 index d5de1801ae..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_scanstring.py +++ /dev/null @@ -1,196 +0,0 @@ -import sys -from unittest import TestCase - -import simplejson as json -import simplejson.decoder -from simplejson.compat import b, PY3 - -class TestScanString(TestCase): - # The bytes type is intentionally not used in most of these tests - # under Python 3 because the decoder immediately coerces to str before - # calling scanstring. In Python 2 we are testing the code paths - # for both unicode and str. - # - # The reason this is done is because Python 3 would require - # entirely different code paths for parsing bytes and str. 
- # - def test_py_scanstring(self): - self._test_scanstring(simplejson.decoder.py_scanstring) - - def test_c_scanstring(self): - if not simplejson.decoder.c_scanstring: - return - self._test_scanstring(simplejson.decoder.c_scanstring) - - self.assertTrue(isinstance(simplejson.decoder.c_scanstring('""', 0)[0], str)) - - def _test_scanstring(self, scanstring): - if sys.maxunicode == 65535: - self.assertEqual( - scanstring(u'"z\U0001d120x"', 1, None, True), - (u'z\U0001d120x', 6)) - else: - self.assertEqual( - scanstring(u'"z\U0001d120x"', 1, None, True), - (u'z\U0001d120x', 5)) - - self.assertEqual( - scanstring('"\\u007b"', 1, None, True), - (u'{', 8)) - - self.assertEqual( - scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True), - (u'A JSON payload should be an object or array, not a string.', 60)) - - self.assertEqual( - scanstring('["Unclosed array"', 2, None, True), - (u'Unclosed array', 17)) - - self.assertEqual( - scanstring('["extra comma",]', 2, None, True), - (u'extra comma', 14)) - - self.assertEqual( - scanstring('["double extra comma",,]', 2, None, True), - (u'double extra comma', 21)) - - self.assertEqual( - scanstring('["Comma after the close"],', 2, None, True), - (u'Comma after the close', 24)) - - self.assertEqual( - scanstring('["Extra close"]]', 2, None, True), - (u'Extra close', 14)) - - self.assertEqual( - scanstring('{"Extra comma": true,}', 2, None, True), - (u'Extra comma', 14)) - - self.assertEqual( - scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True), - (u'Extra value after close', 26)) - - self.assertEqual( - scanstring('{"Illegal expression": 1 + 2}', 2, None, True), - (u'Illegal expression', 21)) - - self.assertEqual( - scanstring('{"Illegal invocation": alert()}', 2, None, True), - (u'Illegal invocation', 21)) - - self.assertEqual( - scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True), - (u'Numbers cannot have leading zeroes', 37)) - - self.assertEqual( - scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True), - (u'Numbers cannot be hex', 24)) - - self.assertEqual( - scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True), - (u'Too deep', 30)) - - self.assertEqual( - scanstring('{"Missing colon" null}', 2, None, True), - (u'Missing colon', 16)) - - self.assertEqual( - scanstring('{"Double colon":: null}', 2, None, True), - (u'Double colon', 15)) - - self.assertEqual( - scanstring('{"Comma instead of colon", null}', 2, None, True), - (u'Comma instead of colon', 25)) - - self.assertEqual( - scanstring('["Colon instead of comma": false]', 2, None, True), - (u'Colon instead of comma', 25)) - - self.assertEqual( - scanstring('["Bad value", truth]', 2, None, True), - (u'Bad value', 12)) - - for c in map(chr, range(0x00, 0x1f)): - self.assertEqual( - scanstring(c + '"', 0, None, False), - (c, 2)) - self.assertRaises( - ValueError, - scanstring, c + '"', 0, None, True) - - self.assertRaises(ValueError, scanstring, '', 0, None, True) - self.assertRaises(ValueError, scanstring, 'a', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\u', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\u0', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\u01', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\u012', 0, None, True) - self.assertRaises(ValueError, scanstring, '\\u0123', 0, None, True) - if sys.maxunicode > 65535: - 
self.assertRaises(ValueError, - scanstring, '\\ud834\\u"', 0, None, True) - self.assertRaises(ValueError, - scanstring, '\\ud834\\x0123"', 0, None, True) - - def test_issue3623(self): - self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1, - "xxx") - self.assertRaises(UnicodeDecodeError, - json.encoder.encode_basestring_ascii, b("xx\xff")) - - def test_overflow(self): - # Python 2.5 does not have maxsize, Python 3 does not have maxint - maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None)) - assert maxsize is not None - self.assertRaises(OverflowError, json.decoder.scanstring, "xxx", - maxsize + 1) - - def test_surrogates(self): - scanstring = json.decoder.scanstring - - def assertScan(given, expect, test_utf8=True): - givens = [given] - if not PY3 and test_utf8: - givens.append(given.encode('utf8')) - for given in givens: - (res, count) = scanstring(given, 1, None, True) - self.assertEqual(len(given), count) - self.assertEqual(res, expect) - - assertScan( - u'"z\\ud834\\u0079x"', - u'z\ud834yx') - assertScan( - u'"z\\ud834\\udd20x"', - u'z\U0001d120x') - assertScan( - u'"z\\ud834\\ud834\\udd20x"', - u'z\ud834\U0001d120x') - assertScan( - u'"z\\ud834x"', - u'z\ud834x') - assertScan( - u'"z\\udd20x"', - u'z\udd20x') - assertScan( - u'"z\ud834x"', - u'z\ud834x') - # It may look strange to join strings together, but Python is drunk. - # https://gist.github.com/etrepum/5538443 - assertScan( - u'"z\\ud834\udd20x12345"', - u''.join([u'z\ud834', u'\udd20x12345'])) - assertScan( - u'"z\ud834\\udd20x"', - u''.join([u'z\ud834', u'\udd20x'])) - # these have different behavior given UTF8 input, because the surrogate - # pair may be joined (in maxunicode > 65535 builds) - assertScan( - u''.join([u'"z\ud834', u'\udd20x"']), - u''.join([u'z\ud834', u'\udd20x']), - test_utf8=False) - - self.assertRaises(ValueError, - scanstring, u'"z\\ud83x"', 1, None, True) - self.assertRaises(ValueError, - scanstring, u'"z\\ud834\\udd2x"', 1, None, True) diff --git a/script.module.simplejson/lib/simplejson/tests/test_separators.py b/script.module.simplejson/lib/simplejson/tests/test_separators.py deleted file mode 100644 index 91b4d4fb6f..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_separators.py +++ /dev/null @@ -1,42 +0,0 @@ -import textwrap -from unittest import TestCase - -import simplejson as json - - -class TestSeparators(TestCase): - def test_separators(self): - h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth', - {'nifty': 87}, {'field': 'yes', 'morefield': False} ] - - expect = textwrap.dedent("""\ - [ - [ - "blorpie" - ] , - [ - "whoops" - ] , - [] , - "d-shtaeou" , - "d-nthiouh" , - "i-vhbjkhnth" , - { - "nifty" : 87 - } , - { - "field" : "yes" , - "morefield" : false - } - ]""") - - - d1 = json.dumps(h) - d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : ')) - - h1 = json.loads(d1) - h2 = json.loads(d2) - - self.assertEqual(h1, h) - self.assertEqual(h2, h) - self.assertEqual(d2, expect) diff --git a/script.module.simplejson/lib/simplejson/tests/test_speedups.py b/script.module.simplejson/lib/simplejson/tests/test_speedups.py deleted file mode 100644 index 8b146df9a0..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_speedups.py +++ /dev/null @@ -1,114 +0,0 @@ -from __future__ import with_statement - -import sys -import unittest -from unittest import TestCase - -import simplejson -from simplejson import encoder, decoder, scanner -from simplejson.compat import PY3, long_type, b - - -def has_speedups(): - 
return encoder.c_make_encoder is not None - - -def skip_if_speedups_missing(func): - def wrapper(*args, **kwargs): - if not has_speedups(): - if hasattr(unittest, 'SkipTest'): - raise unittest.SkipTest("C Extension not available") - else: - sys.stdout.write("C Extension not available") - return - return func(*args, **kwargs) - - return wrapper - - -class BadBool: - def __bool__(self): - 1/0 - __nonzero__ = __bool__ - - -class TestDecode(TestCase): - @skip_if_speedups_missing - def test_make_scanner(self): - self.assertRaises(AttributeError, scanner.c_make_scanner, 1) - - @skip_if_speedups_missing - def test_bad_bool_args(self): - def test(value): - decoder.JSONDecoder(strict=BadBool()).decode(value) - self.assertRaises(ZeroDivisionError, test, '""') - self.assertRaises(ZeroDivisionError, test, '{}') - if not PY3: - self.assertRaises(ZeroDivisionError, test, u'""') - self.assertRaises(ZeroDivisionError, test, u'{}') - -class TestEncode(TestCase): - @skip_if_speedups_missing - def test_make_encoder(self): - self.assertRaises( - TypeError, - encoder.c_make_encoder, - None, - ("\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7" - "\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75"), - None - ) - - @skip_if_speedups_missing - def test_bad_str_encoder(self): - # Issue #31505: There shouldn't be an assertion failure in case - # c_make_encoder() receives a bad encoder() argument. - import decimal - def bad_encoder1(*args): - return None - enc = encoder.c_make_encoder( - None, lambda obj: str(obj), - bad_encoder1, None, ': ', ', ', - False, False, False, {}, False, False, False, - None, None, 'utf-8', False, False, decimal.Decimal, False) - self.assertRaises(TypeError, enc, 'spam', 4) - self.assertRaises(TypeError, enc, {'spam': 42}, 4) - - def bad_encoder2(*args): - 1/0 - enc = encoder.c_make_encoder( - None, lambda obj: str(obj), - bad_encoder2, None, ': ', ', ', - False, False, False, {}, False, False, False, - None, None, 'utf-8', False, False, decimal.Decimal, False) - self.assertRaises(ZeroDivisionError, enc, 'spam', 4) - - @skip_if_speedups_missing - def test_bad_bool_args(self): - def test(name): - encoder.JSONEncoder(**{name: BadBool()}).encode({}) - self.assertRaises(ZeroDivisionError, test, 'skipkeys') - self.assertRaises(ZeroDivisionError, test, 'ensure_ascii') - self.assertRaises(ZeroDivisionError, test, 'check_circular') - self.assertRaises(ZeroDivisionError, test, 'allow_nan') - self.assertRaises(ZeroDivisionError, test, 'sort_keys') - self.assertRaises(ZeroDivisionError, test, 'use_decimal') - self.assertRaises(ZeroDivisionError, test, 'namedtuple_as_object') - self.assertRaises(ZeroDivisionError, test, 'tuple_as_array') - self.assertRaises(ZeroDivisionError, test, 'bigint_as_string') - self.assertRaises(ZeroDivisionError, test, 'for_json') - self.assertRaises(ZeroDivisionError, test, 'ignore_nan') - self.assertRaises(ZeroDivisionError, test, 'iterable_as_array') - - @skip_if_speedups_missing - def test_int_as_string_bitcount_overflow(self): - long_count = long_type(2)**32+31 - def test(): - encoder.JSONEncoder(int_as_string_bitcount=long_count).encode(0) - self.assertRaises((TypeError, OverflowError), test) - - if PY3: - @skip_if_speedups_missing - def test_bad_encoding(self): - with self.assertRaises(UnicodeEncodeError): - encoder.JSONEncoder(encoding='\udcff').encode({b('key'): 123}) diff --git a/script.module.simplejson/lib/simplejson/tests/test_str_subclass.py b/script.module.simplejson/lib/simplejson/tests/test_str_subclass.py deleted file mode 100644 index b6c8351f26..0000000000 --- 
a/script.module.simplejson/lib/simplejson/tests/test_str_subclass.py +++ /dev/null @@ -1,21 +0,0 @@ -from unittest import TestCase - -import simplejson -from simplejson.compat import text_type - -# Tests for issue demonstrated in https://github.com/simplejson/simplejson/issues/144 -class WonkyTextSubclass(text_type): - def __getslice__(self, start, end): - return self.__class__('not what you wanted!') - -class TestStrSubclass(TestCase): - def test_dump_load(self): - for s in ['', '"hello"', 'text', u'\u005c']: - self.assertEqual( - s, - simplejson.loads(simplejson.dumps(WonkyTextSubclass(s)))) - - self.assertEqual( - s, - simplejson.loads(simplejson.dumps(WonkyTextSubclass(s), - ensure_ascii=False))) diff --git a/script.module.simplejson/lib/simplejson/tests/test_subclass.py b/script.module.simplejson/lib/simplejson/tests/test_subclass.py deleted file mode 100644 index 2bae3b6309..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_subclass.py +++ /dev/null @@ -1,37 +0,0 @@ -from unittest import TestCase -import simplejson as json - -from decimal import Decimal - -class AlternateInt(int): - def __repr__(self): - return 'invalid json' - __str__ = __repr__ - - -class AlternateFloat(float): - def __repr__(self): - return 'invalid json' - __str__ = __repr__ - - -# class AlternateDecimal(Decimal): -# def __repr__(self): -# return 'invalid json' - - -class TestSubclass(TestCase): - def test_int(self): - self.assertEqual(json.dumps(AlternateInt(1)), '1') - self.assertEqual(json.dumps(AlternateInt(-1)), '-1') - self.assertEqual(json.loads(json.dumps({AlternateInt(1): 1})), {'1': 1}) - - def test_float(self): - self.assertEqual(json.dumps(AlternateFloat(1.0)), '1.0') - self.assertEqual(json.dumps(AlternateFloat(-1.0)), '-1.0') - self.assertEqual(json.loads(json.dumps({AlternateFloat(1.0): 1})), {'1.0': 1}) - - # NOTE: Decimal subclasses are not supported as-is - # def test_decimal(self): - # self.assertEqual(json.dumps(AlternateDecimal('1.0')), '1.0') - # self.assertEqual(json.dumps(AlternateDecimal('-1.0')), '-1.0') diff --git a/script.module.simplejson/lib/simplejson/tests/test_tool.py b/script.module.simplejson/lib/simplejson/tests/test_tool.py deleted file mode 100644 index 914bff8bca..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_tool.py +++ /dev/null @@ -1,114 +0,0 @@ -from __future__ import with_statement -import os -import sys -import textwrap -import unittest -import subprocess -import tempfile -try: - # Python 3.x - from test.support import strip_python_stderr -except ImportError: - # Python 2.6+ - try: - from test.test_support import strip_python_stderr - except ImportError: - # Python 2.5 - import re - def strip_python_stderr(stderr): - return re.sub( - r"\[\d+ refs\]\r?\n?$".encode(), - "".encode(), - stderr).strip() - -def open_temp_file(): - if sys.version_info >= (2, 6): - file = tempfile.NamedTemporaryFile(delete=False) - filename = file.name - else: - fd, filename = tempfile.mkstemp() - file = os.fdopen(fd, 'w+b') - return file, filename - -class TestTool(unittest.TestCase): - data = """ - - [["blorpie"],[ "whoops" ] , [ - ],\t"d-shtaeou",\r"d-nthiouh", - "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field" - :"yes"} ] - """ - - expect = textwrap.dedent("""\ - [ - [ - "blorpie" - ], - [ - "whoops" - ], - [], - "d-shtaeou", - "d-nthiouh", - "i-vhbjkhnth", - { - "nifty": 87 - }, - { - "field": "yes", - "morefield": false - } - ] - """) - - def runTool(self, args=None, data=None): - argv = [sys.executable, '-m', 'simplejson.tool'] - if 
args: - argv.extend(args) - proc = subprocess.Popen(argv, - stdin=subprocess.PIPE, - stderr=subprocess.PIPE, - stdout=subprocess.PIPE) - out, err = proc.communicate(data) - self.assertEqual(strip_python_stderr(err), ''.encode()) - self.assertEqual(proc.returncode, 0) - return out.decode('utf8').splitlines() - - def test_stdin_stdout(self): - self.assertEqual( - self.runTool(data=self.data.encode()), - self.expect.splitlines()) - - def test_infile_stdout(self): - infile, infile_name = open_temp_file() - try: - infile.write(self.data.encode()) - infile.close() - self.assertEqual( - self.runTool(args=[infile_name]), - self.expect.splitlines()) - finally: - os.unlink(infile_name) - - def test_infile_outfile(self): - infile, infile_name = open_temp_file() - try: - infile.write(self.data.encode()) - infile.close() - # outfile will get overwritten by tool, so the delete - # may not work on some platforms. Do it manually. - outfile, outfile_name = open_temp_file() - try: - outfile.close() - self.assertEqual( - self.runTool(args=[infile_name, outfile_name]), - []) - with open(outfile_name, 'rb') as f: - self.assertEqual( - f.read().decode('utf8').splitlines(), - self.expect.splitlines() - ) - finally: - os.unlink(outfile_name) - finally: - os.unlink(infile_name) diff --git a/script.module.simplejson/lib/simplejson/tests/test_tuple.py b/script.module.simplejson/lib/simplejson/tests/test_tuple.py deleted file mode 100644 index 4ad7b0e861..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_tuple.py +++ /dev/null @@ -1,47 +0,0 @@ -import unittest - -from simplejson.compat import StringIO -import simplejson as json - -class TestTuples(unittest.TestCase): - def test_tuple_array_dumps(self): - t = (1, 2, 3) - expect = json.dumps(list(t)) - # Default is True - self.assertEqual(expect, json.dumps(t)) - self.assertEqual(expect, json.dumps(t, tuple_as_array=True)) - self.assertRaises(TypeError, json.dumps, t, tuple_as_array=False) - # Ensure that the "default" does not get called - self.assertEqual(expect, json.dumps(t, default=repr)) - self.assertEqual(expect, json.dumps(t, tuple_as_array=True, - default=repr)) - # Ensure that the "default" gets called - self.assertEqual( - json.dumps(repr(t)), - json.dumps(t, tuple_as_array=False, default=repr)) - - def test_tuple_array_dump(self): - t = (1, 2, 3) - expect = json.dumps(list(t)) - # Default is True - sio = StringIO() - json.dump(t, sio) - self.assertEqual(expect, sio.getvalue()) - sio = StringIO() - json.dump(t, sio, tuple_as_array=True) - self.assertEqual(expect, sio.getvalue()) - self.assertRaises(TypeError, json.dump, t, StringIO(), - tuple_as_array=False) - # Ensure that the "default" does not get called - sio = StringIO() - json.dump(t, sio, default=repr) - self.assertEqual(expect, sio.getvalue()) - sio = StringIO() - json.dump(t, sio, tuple_as_array=True, default=repr) - self.assertEqual(expect, sio.getvalue()) - # Ensure that the "default" gets called - sio = StringIO() - json.dump(t, sio, tuple_as_array=False, default=repr) - self.assertEqual( - json.dumps(repr(t)), - sio.getvalue()) diff --git a/script.module.simplejson/lib/simplejson/tests/test_unicode.py b/script.module.simplejson/lib/simplejson/tests/test_unicode.py deleted file mode 100644 index 0c7b1a6c44..0000000000 --- a/script.module.simplejson/lib/simplejson/tests/test_unicode.py +++ /dev/null @@ -1,154 +0,0 @@ -import sys -import codecs -from unittest import TestCase - -import simplejson as json -from simplejson.compat import unichr, text_type, b, BytesIO - -class 
TestUnicode(TestCase): - def test_encoding1(self): - encoder = json.JSONEncoder(encoding='utf-8') - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - s = u.encode('utf-8') - ju = encoder.encode(u) - js = encoder.encode(s) - self.assertEqual(ju, js) - - def test_encoding2(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - s = u.encode('utf-8') - ju = json.dumps(u, encoding='utf-8') - js = json.dumps(s, encoding='utf-8') - self.assertEqual(ju, js) - - def test_encoding3(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps(u) - self.assertEqual(j, '"\\u03b1\\u03a9"') - - def test_encoding4(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps([u]) - self.assertEqual(j, '["\\u03b1\\u03a9"]') - - def test_encoding5(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps(u, ensure_ascii=False) - self.assertEqual(j, u'"' + u + u'"') - - def test_encoding6(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps([u], ensure_ascii=False) - self.assertEqual(j, u'["' + u + u'"]') - - def test_big_unicode_encode(self): - u = u'\U0001d120' - self.assertEqual(json.dumps(u), '"\\ud834\\udd20"') - self.assertEqual(json.dumps(u, ensure_ascii=False), u'"\U0001d120"') - - def test_big_unicode_decode(self): - u = u'z\U0001d120x' - self.assertEqual(json.loads('"' + u + '"'), u) - self.assertEqual(json.loads('"z\\ud834\\udd20x"'), u) - - def test_unicode_decode(self): - for i in range(0, 0xd7ff): - u = unichr(i) - #s = '"\\u{0:04x}"'.format(i) - s = '"\\u%04x"' % (i,) - self.assertEqual(json.loads(s), u) - - def test_object_pairs_hook_with_unicode(self): - s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}' - p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4), - (u"qrt", 5), (u"pad", 6), (u"hoy", 7)] - self.assertEqual(json.loads(s), eval(s)) - self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p) - od = json.loads(s, object_pairs_hook=json.OrderedDict) - self.assertEqual(od, json.OrderedDict(p)) - self.assertEqual(type(od), json.OrderedDict) - # the object_pairs_hook takes priority over the object_hook - self.assertEqual(json.loads(s, - object_pairs_hook=json.OrderedDict, - object_hook=lambda x: None), - json.OrderedDict(p)) - - - def test_default_encoding(self): - self.assertEqual(json.loads(u'{"a": "\xe9"}'.encode('utf-8')), - {'a': u'\xe9'}) - - def test_unicode_preservation(self): - self.assertEqual(type(json.loads(u'""')), text_type) - self.assertEqual(type(json.loads(u'"a"')), text_type) - self.assertEqual(type(json.loads(u'["a"]')[0]), text_type) - - def test_ensure_ascii_false_returns_unicode(self): - # http://code.google.com/p/simplejson/issues/detail?id=48 - self.assertEqual(type(json.dumps([], ensure_ascii=False)), text_type) - self.assertEqual(type(json.dumps(0, ensure_ascii=False)), text_type) - self.assertEqual(type(json.dumps({}, ensure_ascii=False)), text_type) - self.assertEqual(type(json.dumps("", ensure_ascii=False)), text_type) - - def test_ensure_ascii_false_bytestring_encoding(self): - # http://code.google.com/p/simplejson/issues/detail?id=48 - doc1 = {u'quux': b('Arr\xc3\xaat sur images')} - doc2 = {u'quux': u'Arr\xeat sur images'} - doc_ascii = '{"quux": "Arr\\u00eat sur images"}' - doc_unicode = u'{"quux": "Arr\xeat sur images"}' - self.assertEqual(json.dumps(doc1), doc_ascii) - self.assertEqual(json.dumps(doc2), doc_ascii) - self.assertEqual(json.dumps(doc1, 
ensure_ascii=False), doc_unicode) - self.assertEqual(json.dumps(doc2, ensure_ascii=False), doc_unicode) - - def test_ensure_ascii_linebreak_encoding(self): - # http://timelessrepo.com/json-isnt-a-javascript-subset - s1 = u'\u2029\u2028' - s2 = s1.encode('utf8') - expect = '"\\u2029\\u2028"' - expect_non_ascii = u'"\u2029\u2028"' - self.assertEqual(json.dumps(s1), expect) - self.assertEqual(json.dumps(s2), expect) - self.assertEqual(json.dumps(s1, ensure_ascii=False), expect_non_ascii) - self.assertEqual(json.dumps(s2, ensure_ascii=False), expect_non_ascii) - - def test_invalid_escape_sequences(self): - # incomplete escape sequence - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1234') - # invalid escape sequence - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123x"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12x4"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1x34"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ux234"') - if sys.maxunicode > 65535: - # invalid escape sequence for low surrogate - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000x"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00x0"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0x00"') - self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\ux000"') - - def test_ensure_ascii_still_works(self): - # in the ascii range, ensure that everything is the same - for c in map(unichr, range(0, 127)): - self.assertEqual( - json.dumps(c, ensure_ascii=False), - json.dumps(c)) - snowman = u'\N{SNOWMAN}' - self.assertEqual( - json.dumps(c, ensure_ascii=False), - '"' + c + '"') - - def test_strip_bom(self): - content = u"\u3053\u3093\u306b\u3061\u308f" - json_doc = codecs.BOM_UTF8 + b(json.dumps(content)) - self.assertEqual(json.load(BytesIO(json_doc)), content) - for doc in json_doc, json_doc.decode('utf8'): - self.assertEqual(json.loads(doc), content) diff --git a/script.module.simplejson/lib/simplejson/tool.py b/script.module.simplejson/lib/simplejson/tool.py deleted file mode 100644 index 062e8e2c18..0000000000 --- a/script.module.simplejson/lib/simplejson/tool.py +++ /dev/null @@ -1,42 +0,0 @@ -r"""Command-line tool to validate and pretty-print JSON - -Usage:: - - $ echo '{"json":"obj"}' | python -m simplejson.tool - { - "json": "obj" - } - $ echo '{ 1.2:3.4}' | python -m simplejson.tool - Expecting property name: line 1 column 2 (char 2) - -""" -from __future__ import with_statement -import sys -import simplejson as json - -def main(): - if len(sys.argv) == 1: - infile = sys.stdin - outfile = sys.stdout - elif len(sys.argv) == 2: - infile = open(sys.argv[1], 'r') - outfile = sys.stdout - elif len(sys.argv) == 3: - infile = open(sys.argv[1], 'r') - outfile = open(sys.argv[2], 'w') - else: - raise SystemExit(sys.argv[0] + " [infile [outfile]]") - with infile: - try: - obj = json.load(infile, - object_pairs_hook=json.OrderedDict, - use_decimal=True) - except ValueError: - 
raise SystemExit(sys.exc_info()[1]) - with outfile: - json.dump(obj, outfile, sort_keys=True, indent=' ', use_decimal=True) - outfile.write('\n') - - -if __name__ == '__main__': - main() diff --git a/script.module.simplejson/icon.png b/script.module.simplejson/resources/icon.png similarity index 100% rename from script.module.simplejson/icon.png rename to script.module.simplejson/resources/icon.png