Property changes on: .
___________________________________________________________________
Name: svnmerge-blocked
- /python/trunk:61438,61442,61454,61475,61478,61486,61489,61491-61492,61529,61564,61570,61577,61585,61587,61590,61779,61810,61867,61871,61875,61878-61882,61904,61936-61937,61940,62005-62006,62015,62019-62020,62023,62025-62026,62028,62030,62067,62075,62095,62097,62099,62103,62123,62126,62138,62143,62195,62223,62309-62310,62398,62431,62511,62517-62518,62536,62599,62666,62684,62720,62722,62734,62736,62748,62769,62793,62797
+ /python/trunk:61438,61442,61454,61475,61478,61486,61489,61491-61492,61529,61564,61570,61577,61585,61587,61590,61779,61810,61867,61871,61875,61878-61882,61904,61936-61937,61940,62005-62006,62015,62019-62020,62023,62025-62026,62028,62030,62067,62075,62095,62097,62099,62103,62123,62126,62138,62143,62195,62223,62309-62310,62398,62431,62511,62517-62518,62536,62599,62666,62684,62720,62722,62793,62797
Name: svnmerge-integrated
- /python/trunk:1-61437,61439-61441,61443-61453,61455-61474,61476-61477,61479-61485,61487-61488,61490,61493-61528,61530-61563,61565-61569,61571-61576,61578-61584,61586,61588-61589,61591-61778,61780-61809,61811-61866,61868-61870,61872-61874,61876-61877,61883-61903,61905-61935,61938-61939,61941-62004,62007-62014,62016-62018,62021-62022,62024,62027,62029,62031-62066,62068-62074,62076-62094,62096,62098,62100-62102,62104-62122,62124-62125,62127-62137,62139-62142,62144-62194,62196-62222,62224-62308,62311-62397,62399-62430,62432-62510,62512-62516,62519-62535,62537-62598,62600-62665,62667-62683,62685-62719,62721,62723-62733,62735,62737-62747,62749-62768,62770-62792,62794-62796,62798-62801
+ /python/trunk:1-61437,61439-61441,61443-61453,61455-61474,61476-61477,61479-61485,61487-61488,61490,61493-61528,61530-61563,61565-61569,61571-61576,61578-61584,61586,61588-61589,61591-61778,61780-61809,61811-61866,61868-61870,61872-61874,61876-61877,61883-61903,61905-61935,61938-61939,61941-62004,62007-62014,62016-62018,62021-62022,62024,62027,62029,62031-62066,62068-62074,62076-62094,62096,62098,62100-62102,62104-62122,62124-62125,62127-62137,62139-62142,62144-62194,62196-62222,62224-62308,62311-62397,62399-62430,62432-62510,62512-62516,62519-62535,62537-62598,62600-62665,62667-62683,62685-62719,62721,62723-62792,62794-62796,62798-62801
Index: PCbuild/pythoncore.vcproj
===================================================================
--- PCbuild/pythoncore.vcproj (revision 62816)
+++ PCbuild/pythoncore.vcproj (working copy)
@@ -1007,6 +1007,10 @@
>
+
+
Index: PCbuild/pcbuild.sln
===================================================================
--- PCbuild/pcbuild.sln (revision 62816)
+++ PCbuild/pcbuild.sln (working copy)
@@ -29,7 +29,6 @@
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{553EC33E-9816-4996-A660-5D6186A0B0B3}"
ProjectSection(SolutionItems) = preProject
- ..\Modules\getbuildinfo.c = ..\Modules\getbuildinfo.c
readme.txt = readme.txt
EndProjectSection
EndProject
Index: setup.py
===================================================================
--- setup.py (revision 62816)
+++ setup.py (working copy)
@@ -430,6 +430,8 @@
exts.append( Extension("_bytesio", ["_bytesio.c"]) )
# atexit
exts.append( Extension("atexit", ["atexitmodule.c"]) )
+ # _json speedups
+ exts.append( Extension("_json", ["_json.c"]) )
# Python C API test module
exts.append( Extension('_testcapi', ['_testcapimodule.c']) )
# profiler (_lsprof is for cProfile.py)
Index: PC/config.c
===================================================================
--- PC/config.c (revision 62816)
+++ PC/config.c (working copy)
@@ -46,6 +46,7 @@
extern void init_struct(void);
extern void initdatetime(void);
extern void init_functools(void);
+extern void init_json(void);
extern void initzlib(void);
extern void init_multibytecodec(void);
@@ -119,6 +120,7 @@
{"_struct", init_struct},
{"datetime", initdatetime},
{"_functools", init_functools},
+ {"_json", init_json},
{"xxsubtype", initxxsubtype},
{"zipimport", initzipimport},
Index: PC/VS7.1/pythoncore.vcproj
===================================================================
--- PC/VS7.1/pythoncore.vcproj (revision 62816)
+++ PC/VS7.1/pythoncore.vcproj (working copy)
@@ -377,6 +377,9 @@
RelativePath="..\..\Modules\_heapqmodule.c">
+
+
+
+
Index: PC/VC6/pythoncore.dsp
===================================================================
--- PC/VC6/pythoncore.dsp (revision 62816)
+++ PC/VC6/pythoncore.dsp (working copy)
@@ -145,6 +145,10 @@
# End Source File
# Begin Source File
+SOURCE=..\..\Modules\_json.c
+# End Source File
+# Begin Source File
+
SOURCE=..\..\Modules\_localemodule.c
# End Source File
# Begin Source File
Index: Doc/whatsnew/2.6.rst
===================================================================
--- Doc/whatsnew/2.6.rst (revision 62816)
+++ Doc/whatsnew/2.6.rst (working copy)
@@ -1210,6 +1210,7 @@
long time. This implementation was significantly updated by Jeffrey
Yasskin.
+
Other Language Changes
======================
@@ -2146,6 +2147,31 @@
.. ======================================================================
.. whole new modules get described in subsections here
+The :mod:`json` module
+----------------------
+
+The new :mod:`json` module supports the encoding and decoding of Python types in
+JSON (JavaScript Object Notation). JSON is a lightweight interchange format
+often used in web applications. For more information about JSON, see
+http://www.json.org.
+
+:mod:`json` comes with support for decoding and encoding most built-in Python
+types. The following example encodes and decodes a dictionary::
+
+ >>> import json
+ >>> data = {"spam" : "foo", "parrot" : 42}
+ >>> in_json = json.dumps(data) # Encode the data
+ >>> in_json
+ '{"parrot": 42, "spam": "foo"}'
+ >>> json.loads(in_json) # Decode into a Python object
+    {'parrot': 42, 'spam': 'foo'}
+
+It is also possible to write your own decoders and encoders to support more
+types. Pretty-printing of the JSON strings is also supported.
+
+:mod:`json` (originally called simplejson) was written by Bob Ippolito.
+
+
Improved SSL Support
--------------------------------------------------
Index: Doc/library/netdata.rst
===================================================================
--- Doc/library/netdata.rst (revision 62816)
+++ Doc/library/netdata.rst (working copy)
@@ -12,6 +12,7 @@
.. toctree::
email.rst
+ json.rst
mailcap.rst
mailbox.rst
mhlib.rst
Property changes on: Lib/json
___________________________________________________________________
Name: svn:ignore
+ *.pyc
*.pyo
Property changes on: Lib/json/tests
___________________________________________________________________
Name: svn:ignore
+ *.pyc
*.pyo
Index: Lib/json/tests/__init__.py
===================================================================
--- Lib/json/tests/__init__.py (revision 62736)
+++ Lib/json/tests/__init__.py (working copy)
@@ -31,7 +31,5 @@
runner.run(suite)
if __name__ == '__main__':
- import os
- import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
main()
Index: Lib/json/tests/test_dump.py
===================================================================
--- Lib/json/tests/test_dump.py (revision 62736)
+++ Lib/json/tests/test_dump.py (working copy)
@@ -1,5 +1,5 @@
from unittest import TestCase
-from cStringIO import StringIO
+from io import StringIO
import json
Index: Lib/json/tests/test_recursion.py
===================================================================
--- Lib/json/tests/test_recursion.py (revision 62736)
+++ Lib/json/tests/test_recursion.py (working copy)
@@ -5,6 +5,7 @@
class JSONTestObject:
pass
+
class RecursiveJSONEncoder(json.JSONEncoder):
recurse = False
def default(self, o):
@@ -15,6 +16,7 @@
return 'JSONTestObject'
return json.JSONEncoder.default(o)
+
class TestRecursion(TestCase):
def test_listrecursion(self):
x = []
Index: Lib/json/tests/test_scanstring.py
===================================================================
--- Lib/json/tests/test_scanstring.py (revision 62736)
+++ Lib/json/tests/test_scanstring.py (working copy)
@@ -9,94 +9,95 @@
self._test_scanstring(json.decoder.py_scanstring)
def test_c_scanstring(self):
- self._test_scanstring(json.decoder.c_scanstring)
+ if json.decoder.c_scanstring is not None:
+ self._test_scanstring(json.decoder.c_scanstring)
def _test_scanstring(self, scanstring):
self.assertEquals(
scanstring('"z\\ud834\\udd20x"', 1, None, True),
- (u'z\U0001d120x', 16))
+ ('z\U0001d120x', 16))
if sys.maxunicode == 65535:
self.assertEquals(
- scanstring(u'"z\U0001d120x"', 1, None, True),
- (u'z\U0001d120x', 6))
+ scanstring('"z\U0001d120x"', 1, None, True),
+ ('z\U0001d120x', 6))
else:
self.assertEquals(
- scanstring(u'"z\U0001d120x"', 1, None, True),
- (u'z\U0001d120x', 5))
+ scanstring('"z\U0001d120x"', 1, None, True),
+ ('z\U0001d120x', 5))
self.assertEquals(
scanstring('"\\u007b"', 1, None, True),
- (u'{', 8))
+ ('{', 8))
self.assertEquals(
scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
- (u'A JSON payload should be an object or array, not a string.', 60))
+ ('A JSON payload should be an object or array, not a string.', 60))
self.assertEquals(
scanstring('["Unclosed array"', 2, None, True),
- (u'Unclosed array', 17))
+ ('Unclosed array', 17))
self.assertEquals(
scanstring('["extra comma",]', 2, None, True),
- (u'extra comma', 14))
+ ('extra comma', 14))
self.assertEquals(
scanstring('["double extra comma",,]', 2, None, True),
- (u'double extra comma', 21))
+ ('double extra comma', 21))
self.assertEquals(
scanstring('["Comma after the close"],', 2, None, True),
- (u'Comma after the close', 24))
+ ('Comma after the close', 24))
self.assertEquals(
scanstring('["Extra close"]]', 2, None, True),
- (u'Extra close', 14))
+ ('Extra close', 14))
self.assertEquals(
scanstring('{"Extra comma": true,}', 2, None, True),
- (u'Extra comma', 14))
+ ('Extra comma', 14))
self.assertEquals(
scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
- (u'Extra value after close', 26))
+ ('Extra value after close', 26))
self.assertEquals(
scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
- (u'Illegal expression', 21))
+ ('Illegal expression', 21))
self.assertEquals(
scanstring('{"Illegal invocation": alert()}', 2, None, True),
- (u'Illegal invocation', 21))
+ ('Illegal invocation', 21))
self.assertEquals(
scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
- (u'Numbers cannot have leading zeroes', 37))
+ ('Numbers cannot have leading zeroes', 37))
self.assertEquals(
scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
- (u'Numbers cannot be hex', 24))
+ ('Numbers cannot be hex', 24))
self.assertEquals(
scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
- (u'Too deep', 30))
+ ('Too deep', 30))
self.assertEquals(
scanstring('{"Missing colon" null}', 2, None, True),
- (u'Missing colon', 16))
+ ('Missing colon', 16))
self.assertEquals(
scanstring('{"Double colon":: null}', 2, None, True),
- (u'Double colon', 15))
+ ('Double colon', 15))
self.assertEquals(
scanstring('{"Comma instead of colon", null}', 2, None, True),
- (u'Comma instead of colon', 25))
+ ('Comma instead of colon', 25))
self.assertEquals(
scanstring('["Colon instead of comma": false]', 2, None, True),
- (u'Colon instead of comma', 25))
+ ('Colon instead of comma', 25))
self.assertEquals(
scanstring('["Bad value", truth]', 2, None, True),
- (u'Bad value', 12))
+ ('Bad value', 12))
Index: Lib/json/tests/test_fail.py
===================================================================
--- Lib/json/tests/test_fail.py (revision 62736)
+++ Lib/json/tests/test_fail.py (working copy)
@@ -53,7 +53,7 @@
# http://json.org/JSON_checker/test/fail24.json
"['single quote']",
# http://code.google.com/p/simplejson/issues/detail?id=3
- u'["A\u001FZ control characters in string"]',
+ '["A\u001FZ control characters in string"]',
]
SKIPS = {
Index: Lib/json/tests/test_unicode.py
===================================================================
--- Lib/json/tests/test_unicode.py (revision 62736)
+++ Lib/json/tests/test_unicode.py (working copy)
@@ -5,51 +5,51 @@
class TestUnicode(TestCase):
def test_encoding1(self):
encoder = json.JSONEncoder(encoding='utf-8')
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = encoder.encode(u)
js = encoder.encode(s)
self.assertEquals(ju, js)
def test_encoding2(self):
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = json.dumps(u, encoding='utf-8')
js = json.dumps(s, encoding='utf-8')
self.assertEquals(ju, js)
def test_encoding3(self):
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps(u)
self.assertEquals(j, '"\\u03b1\\u03a9"')
def test_encoding4(self):
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps([u])
self.assertEquals(j, '["\\u03b1\\u03a9"]')
def test_encoding5(self):
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps(u, ensure_ascii=False)
- self.assertEquals(j, u'"{0}"'.format(u))
+ self.assertEquals(j, '"{0}"'.format(u))
def test_encoding6(self):
- u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps([u], ensure_ascii=False)
- self.assertEquals(j, u'["{0}"]'.format(u))
+ self.assertEquals(j, '["{0}"]'.format(u))
def test_big_unicode_encode(self):
- u = u'\U0001d120'
+ u = '\U0001d120'
self.assertEquals(json.dumps(u), '"\\ud834\\udd20"')
- self.assertEquals(json.dumps(u, ensure_ascii=False), u'"\U0001d120"')
+ self.assertEquals(json.dumps(u, ensure_ascii=False), '"\U0001d120"')
def test_big_unicode_decode(self):
- u = u'z\U0001d120x'
+ u = 'z\U0001d120x'
self.assertEquals(json.loads('"' + u + '"'), u)
self.assertEquals(json.loads('"z\\ud834\\udd20x"'), u)
def test_unicode_decode(self):
for i in range(0, 0xd7ff):
- u = unichr(i)
+ u = chr(i)
js = '"\\u{0:04x}"'.format(i)
self.assertEquals(json.loads(js), u)
Index: Lib/json/tests/test_encode_basestring_ascii.py
===================================================================
--- Lib/json/tests/test_encode_basestring_ascii.py (revision 62736)
+++ Lib/json/tests/test_encode_basestring_ascii.py (working copy)
@@ -3,22 +3,22 @@
import json.encoder
CASES = [
- (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
- (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
- (u'controls', '"controls"'),
- (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
- (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
- (u' s p a c e d ', '" s p a c e d "'),
- (u'\U0001d120', '"\\ud834\\udd20"'),
- (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
- ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
- (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
- ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
- (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
- (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
- (u"`1~!@#$%^&*()_+-={':[,]}|;.>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.>?"'),
- (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
- (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
+ ('/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', b'"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
+ ('\u0123\u4567\u89ab\ucdef\uabcd\uef4a', b'"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
+ ('controls', b'"controls"'),
+ ('\x08\x0c\n\r\t', b'"\\b\\f\\n\\r\\t"'),
+ ('{"object with 1 member":["array with 1 element"]}', b'"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
+ (' s p a c e d ', b'" s p a c e d "'),
+ ('\U0001d120', b'"\\ud834\\udd20"'),
+ ('\u03b1\u03a9', b'"\\u03b1\\u03a9"'),
+ (b'\xce\xb1\xce\xa9', b'"\\u03b1\\u03a9"'),
+ ('\u03b1\u03a9', b'"\\u03b1\\u03a9"'),
+ (b'\xce\xb1\xce\xa9', b'"\\u03b1\\u03a9"'),
+ ('\u03b1\u03a9', b'"\\u03b1\\u03a9"'),
+ ('\u03b1\u03a9', b'"\\u03b1\\u03a9"'),
+ ("`1~!@#$%^&*()_+-={':[,]}|;.>?", b'"`1~!@#$%^&*()_+-={\':[,]}|;.>?"'),
+ ('\x08\x0c\n\r\t', b'"\\b\\f\\n\\r\\t"'),
+ ('\u0123\u4567\u89ab\ucdef\uabcd\uef4a', b'"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
class TestEncodeBaseStringAscii(TestCase):
@@ -26,10 +26,12 @@
self._test_encode_basestring_ascii(json.encoder.py_encode_basestring_ascii)
def test_c_encode_basestring_ascii(self):
- self._test_encode_basestring_ascii(json.encoder.c_encode_basestring_ascii)
+ if json.encoder.c_encode_basestring_ascii is not None:
+ self._test_encode_basestring_ascii(json.encoder.c_encode_basestring_ascii)
def _test_encode_basestring_ascii(self, encode_basestring_ascii):
fname = encode_basestring_ascii.__name__
for input_string, expect in CASES:
result = encode_basestring_ascii(input_string)
+ result = result.encode("ascii")
self.assertEquals(result, expect)
Index: Lib/json/tool.py
===================================================================
--- Lib/json/tool.py (revision 62736)
+++ Lib/json/tool.py (working copy)
@@ -8,6 +8,7 @@
}
$ echo '{ 1.2:3.4}' | python -mjson.tool
Expecting property name: line 1 column 2 (char 2)
+
"""
import sys
import json
@@ -26,7 +27,7 @@
raise SystemExit("{0} [infile [outfile]]".format(sys.argv[0]))
try:
obj = json.load(infile)
- except ValueError, e:
+ except ValueError as e:
raise SystemExit(e)
json.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
Index: Lib/json/__init__.py
===================================================================
--- Lib/json/__init__.py (revision 62736)
+++ Lib/json/__init__.py (working copy)
@@ -12,15 +12,15 @@
>>> import json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
- >>> print json.dumps("\"foo\bar")
+ >>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
- >>> print json.dumps(u'\u1234')
+ >>> print(json.dumps('\u1234'))
"\u1234"
- >>> print json.dumps('\\')
+ >>> print(json.dumps('\\'))
"\\"
- >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ >>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
- >>> from StringIO import StringIO
+ >>> from io import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
@@ -35,20 +35,20 @@
Pretty printing (using repr() because of extraneous whitespace in the output)::
>>> import json
- >>> print repr(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
+ >>> print(repr(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)))
'{\n "4": 5, \n "6": 7\n}'
Decoding JSON::
>>> import json
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
- [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+ ['foo', {'bar': ['baz', None, 1.0, 2]}]
>>> json.loads('"\\"foo\\bar"')
- u'"foo\x08ar'
- >>> from StringIO import StringIO
+ '"foo\x08ar'
+ >>> from io import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)
- [u'streaming API']
+ ['streaming API']
Specializing JSON object decoding::
@@ -316,3 +316,4 @@
if parse_constant is not None:
kw['parse_constant'] = parse_constant
return cls(encoding=encoding, **kw).decode(s)
+
Index: Lib/json/encoder.py
===================================================================
--- Lib/json/encoder.py (revision 62736)
+++ Lib/json/encoder.py (working copy)
@@ -59,7 +59,7 @@
def py_encode_basestring_ascii(s):
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ if isinstance(s, bytes): # and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
@@ -75,7 +75,7 @@
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
- return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+ return '"' + (ESCAPE_ASCII.sub(replace, s)) + '"'
if c_encode_basestring_ascii is not None:
@@ -236,11 +236,11 @@
encoder = encode_basestring
allow_nan = self.allow_nan
if self.sort_keys:
- keys = dct.keys()
+ keys = list(dct.keys())
keys.sort()
items = [(k, dct[k]) for k in keys]
else:
- items = dct.iteritems()
+ items = iter(dct.items())
_encoding = self.encoding
_do_decode = (_encoding is not None
and not (_encoding == 'utf-8'))
@@ -248,13 +248,13 @@
if isinstance(key, str):
if _do_decode:
key = key.decode(_encoding)
- elif isinstance(key, basestring):
+ elif isinstance(key, str):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = floatstr(key, allow_nan)
- elif isinstance(key, (int, long)):
+ elif isinstance(key, (int, int)):
key = str(key)
elif key is True:
key = 'true'
@@ -282,7 +282,7 @@
del markers[markerid]
def _iterencode(self, o, markers=None):
- if isinstance(o, basestring):
+ if isinstance(o, str):
if self.ensure_ascii:
encoder = encode_basestring_ascii
else:
@@ -298,7 +298,7 @@
yield 'true'
elif o is False:
yield 'false'
- elif isinstance(o, (int, long)):
+ elif isinstance(o, (int, int)):
yield str(o)
elif isinstance(o, float):
yield floatstr(o, self.allow_nan)
@@ -351,8 +351,8 @@
"""
# This is for extremely simple cases and benchmarks.
- if isinstance(o, basestring):
- if isinstance(o, str):
+ if isinstance(o, (str, bytes)):
+ if isinstance(o, bytes):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
Index: Lib/json/decoder.py
===================================================================
--- Lib/json/decoder.py (revision 62736)
+++ Lib/json/decoder.py (working copy)
@@ -14,19 +14,9 @@
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+NaN, PosInf, NegInf = float('nan'), float('inf'), float('-inf')
-def _floatconstants():
- import struct
- import sys
- _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-NaN, PosInf, NegInf = _floatconstants()
-
-
def linecol(doc, pos):
lineno = doc.count('\n', 0, pos) + 1
if lineno == 1:
@@ -82,8 +72,8 @@
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
- '"': u'"', '\\': u'\\', '/': u'/',
- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+ '"': '"', '\\': '\\', '/': '/',
+ 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t',
}
DEFAULT_ENCODING = "utf-8"
@@ -103,8 +93,8 @@
end = chunk.end()
content, terminator = chunk.groups()
if content:
- if not isinstance(content, unicode):
- content = unicode(content, encoding)
+ if not isinstance(content, str):
+ content = str(content, encoding)
_append(content)
if terminator == '"':
break
@@ -145,12 +135,12 @@
uni2 = int(esc2, 16)
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
next_end += 6
- m = unichr(uni)
+ m = chr(uni)
except ValueError:
raise ValueError(errmsg(msg, s, end))
end = next_end
_append(m)
- return u''.join(chunks), end
+ return ''.join(chunks), end
# Use speedup
@@ -190,7 +180,7 @@
raise ValueError(errmsg("Expecting : delimiter", s, end))
end = _w(s, end + 1).end()
try:
- value, end = iterscan(s, idx=end, context=context).next()
+ value, end = next(iterscan(s, idx=end, context=context))
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
pairs[key] = value
@@ -224,7 +214,7 @@
iterscan = JSONScanner.iterscan
while True:
try:
- value, end = iterscan(s, idx=end, context=context).next()
+ value, end = next(iterscan(s, idx=end, context=context))
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
values.append(value)
@@ -343,7 +333,7 @@
"""
kw.setdefault('context', self)
try:
- obj, end = self._scanner.iterscan(s, **kw).next()
+ obj, end = next(self._scanner.iterscan(s, **kw))
except StopIteration:
raise ValueError("No JSON object could be decoded")
return obj, end
Index: Modules/_json.c
===================================================================
--- Modules/_json.c (revision 62816)
+++ Modules/_json.c (working copy)
@@ -215,7 +215,7 @@
ustr = PyUnicode_FromUnicode(&c, 0);
}
if (joinstr == NULL) {
- joinstr = PyString_FromString("join");
+ joinstr = PyUnicode_InternFromString("join");
}
if (joinstr == NULL || ustr == NULL) {
return NULL;
@@ -231,6 +231,7 @@
Py_ssize_t begin = end - 1;
Py_ssize_t next = begin;
char *buf = PyString_AS_STRING(pystr);
+ Py_buffer info;
PyObject *chunks = PyList_New(0);
if (chunks == NULL) {
goto bail;
@@ -255,7 +256,10 @@
}
/* Pick up this chunk if it's not zero length */
if (next != end) {
- PyObject *strchunk = PyBuffer_FromMemory(&buf[end], next - end);
+ if (PyBuffer_FillInfo(&info, &buf[end], next - end, 1, 0) < 0) {
+ goto bail;
+ }
+ PyObject *strchunk = PyMemoryView_FromMemory(&info);
if (strchunk == NULL) {
goto bail;
}
@@ -540,7 +544,7 @@
}
PyDoc_STRVAR(pydoc_scanstring,
-"scanstring(basestring, end, encoding) -> (str, end)\n");
+"scanstring(str_or_bytes, end, encoding) -> (bytes, end)\n");
static PyObject *
py_scanstring(PyObject* self, PyObject *args)
@@ -563,24 +567,25 @@
}
else {
PyErr_Format(PyExc_TypeError,
- "first argument must be a string or unicode, not %.80s",
+ "first argument must be a string or bytes, not %.80s",
Py_TYPE(pystr)->tp_name);
return NULL;
}
}
PyDoc_STRVAR(pydoc_encode_basestring_ascii,
-"encode_basestring_ascii(basestring) -> str\n");
+"encode_basestring_ascii(str_or_bytes) -> bytes\n");
static PyObject *
py_encode_basestring_ascii(PyObject* self, PyObject *pystr)
{
+ PyObject *rval;
/* METH_O */
if (PyString_Check(pystr)) {
- return ascii_escape_str(pystr);
+ rval = ascii_escape_str(pystr);
}
else if (PyUnicode_Check(pystr)) {
- return ascii_escape_unicode(pystr);
+ rval = ascii_escape_unicode(pystr);
}
else {
PyErr_Format(PyExc_TypeError,
@@ -588,6 +593,12 @@
Py_TYPE(pystr)->tp_name);
return NULL;
}
+ if (PyString_Check(rval)) {
+ PyObject *urval = PyUnicode_DecodeASCII(PyString_AS_STRING(rval), PyString_GET_SIZE(rval), NULL);
+ Py_DECREF(rval);
+ return urval;
+ }
+ return rval;
}
static PyMethodDef json_methods[] = {