# HG changeset patch # User Kristján Valur Jónsson # Date 1332858224 0 # Node ID 0f84db580fea87e9b9f1e1688f7373c77f2b10a2 # Parent 7c48bb929e6ec5f80db093ebf3e1d0be584529cc pickling patch as submitted diff --git a/Include/iterobject.h b/Include/iterobject.h --- a/Include/iterobject.h +++ b/Include/iterobject.h @@ -18,6 +18,8 @@ PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *); +PyAPI_FUNC(PyObject *) _PyIter_GetBuiltin(const char *iter); + #ifdef __cplusplus } #endif diff --git a/Lib/test/seq_tests.py b/Lib/test/seq_tests.py --- a/Lib/test/seq_tests.py +++ b/Lib/test/seq_tests.py @@ -4,6 +4,7 @@ import unittest import sys +import pickle # Various iterables # This is used for checking the constructor (here and in test_deque.py) @@ -388,3 +389,9 @@ self.assertEqual(a.index(0, -4*sys.maxsize, 4*sys.maxsize), 2) self.assertRaises(ValueError, a.index, 0, 4*sys.maxsize,-4*sys.maxsize) self.assertRaises(ValueError, a.index, 2, 0, -10) + + def test_pickle(self): + lst = self.type2test([4, 5, 6, 7]) + lst2 = pickle.loads(pickle.dumps(lst)) + self.assertEqual(lst2, lst) + self.assertNotEqual(id(lst2), id(lst)) diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -285,6 +285,20 @@ self.assertEqual(a.x, b.x) self.assertEqual(type(a), type(b)) + def test_iterator_pickle(self): + data = array.array(self.typecode, self.example) + orgit = iter(data) + d = pickle.dumps(orgit) + it = pickle.loads(d) + self.assertEqual(type(orgit), type(it)) + self.assertEqual(list(it), list(data)) + + if len(data): + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + self.assertEqual(list(it), list(data)[1:]) + def test_insert(self): a = array.array(self.typecode, self.example) a.insert(0, self.example[0]) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -14,6 +14,7 @@ import traceback from test.support import TESTFN, unlink, run_unittest, check_warnings from operator import neg +import pickle try: import pty, signal except ImportError: @@ -110,7 +111,30 @@ def __iter__(self): raise RuntimeError +def filter_char(arg): + return ord(arg) > ord("d") + +def map_char(arg): + return chr(ord(arg)+1) + class BuiltinTest(unittest.TestCase): + # Helper to check picklability + def check_iter_pickle(self, it, seq): + itorg = it + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), seq) + + #test the iterator after dropping one from it + it = pickle.loads(d) + try: + next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) def test_import(self): __import__('sys') @@ -566,6 +590,11 @@ self.assertEqual(list(filter(lambda x: x>=3, (1, 2, 3, 4))), [3, 4]) self.assertRaises(TypeError, list, filter(42, (1, 2))) + def test_filter_pickle(self): + f1 = filter(filter_char, "abcdeabcde") + f2 = filter(filter_char, "abcdeabcde") + self.check_iter_pickle(f1, list(f2)) + def test_getattr(self): self.assertTrue(getattr(sys, 'stdout') is sys.stdout) self.assertRaises(TypeError, getattr, sys, 1) @@ -759,6 +788,11 @@ raise RuntimeError self.assertRaises(RuntimeError, list, map(badfunc, range(5))) + def test_map_pickle(self): + m1 = map(map_char, "Is this the real life?") + m2 = map(map_char, "Is this the real life?") + self.check_iter_pickle(m1, list(m2)) + def test_max(self): self.assertEqual(max('123123'), '3') self.assertEqual(max(1, 2, 3), 3) @@ 
-1300,6 +1334,13 @@ return i self.assertRaises(ValueError, list, zip(BadSeq(), BadSeq())) + def test_zip_pickle(self): + a = (1, 2, 3) + b = (4, 5, 6) + t = [(1, 4), (2, 5), (3, 6)] + z1 = zip(a, b) + self.check_iter_pickle(z1, t) + def test_format(self): # Test the basic machinery of the format() builtin. Don't test # the specifics of the various formatters diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -518,6 +518,24 @@ q = pickle.loads(ps) self.assertEqual(b, q) + def test_iterator_pickling(self): + for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0": + it = itorg = iter(self.type2test(b)) + data = list(self.type2test(b)) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), data) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + continue + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), data[1:]) + def test_strip(self): b = self.type2test(b'mississippi') self.assertEqual(b.strip(b'i'), b'mississipp') diff --git a/Lib/test/test_deque.py b/Lib/test/test_deque.py --- a/Lib/test/test_deque.py +++ b/Lib/test/test_deque.py @@ -471,6 +471,19 @@ ## self.assertNotEqual(id(d), id(e)) ## self.assertEqual(id(e), id(e[-1])) + def test_iterator_pickle(self): + data = deque(range(200)) + it = itorg = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), list(data)) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + self.assertEqual(list(it), list(data)[1:]) + def test_deepcopy(self): mut = [10] d = deque([mut]) diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -3,6 +3,7 @@ import collections, random, string import gc, weakref +import pickle class DictTest(unittest.TestCase): @@ -803,6 +804,57 @@ pass self._tracked(MyDict()) + def test_iterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + it = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(sorted(it), sorted(data)) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + del data[drop] + self.assertEqual(sorted(list(it)), sorted(list(data))) + + def test_itemiterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + # dictviews aren't picklable, only their iterators + itorg = iter(data.items()) + d = pickle.dumps(itorg) + it = pickle.loads(d) + # note that the type of type of the unpickled iterator + # is not necessarily the same as the original. It is + # merely an object supporting the iterator protocol, yielding + # the same objects as the original one. 
+ # self.assertEqual(type(itorg), type(it)) + self.assertEqual(dict(it), data) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + it = pickle.loads(d) + del data[drop[0]] + self.assertEqual(dict(it), data) + + def test_valuesiterator_pickling(self): + data = {1:"a", 2:"b", 3:"c"} + # data.values() isn't picklable, only its iterator + it = iter(data.values()) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(sorted(list(it)), sorted(list(data.values()))) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + it = pickle.loads(d) + values = list(it) + [drop] + self.assertEqual(sorted(values), sorted(list(data.values()))) from test import mapping_tests diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py --- a/Lib/test/test_enumerate.py +++ b/Lib/test/test_enumerate.py @@ -1,5 +1,6 @@ import unittest import sys +import pickle from test import support @@ -61,7 +62,25 @@ def __iter__(self): return self -class EnumerateTestCase(unittest.TestCase): +class PickleTest: + # Helper to check picklability + def check_pickle(self, itorg, seq): + d = pickle.dumps(itorg) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), seq) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + self.assertFalse(seq[1:]) + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) + +class EnumerateTestCase(unittest.TestCase, PickleTest): enum = enumerate seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')] @@ -73,6 +92,9 @@ self.assertEqual(list(self.enum(self.seq)), self.res) self.enum.__doc__ + def test_pickle(self): + self.check_pickle(self.enum(self.seq), self.res) + def test_getitemseqn(self): self.assertEqual(list(self.enum(G(self.seq))), self.res) e = self.enum(G('')) @@ -126,7 +148,7 @@ seq = range(10,20000,2) res = list(zip(range(20000), seq)) -class TestReversed(unittest.TestCase): +class TestReversed(unittest.TestCase, PickleTest): def test_simple(self): class A: @@ -212,6 +234,10 @@ ngi = NoGetItem() self.assertRaises(TypeError, reversed, ngi) + def test_pickle(self): + for data in 'abc', range(5), tuple(enumerate('abc')), range(1,17,5): + self.check_pickle(reversed(data), list(data)[::-1]) + class EnumerateStartTestCase(EnumerateTestCase): diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -2,6 +2,7 @@ import unittest from test.support import run_unittest, TESTFN, unlink, cpython_only +import pickle # Test result of triple loop (too big to inline) TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2), @@ -28,6 +29,8 @@ raise StopIteration self.i = res + 1 return res + def __iter__(self): + return self class IteratingSequenceClass: def __init__(self, n): @@ -49,7 +52,9 @@ class TestCase(unittest.TestCase): # Helper to check that an iterator returns a given sequence - def check_iterator(self, it, seq): + def check_iterator(self, it, seq, pickle=True): + if pickle: + self.check_pickle(it, seq) res = [] while 1: try: @@ -60,12 +65,32 @@ self.assertEqual(res, seq) # Helper to check that a for loop generates a given sequence - def check_for_loop(self, expr, seq): + def check_for_loop(self, expr, seq, pickle=True): + if pickle: + self.check_pickle(iter(expr), seq) res = [] for val in expr: res.append(val) self.assertEqual(res, seq) + # Helper to check picklability + def check_pickle(self, itorg, seq): + d = pickle.dumps(itorg) + it = pickle.loads(d) + # Cannot assert type equality because dict iterators unpickle as 
list + # iterators. + # self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), seq) + + it = pickle.loads(d) + try: + next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), seq[1:]) + # Test basic use of iter() function def test_iter_basic(self): self.check_iterator(iter(range(10)), list(range(10))) @@ -138,7 +163,7 @@ if i > 100: raise IndexError # Emergency stop return i - self.check_iterator(iter(C(), 10), list(range(10))) + self.check_iterator(iter(C(), 10), list(range(10)), pickle=False) # Test two-argument iter() with function def test_iter_function(self): @@ -146,7 +171,7 @@ i = state[0] state[0] = i+1 return i - self.check_iterator(iter(spam, 10), list(range(10))) + self.check_iterator(iter(spam, 10), list(range(10)), pickle=False) # Test two-argument iter() with function that raises StopIteration def test_iter_function_stop(self): @@ -156,7 +181,7 @@ raise StopIteration state[0] = i+1 return i - self.check_iterator(iter(spam, 20), list(range(10))) + self.check_iterator(iter(spam, 20), list(range(10)), pickle=False) # Test exception propagation through function iterator def test_exception_function(self): @@ -198,7 +223,7 @@ if i == 10: raise StopIteration return SequenceClass.__getitem__(self, i) - self.check_for_loop(MySequenceClass(20), list(range(10))) + self.check_for_loop(MySequenceClass(20), list(range(10)), pickle=False) # Test a big range def test_iter_big_range(self): @@ -237,8 +262,8 @@ f.close() f = open(TESTFN, "r") try: - self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"]) - self.check_for_loop(f, []) + self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"], pickle=False) + self.check_for_loop(f, [], pickle=False) finally: f.close() try: diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -37,6 +37,13 @@ 'Test predicate' return x%2==1 +def tupleize(*args): + return args + +def irange(n): + for i in range(n): + yield i + class StopNow: 'Class emulating an empty iterable.' def __iter__(self): @@ -55,8 +62,59 @@ 'Factorial' return prod(range(1, n+1)) +# root level methods for pickling ability +def testR(r): + return r[0] + +def testR2(r): + return r[2] + +def underten(x): + return x<10 + class TestBasicOps(unittest.TestCase): + def pickletest(self, it, stop=4, take=1, compare=None): + """Test that an iterator is the same after pickling, also when part-consumed""" + def expand(it, i=0): + # Recursively expand iterables, within sensible bounds + if i > 10: + raise RuntimeError("infinite recursion encountered") + if isinstance(it, str): + return it + try: + l = list(islice(it, stop)) + except TypeError: + return it # can't expand it + return [expand(e, i+1) for e in l] + + # Test the initial copy against the original + dump = pickle.dumps(it) + i2 = pickle.loads(dump) + self.assertEqual(type(it), type(i2)) + a, b = expand(it), expand(i2) + self.assertEqual(a, b) + if compare: + c = expand(compare) + self.assertEqual(a, c) + + # Take from the copy, and create another copy and compare them. 
+ i3 = pickle.loads(dump) + took = 0 + try: + for i in range(take): + next(i3) + took += 1 + except StopIteration: + pass #in case there is less data than 'take' + dump = pickle.dumps(i3) + i4 = pickle.loads(dump) + a, b = expand(i3), expand(i4) + self.assertEqual(a, b) + if compare: + c = expand(compare[took:]) + self.assertEqual(a, c); + def test_accumulate(self): self.assertEqual(list(accumulate(range(10))), # one positional arg [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]) @@ -83,6 +141,7 @@ [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0]) with self.assertRaises(TypeError): list(accumulate(s, chr)) # unary-operation + self.pickletest(accumulate(range(10))) # test pickling def test_chain(self): @@ -106,14 +165,43 @@ self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd')) self.assertRaises(TypeError, list, chain.from_iterable([2, 3])) + def test_chain_reducible(self): + operators = [copy.deepcopy, + lambda s: pickle.loads(pickle.dumps(s))] + for oper in operators: + it = chain('abc', 'def') + self.assertEqual(list(oper(it)), list('abcdef')) + self.assertEqual(next(it), 'a') + self.assertEqual(list(oper(it)), list('bcdef')) + + self.assertEqual(list(oper(chain(''))), []) + self.assertEqual(take(4, oper(chain('abc', 'def'))), list('abcd')) + self.assertRaises(TypeError, list, oper(chain(2, 3))) + self.pickletest(chain('abc', 'def'), compare=list('abcdef')) + def test_combinations(self): self.assertRaises(TypeError, combinations, 'abc') # missing r argument self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, combinations, None) # pool is not iterable self.assertRaises(ValueError, combinations, 'abc', -2) # r is negative - self.assertEqual(list(combinations('abc', 32)), []) # r > n - self.assertEqual(list(combinations(range(4), 3)), - [(0,1,2), (0,1,3), (0,2,3), (1,2,3)]) + + for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))): + self.assertEqual(list(op(combinations('abc', 32))), []) # r > n + + self.assertEqual(list(op(combinations('ABCD', 2))), + [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')]) + testIntermediate = combinations('ABCD', 2) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')]) + + self.assertEqual(list(op(combinations(range(4), 3))), + [(0,1,2), (0,1,3), (0,2,3), (1,2,3)]) + testIntermediate = combinations(range(4), 3) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [(0,1,3), (0,2,3), (1,2,3)]) + def combinations1(iterable, r): 'Pure python version shown in the docs' @@ -168,6 +256,9 @@ self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version + self.pickletest(combinations(values, r)) # test pickling + + # Test implementation detail: tuple re-use @support.impl_detail("tuple reuse is specific to CPython") def test_combinations_tuple_reuse(self): self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1) @@ -179,8 +270,15 @@ self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, cwr, None) # pool is not iterable self.assertRaises(ValueError, cwr, 'abc', -2) # r is negative - self.assertEqual(list(cwr('ABC', 2)), - [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + + for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))): + self.assertEqual(list(op(cwr('ABC', 2))), + [('A','A'), 
('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + testIntermediate = cwr('ABC', 2) + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), + [('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) + def cwr1(iterable, r): 'Pure python version shown in the docs' @@ -239,6 +337,10 @@ self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version + self.pickletest(cwr(values,r)) # test pickling + + # Test implementation detail: tuple re-use + @support.impl_detail("tuple reuse is specific to CPython") def test_combinations_with_replacement_tuple_reuse(self): cwr = combinations_with_replacement @@ -305,6 +407,8 @@ self.assertEqual(result, list(permutations(values, None))) # test r as None self.assertEqual(result, list(permutations(values))) # test default r + self.pickletest(permutations(values, r)) # test pickling + @support.impl_detail("tuple resuse is CPython specific") def test_permutations_tuple_reuse(self): self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1) @@ -359,6 +463,24 @@ self.assertRaises(TypeError, compress, range(6)) # too few args self.assertRaises(TypeError, compress, range(6), None) # too many args + # check copy, deepcopy, pickle + for op in (lambda a:copy.copy(a), lambda a:copy.deepcopy(a), lambda a:pickle.loads(pickle.dumps(a))): + for data, selectors, result1, result2 in [ + ('ABCDEF', [1,0,1,0,1,1], 'ACEF', 'CEF'), + ('ABCDEF', [0,0,0,0,0,0], '', ''), + ('ABCDEF', [1,1,1,1,1,1], 'ABCDEF', 'BCDEF'), + ('ABCDEF', [1,0,1], 'AC', 'C'), + ('ABC', [0,1,1,1,1,1], 'BC', 'C'), + ]: + + self.assertEqual(list(op(compress(data=data, selectors=selectors))), list(result1)) + self.assertEqual(list(op(compress(data, selectors))), list(result1)) + testIntermediate = compress(data, selectors) + if result1: + next(testIntermediate) + self.assertEqual(list(op(testIntermediate)), list(result2)) + + def test_count(self): self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)]) self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)]) @@ -393,7 +515,7 @@ c = count(value) self.assertEqual(next(copy.copy(c)), value) self.assertEqual(next(copy.deepcopy(c)), value) - self.assertEqual(next(pickle.loads(pickle.dumps(c))), value) + self.pickletest(count(value)) #check proper internal error handling for large "step' sizes count(1, maxsize+5); sys.exc_info() @@ -440,6 +562,7 @@ else: r2 = ('count(%r, %r)' % (i, j)).replace('L', '') self.assertEqual(r1, r2) + self.pickletest(count(i, j)) def test_cycle(self): self.assertEqual(take(10, cycle('abc')), list('abcabcabca')) @@ -448,6 +571,18 @@ self.assertRaises(TypeError, cycle, 5) self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0]) + # check copy, deepcopy, pickle + c = cycle('abc') + self.assertEqual(next(c), 'a') + #simple copy currently not supported, because __reduce__ returns + #an internal iterator + #self.assertEqual(take(10, copy.copy(c)), list('bcabcabcab')) + self.assertEqual(take(10, copy.deepcopy(c)), list('bcabcabcab')) + self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('bcabcabcab')) + next(c) + self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('cabcabcabc')) + self.pickletest(cycle('abc')) + def test_groupby(self): # Check whether it accepts arguments correctly self.assertEqual([], list(groupby([]))) @@ -466,18 +601,37 @@ dup.append(elem) self.assertEqual(s, dup) + # Check normal pickled + dup = [] + for k, g in 
pickle.loads(pickle.dumps(groupby(s, testR))): + for elem in g: + self.assertEqual(k, elem[0]) + dup.append(elem) + self.assertEqual(s, dup) + # Check nested case dup = [] - for k, g in groupby(s, lambda r:r[0]): - for ik, ig in groupby(g, lambda r:r[2]): + for k, g in groupby(s, testR): + for ik, ig in groupby(g, testR2): for elem in ig: self.assertEqual(k, elem[0]) self.assertEqual(ik, elem[2]) dup.append(elem) self.assertEqual(s, dup) + # Check nested and pickled + dup = [] + for k, g in pickle.loads(pickle.dumps(groupby(s, testR))): + for ik, ig in pickle.loads(pickle.dumps(groupby(g, testR2))): + for elem in ig: + self.assertEqual(k, elem[0]) + self.assertEqual(ik, elem[2]) + dup.append(elem) + self.assertEqual(s, dup) + + # Check case where inner iterator is not used - keys = [k for k, g in groupby(s, lambda r:r[0])] + keys = [k for k, g in groupby(s, testR)] expectedkeys = set([r[0] for r in s]) self.assertEqual(set(keys), expectedkeys) self.assertEqual(len(keys), len(expectedkeys)) @@ -548,6 +702,20 @@ self.assertRaises(TypeError, filter, isEven, 3) self.assertRaises(TypeError, next, filter(range(6), range(6))) + # check copy, deepcopy, pickle + ans = [0,2,4] + + c = filter(isEven, range(6)) + self.assertEqual(list(copy.copy(c)), ans) + c = filter(isEven, range(6)) + self.assertEqual(list(copy.deepcopy(c)), ans) + c = filter(isEven, range(6)) + self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans) + next(c) + self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans[1:]) + c = filter(isEven, range(6)) + self.pickletest(c) + def test_filterfalse(self): self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5]) self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0]) @@ -558,6 +726,7 @@ self.assertRaises(TypeError, filterfalse, lambda x:x, range(6), 7) self.assertRaises(TypeError, filterfalse, isEven, 3) self.assertRaises(TypeError, next, filterfalse(range(6), range(6))) + self.pickletest(filterfalse(isEven, range(6))) def test_zip(self): # XXX This is rather silly now that builtin zip() calls zip()... 
@@ -582,6 +751,23 @@ ids = list(map(id, list(zip('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) + # check copy, deepcopy, pickle + ans = [(x,y) for x, y in copy.copy(zip('abc',count()))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + ans = [(x,y) for x, y in copy.deepcopy(zip('abc',count()))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + ans = [(x,y) for x, y in pickle.loads(pickle.dumps(zip('abc',count())))] + self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) + + testIntermediate = zip('abc',count()) + next(testIntermediate) + ans = [(x,y) for x, y in pickle.loads(pickle.dumps(testIntermediate))] + self.assertEqual(ans, [('b', 1), ('c', 2)]) + + self.pickletest(zip('abc', count())) + def test_ziplongest(self): for args in [ ['abc', range(6)], @@ -631,6 +817,12 @@ ids = list(map(id, list(zip_longest('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) + def test_zip_longest_pickling(self): + self.pickletest(zip_longest("abc", "def")) + self.pickletest(zip_longest("abc", "defgh")) + self.pickletest(zip_longest("abc", "defgh", fillvalue=1)) + self.pickletest(zip_longest("", "defgh")) + def test_bug_7244(self): class Repeater: @@ -734,6 +926,20 @@ self.assertEqual(len(set(map(id, product('abc', 'def')))), 1) self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1) + def test_product_pickling(self): + # check copy, deepcopy, pickle + for args, result in [ + ([], [()]), # zero iterables + (['ab'], [('a',), ('b',)]), # one iterable + ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]), # two iterables + ([range(0), range(2), range(3)], []), # first iterable with zero length + ([range(2), range(0), range(3)], []), # middle iterable with zero length + ([range(2), range(3), range(0)], []), # last iterable with zero length + ]: + self.assertEqual(list(copy.copy(product(*args))), result) + self.assertEqual(list(copy.deepcopy(product(*args))), result) + self.pickletest(product(*args)) + def test_repeat(self): self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a']) self.assertEqual(lzip(range(3),repeat('a')), @@ -752,11 +958,16 @@ list(r) self.assertEqual(repr(r), 'repeat((1+0j), 0)') + # check copy, deepcopy, pickle + c = repeat(object='a', times=10) + self.assertEqual(next(c), 'a') + self.assertEqual(take(2, copy.copy(c)), list('a' * 2)) + self.assertEqual(take(2, copy.deepcopy(c)), list('a' * 2)) + self.pickletest(repeat(object='a', times=10)) + def test_map(self): self.assertEqual(list(map(operator.pow, range(3), range(1,7))), [0**1, 1**2, 2**3]) - def tupleize(*args): - return args self.assertEqual(list(map(tupleize, 'abc', range(5))), [('a',0),('b',1),('c',2)]) self.assertEqual(list(map(tupleize, 'abc', count())), @@ -771,6 +982,18 @@ self.assertRaises(ValueError, next, map(errfunc, [4], [5])) self.assertRaises(TypeError, next, map(onearg, [4], [5])) + # check copy, deepcopy, pickle + ans = [('a',0),('b',1),('c',2)] + + c = map(tupleize, 'abc', count()) + self.assertEqual(list(copy.copy(c)), ans) + + c = map(tupleize, 'abc', count()) + self.assertEqual(list(copy.deepcopy(c)), ans) + + c = map(tupleize, 'abc', count()) + self.pickletest(c) + def test_starmap(self): self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))), [0**1, 1**2, 2**3]) @@ -785,6 +1008,18 @@ self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)])) self.assertRaises(TypeError, next, starmap(onearg, [(4,5)])) + # check copy, deepcopy, pickle + ans = [0**1, 1**2, 2**3] + + c = starmap(operator.pow, 
zip(range(3), range(1,7))) + self.assertEqual(list(copy.copy(c)), ans) + + c = starmap(operator.pow, zip(range(3), range(1,7))) + self.assertEqual(list(copy.deepcopy(c)), ans) + + c = starmap(operator.pow, zip(range(3), range(1,7))) + self.pickletest(c) + def test_islice(self): for args in [ # islice(args) should agree with range(args) (10, 20, 3), @@ -817,17 +1052,18 @@ self.assertEqual(list(it), list(range(3, 10))) # Test invalid arguments - self.assertRaises(TypeError, islice, range(10)) - self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4) - self.assertRaises(ValueError, islice, range(10), -5, 10, 1) - self.assertRaises(ValueError, islice, range(10), 1, -5, -1) - self.assertRaises(ValueError, islice, range(10), 1, 10, -1) - self.assertRaises(ValueError, islice, range(10), 1, 10, 0) - self.assertRaises(ValueError, islice, range(10), 'a') - self.assertRaises(ValueError, islice, range(10), 'a', 1) - self.assertRaises(ValueError, islice, range(10), 1, 'a') - self.assertRaises(ValueError, islice, range(10), 'a', 1, 1) - self.assertRaises(ValueError, islice, range(10), 1, 'a', 1) + ra = range(10) + self.assertRaises(TypeError, islice, ra) + self.assertRaises(TypeError, islice, ra, 1, 2, 3, 4) + self.assertRaises(ValueError, islice, ra, -5, 10, 1) + self.assertRaises(ValueError, islice, ra, 1, -5, -1) + self.assertRaises(ValueError, islice, ra, 1, 10, -1) + self.assertRaises(ValueError, islice, ra, 1, 10, 0) + self.assertRaises(ValueError, islice, ra, 'a') + self.assertRaises(ValueError, islice, ra, 'a', 1) + self.assertRaises(ValueError, islice, ra, 1, 'a') + self.assertRaises(ValueError, islice, ra, 'a', 1, 1) + self.assertRaises(ValueError, islice, ra, 1, 'a', 1) self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1) # Issue #10323: Less islice in a predictable state @@ -835,9 +1071,22 @@ self.assertEqual(list(islice(c, 1, 3, 50)), [1]) self.assertEqual(next(c), 3) + # check copy, deepcopy, pickle + for args in [ # islice(args) should agree with range(args) + (10, 20, 3), + (10, 3, 20), + (10, 20), + (10, 3), + (20,) + ]: + self.assertEqual(list(copy.copy(islice(range(100), *args))), + list(range(*args))) + self.assertEqual(list(copy.deepcopy(islice(range(100), *args))), + list(range(*args))) + self.pickletest(islice(range(100), *args)) + def test_takewhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] - underten = lambda x: x<10 self.assertEqual(list(takewhile(underten, data)), [1, 3, 5]) self.assertEqual(list(takewhile(underten, [])), []) self.assertRaises(TypeError, takewhile) @@ -849,9 +1098,14 @@ self.assertEqual(list(t), [1, 1, 1]) self.assertRaises(StopIteration, next, t) + # check copy, deepcopy, pickle + self.assertEqual(list(copy.copy(takewhile(underten, data))), [1, 3, 5]) + self.assertEqual(list(copy.deepcopy(takewhile(underten, data))), + [1, 3, 5]) + self.pickletest(takewhile(underten, data)) + def test_dropwhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] - underten = lambda x: x<10 self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8]) self.assertEqual(list(dropwhile(underten, [])), []) self.assertRaises(TypeError, dropwhile) @@ -860,11 +1114,14 @@ self.assertRaises(TypeError, next, dropwhile(10, [(4,5)])) self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)])) + # check copy, deepcopy, pickle + self.assertEqual(list(copy.copy(dropwhile(underten, data))), [20, 2, 4, 6, 8]) + self.assertEqual(list(copy.deepcopy(dropwhile(underten, data))), + [20, 2, 4, 6, 8]) + self.pickletest(dropwhile(underten, data)) + def test_tee(self): n = 200 - def 
irange(n): - for i in range(n): - yield i a, b = tee([]) # test empty iterator self.assertEqual(list(a), []) @@ -949,6 +1206,67 @@ del a self.assertRaises(ReferenceError, getattr, p, '__class__') + ans = list('abc') + long_ans = list(range(10000)) + + # check copy + a, b = tee('abc') + self.assertEqual(list(copy.copy(a)), ans) + self.assertEqual(list(copy.copy(b)), ans) + a, b = tee(list(range(10000))) + self.assertEqual(list(copy.copy(a)), long_ans) + self.assertEqual(list(copy.copy(b)), long_ans) + + # check partially consumed copy + a, b = tee('abc') + take(2, a) + take(1, b) + self.assertEqual(list(copy.copy(a)), ans[2:]) + self.assertEqual(list(copy.copy(b)), ans[1:]) + self.assertEqual(list(a), ans[2:]) + self.assertEqual(list(b), ans[1:]) + a, b = tee(range(10000)) + take(100, a) + take(60, b) + self.assertEqual(list(copy.copy(a)), long_ans[100:]) + self.assertEqual(list(copy.copy(b)), long_ans[60:]) + self.assertEqual(list(a), long_ans[100:]) + self.assertEqual(list(b), long_ans[60:]) + + # check deepcopy + a, b = tee('abc') + self.assertEqual(list(copy.deepcopy(a)), ans) + self.assertEqual(list(copy.deepcopy(b)), ans) + self.assertEqual(list(a), ans) + self.assertEqual(list(b), ans) + a, b = tee(range(10000)) + self.assertEqual(list(copy.deepcopy(a)), long_ans) + self.assertEqual(list(copy.deepcopy(b)), long_ans) + self.assertEqual(list(a), long_ans) + self.assertEqual(list(b), long_ans) + + # check partially consumed deepcopy + a, b = tee('abc') + take(2, a) + take(1, b) + self.assertEqual(list(copy.deepcopy(a)), ans[2:]) + self.assertEqual(list(copy.deepcopy(b)), ans[1:]) + self.assertEqual(list(a), ans[2:]) + self.assertEqual(list(b), ans[1:]) + a, b = tee(range(10000)) + take(100, a) + take(60, b) + self.assertEqual(list(copy.deepcopy(a)), long_ans[100:]) + self.assertEqual(list(copy.deepcopy(b)), long_ans[60:]) + self.assertEqual(list(a), long_ans[100:]) + self.assertEqual(list(b), long_ans[60:]) + + # check pickle + self.pickletest(iter(tee('abc'))) + a, b = tee('abc') + self.pickletest(a, compare=ans) + self.pickletest(b, compare=ans) + def test_StopIteration(self): self.assertRaises(StopIteration, next, zip()) @@ -974,9 +1292,21 @@ class TestExamples(unittest.TestCase): - def test_accumlate(self): + def test_accumulate(self): self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15]) + def test_accumulate_reducible(self): + # check copy, deepcopy, pickle + data = [1, 2, 3, 4, 5] + accumulated = [1, 3, 6, 10, 15] + it = accumulate(data) + + self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[:]) + self.assertEqual(next(it), 1) + self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[1:]) + self.assertEqual(list(copy.deepcopy(it)), accumulated[1:]) + self.assertEqual(list(copy.copy(it)), accumulated[1:]) + def test_chain(self): self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF') diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -1,5 +1,6 @@ import sys from test import support, list_tests +import pickle class ListTest(list_tests.CommonTest): type2test = list @@ -69,6 +70,33 @@ check(10) # check our checking code check(1000000) + def test_iterator_pickle(self): + # Userlist iterators don't support pickling yet since + # they are based on generators. 
+ data = self.type2test([4, 5, 6, 7]) + it = itorg = iter(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(data)) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(data)[1:]) + + def test_reversed_pickle(self): + data = self.type2test([4, 5, 6, 7]) + it = itorg = reversed(data) + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))) + + it = pickle.loads(d) + drop = next(it) + d = pickle.dumps(it) + self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:]) def test_main(verbose=None): support.run_unittest(ListTest) diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py --- a/Lib/test/test_range.py +++ b/Lib/test/test_range.py @@ -341,13 +341,35 @@ def test_pickling(self): testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), - (13, 21, 3), (-2, 2, 2)] + (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)] for proto in range(pickle.HIGHEST_PROTOCOL + 1): for t in testcases: r = range(*t) self.assertEqual(list(pickle.loads(pickle.dumps(r, proto))), list(r)) + def test_iterator_pickling(self): + testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), + (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)] + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + for t in testcases: + it = itorg = iter(range(*t)) + data = list(range(*t)) + + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(type(itorg), type(it)) + self.assertEqual(list(it), data) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + continue + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(list(it), data[1:]) + def test_odd_bug(self): # This used to raise a "SystemError: NULL result without error" # because the range validation step was eating the exception diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -234,6 +234,25 @@ dup = pickle.loads(p) self.assertEqual(self.s.x, dup.x) + def test_iterator_pickling(self): + it = itorg = iter(self.s) + data = self.thetype(self.s) + d = pickle.dumps(it) + it = pickle.loads(d) + # Set iterators unpickle as list iterators due to the + # undefined order of set items. + # self.assertEqual(type(itorg), type(it)) + self.assertEqual(self.thetype(it), data) + + it = pickle.loads(d) + try: + drop = next(it) + except StopIteration: + return + d = pickle.dumps(it) + it = pickle.loads(d) + self.assertEqual(self.thetype(it), data - self.thetype((drop,))) + def test_deepcopy(self): class Tracer: def __init__(self, value): diff --git a/Lib/test/test_tuple.py b/Lib/test/test_tuple.py --- a/Lib/test/test_tuple.py +++ b/Lib/test/test_tuple.py @@ -1,6 +1,7 @@ from test import support, seq_tests import gc +import pickle class TupleTest(seq_tests.CommonTest): type2test = tuple @@ -164,6 +165,34 @@ check(10) # check our checking code check(1000000) + def test_iterator_pickle(self): + # Userlist iterators don't support pickling yet since + # they are based on generators. 
+        data = self.type2test([4, 5, 6, 7])
+        itorg = iter(data)
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(data))
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(data)[1:])
+
+    def test_reversed_pickle(self):
+        data = self.type2test([4, 5, 6, 7])
+        itorg = reversed(data)
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data)))
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])
+
 def test_main():
     support.run_unittest(TupleTest)
diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -1122,6 +1122,35 @@
 }
 
 static PyObject *
+dequeiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    Py_ssize_t i, index=0;
+    PyObject *deque;
+    dequeiterobject *it;
+    if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
+        return NULL;
+    assert(type == &dequeiter_type);
+
+    it = (dequeiterobject*)deque_iter((dequeobject *)deque);
+    if (!it)
+        return NULL;
+    /* consume items from the queue */
+    for(i=0; i<index; i++) {
+        PyObject *item = dequeiter_next(it);
+        if (item) {
+            Py_DECREF(item);
+        } else {
+            if (it->counter) {
+                Py_DECREF(it);
+                return NULL;
+            } else
+                break;
+        }
+    }
+    return (PyObject*)it;
+}
+
+static PyObject *
 dequeiter_len(dequeiterobject *it)
 {
     return PyLong_FromSsize_t(it->counter);
@@ -1129,14 +1158,21 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+dequeiter_reduce(dequeiterobject *it)
+{
+    return Py_BuildValue("O(On)", Py_TYPE(it), it->deque, it->deque->len - it->counter);
+}
+
 static PyMethodDef dequeiter_methods[] = {
     {"__length_hint__", (PyCFunction)dequeiter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)dequeiter_reduce, METH_NOARGS, reduce_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
 static PyTypeObject dequeiter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "deque_iterator",                   /* tp_name */
+    "_collections._deque_iterator",     /* tp_name */
     sizeof(dequeiterobject),            /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
@@ -1164,6 +1200,16 @@
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)dequeiter_next,       /* tp_iternext */
     dequeiter_methods,                  /* tp_methods */
+    0,                                  /* tp_members */
+    0,                                  /* tp_getset */
+    0,                                  /* tp_base */
+    0,                                  /* tp_dict */
+    0,                                  /* tp_descr_get */
+    0,                                  /* tp_descr_set */
+    0,                                  /* tp_dictoffset */
+    0,                                  /* tp_init */
+    0,                                  /* tp_alloc */
+    dequeiter_new,                      /* tp_new */
     0,
 };
 
@@ -1217,9 +1263,38 @@
     return item;
 }
 
+static PyObject *
+dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    Py_ssize_t i, index=0;
+    PyObject *deque;
+    dequeiterobject *it;
+    if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
+        return NULL;
+    assert(type == &dequereviter_type);
+
+    it = (dequeiterobject*)deque_reviter((dequeobject *)deque);
+    if (!it)
+        return NULL;
+    /* consume items from the queue */
+    for(i=0; i<index; i++) {
+        PyObject *item = dequereviter_next(it);
+        if (item) {
+            Py_DECREF(item);
+        } else {
+            if (it->counter) {
+                Py_DECREF(it);
+                return NULL;
+            } else
+                break;
+        }
+    }
+    return (PyObject*)it;
+}
+
 static PyTypeObject dequereviter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "deque_reverse_iterator",           /* tp_name */
+    "_collections._deque_reverse_iterator", /* tp_name */
     sizeof(dequeiterobject),            /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
@@ -1247,6 +1322,16 @@
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)dequereviter_next,
/* tp_iternext */ dequeiter_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + dequereviter_new, /* tp_new */ 0, }; @@ -1653,9 +1738,13 @@ if (PyType_Ready(&dequeiter_type) < 0) return NULL; + Py_INCREF(&dequeiter_type); + PyModule_AddObject(m, "_deque_iterator", (PyObject *)&dequeiter_type); if (PyType_Ready(&dequereviter_type) < 0) return NULL; + Py_INCREF(&dequereviter_type); + PyModule_AddObject(m, "_deque_reverse_iterator", (PyObject *)&dequereviter_type); return m; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2753,6 +2753,34 @@ return 0; } +static PyObject * +arrayiter_reduce(arrayiterobject *it) +{ + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->ao, it->index); +} + +static PyObject * +arrayiter_setstate(arrayiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); +static PyMethodDef arrayiter_methods[] = { + {"__reduce__", (PyCFunction)arrayiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)arrayiter_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + static PyTypeObject PyArrayIter_Type = { PyVarObject_HEAD_INIT(NULL, 0) "arrayiterator", /* tp_name */ @@ -2782,7 +2810,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)arrayiter_next, /* tp_iternext */ - 0, /* tp_methods */ + arrayiter_methods, /* tp_methods */ }; diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -134,6 +134,53 @@ return r; } +static PyObject * +groupby_reduce(groupbyobject *lz) +{ + /* reduce as a 'new' call with an optional 'setstate' if groupby + * has started + */ + PyObject *value; + if (lz->tgtkey && lz->currkey && lz->currvalue) + value = Py_BuildValue("O(OO)(OOO)", Py_TYPE(lz), + lz->it, lz->keyfunc, lz->currkey, lz->currvalue, lz->tgtkey); + else + value = Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->it, lz->keyfunc); + + return value; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +groupby_setstate(groupbyobject *lz, PyObject *state) +{ + PyObject *currkey, *currvalue, *tgtkey; + if (!PyArg_ParseTuple(state, "OOO", &currkey, &currvalue, &tgtkey)) + return NULL; + Py_CLEAR(lz->currkey); + lz->currkey = currkey; + Py_INCREF(lz->currkey); + Py_CLEAR(lz->currvalue); + lz->currvalue = currvalue; + Py_INCREF(lz->currvalue); + Py_CLEAR(lz->tgtkey); + lz->tgtkey = tgtkey; + Py_INCREF(lz->tgtkey); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + +static PyMethodDef groupby_methods[] = { + {"__reduce__", (PyCFunction)groupby_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)groupby_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(groupby_doc, "groupby(iterable[, keyfunc]) -> create an iterator which returns\n\ (key, sub-iterator) grouped by each value of key(value).\n"); @@ -168,7 +215,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)groupby_next, /* tp_iternext */ - 0, /* tp_methods */ + groupby_methods, /* 
tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -194,6 +241,17 @@
 static PyTypeObject _grouper_type;
 
 static PyObject *
+_grouper_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    PyObject *parent, *tgtkey;
+
+    if (!PyArg_ParseTuple(args, "O!O", &groupby_type, &parent, &tgtkey))
+        return NULL;
+
+    return _grouper_create((groupbyobject*) parent, tgtkey);
+}
+
+static PyObject *
 _grouper_create(groupbyobject *parent, PyObject *tgtkey)
 {
     _grouperobject *igo;
@@ -269,6 +327,20 @@
     return r;
 }
 
+static PyObject *
+_grouper_reduce(_grouperobject *lz)
+{
+    return Py_BuildValue("O(OO)", Py_TYPE(lz),
+        lz->parent, lz->tgtkey);
+}
+
+static PyMethodDef _grouper_methods[] = {
+    {"__reduce__", (PyCFunction)_grouper_reduce, METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}           /* sentinel */
+};
+
+
 static PyTypeObject _grouper_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "itertools._grouper",               /* tp_name */
@@ -298,7 +370,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)_grouper_next,        /* tp_iternext */
-    0,                                  /* tp_methods */
+    _grouper_methods,                   /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -308,7 +380,7 @@
     0,                                  /* tp_dictoffset */
     0,                                  /* tp_init */
     0,                                  /* tp_alloc */
-    0,                                  /* tp_new */
+    _grouper_new,                       /* tp_new */
     PyObject_GC_Del,                    /* tp_free */
 };
 
@@ -344,7 +416,7 @@
 static PyTypeObject teedataobject_type;
 
 static PyObject *
-teedataobject_new(PyObject *it)
+teedataobject_newinternal(PyObject *it)
 {
     teedataobject *tdo;
 
@@ -364,7 +436,7 @@
 teedataobject_jumplink(teedataobject *tdo)
 {
     if (tdo->nextlink == NULL)
-        tdo->nextlink = teedataobject_new(tdo->it);
+        tdo->nextlink = teedataobject_newinternal(tdo->it);
     Py_XINCREF(tdo->nextlink);
     return tdo->nextlink;
 }
@@ -420,11 +492,80 @@
     PyObject_GC_Del(tdo);
 }
 
+static PyObject *
+teedataobject_reduce(teedataobject *tdo)
+{
+    int i;
+    /* create a temporary list of already iterated values */
+    PyObject *values = PyList_New(tdo->numread);
+    if (!values)
+        return NULL;
+    for (i=0 ; i<tdo->numread ; i++) {
+        Py_INCREF(tdo->values[i]);
+        PyList_SET_ITEM(values, i, tdo->values[i]);
+    }
+    return Py_BuildValue("O(ONO)", Py_TYPE(tdo), tdo->it,
+                         values,
+                         tdo->nextlink ? tdo->nextlink : Py_None);
+}
+
+static PyTypeObject teedataobject_type;
+
+static PyObject *
+teedataobject_new(PyTypeObject *type, PyObject *args, PyObject *kw)
+{
+    teedataobject *tdo;
+    PyObject *it, *values, *next;
+    Py_ssize_t i, len;
+
+    assert(type == &teedataobject_type);
+    if (!PyArg_ParseTuple(args, "OO!O", &it, &PyList_Type, &values, &next))
+        return NULL;
+
+    tdo = (teedataobject *)teedataobject_newinternal(it);
+    if (!tdo)
+        return NULL;
+
+    len = PyList_GET_SIZE(values);
+    if (len > LINKCELLS)
+        goto err;
+    for (i=0; i<len; i++) {
+        tdo->values[i] = PyList_GET_ITEM(values, i);
+        Py_INCREF(tdo->values[i]);
+    }
+    tdo->numread = len;
+
+    if (len == LINKCELLS) {
+        if (next != Py_None) {
+            if (Py_TYPE(next) != &teedataobject_type)
+                goto err;
+            assert(tdo->nextlink == NULL);
+            Py_INCREF(next);
+            tdo->nextlink = next;
+        }
+    } else {
+        if (next != Py_None)
+            goto err; /* shouldn't have a next if we are not full */
+    }
+    return (PyObject*)tdo;
+
+err:
+    Py_XDECREF(tdo);
+    PyErr_SetString(PyExc_ValueError, "Invalid arguments");
+    return NULL;
+}
+
+static PyMethodDef teedataobject_methods[] = {
+    {"__reduce__", (PyCFunction)teedataobject_reduce, METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}           /* sentinel */
+};
+
 PyDoc_STRVAR(teedataobject_doc, "Data container common to multiple tee objects.");
 
 static PyTypeObject teedataobject_type = {
     PyVarObject_HEAD_INIT(0, 0)         /* Must fill in type value later */
-    "itertools.tee_dataobject",         /* tp_name */
+    "itertools._tee_dataobject",        /* tp_name */
     sizeof(teedataobject),              /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
@@ -451,7 +592,7 @@
     0,                                  /* tp_weaklistoffset */
     0,                                  /* tp_iter */
     0,                                  /* tp_iternext */
-    0,                                  /* tp_methods */
+    teedataobject_methods,              /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -461,7 +602,7 @@
     0,                                  /* tp_dictoffset */
     0,                                  /* tp_init */
     0,                                  /* tp_alloc */
-    0,                                  /* tp_new */
+    teedataobject_new,                  /* tp_new */
     PyObject_GC_Del,                    /* tp_free */
 };
 
@@ -528,7 +669,7 @@
     to = PyObject_GC_New(teeobject, &tee_type);
     if (to == NULL)
         goto done;
-    to->dataobj = (teedataobject *)teedataobject_new(it);
+    to->dataobj = (teedataobject *)teedataobject_newinternal(it);
     if (!to->dataobj) {
         PyObject_GC_Del(to);
         to = NULL;
@@ -548,7 +689,7 @@
 {
     PyObject *iterable;
 
-    if (!PyArg_UnpackTuple(args, "tee", 1, 1, &iterable))
+    if (!PyArg_UnpackTuple(args, "_tee", 1, 1, &iterable))
         return NULL;
     return tee_fromiterable(iterable);
 }
@@ -570,17 +711,43 @@
     PyObject_GC_Del(to);
 }
 
+static PyObject *
+tee_reduce(teeobject *to)
+{
+    return Py_BuildValue("O(())(Oi)", Py_TYPE(to), to->dataobj, to->index);
+}
+
+static PyObject *
+tee_setstate(teeobject *to, PyObject *state)
+{
+    teedataobject *tdo;
+    int index;
+    if (!PyArg_ParseTuple(state, "O!i", &teedataobject_type, &tdo, &index))
+        return NULL;
+    if (index < 0 || index > LINKCELLS) {
+        PyErr_SetString(PyExc_ValueError, "Index out of range");
+        return NULL;
+    }
+    Py_CLEAR(to->dataobj);
+    to->dataobj = tdo;
+    Py_INCREF(to->dataobj);
+    to->index = index;
+    Py_RETURN_NONE;
+}
+
 PyDoc_STRVAR(teeobject_doc,
 "Iterator wrapped to make it copyable");
 
 static PyMethodDef tee_methods[] = {
     {"__copy__",        (PyCFunction)tee_copy,     METH_NOARGS, teecopy_doc},
+    {"__reduce__",      (PyCFunction)tee_reduce,   METH_NOARGS, reduce_doc},
+    {"__setstate__",    (PyCFunction)tee_setstate, METH_O,      setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
 static PyTypeObject tee_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "itertools.tee",                    /* tp_name */
+    "itertools._tee",                   /* tp_name */
     sizeof(teeobject),                  /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
@@
-771,6 +938,38 @@ } } +static PyObject * +cycle_reduce(cycleobject *lz) +{ + /* Create a new cycle with the iterator tuple, then set + * the saved state on it. + */ + return Py_BuildValue("O(O)(Oi)", Py_TYPE(lz), + lz->it, lz->saved, lz->firstpass); + } + +static PyObject * +cycle_setstate(cycleobject *lz, PyObject *state) +{ + PyObject *saved=NULL; + int firstpass; + if (!PyArg_ParseTuple(state, "Oi", &saved, &firstpass)) + return NULL; + Py_CLEAR(lz->saved); + lz->saved = saved; + Py_XINCREF(lz->saved); + lz->firstpass = firstpass != 0; + Py_RETURN_NONE; +} + +static PyMethodDef cycle_methods[] = { + {"__reduce__", (PyCFunction)cycle_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)cycle_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(cycle_doc, "cycle(iterable) --> cycle object\n\ \n\ @@ -807,7 +1006,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)cycle_next, /* tp_iternext */ - 0, /* tp_methods */ + cycle_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -913,6 +1112,31 @@ } } +static PyObject * +dropwhile_reduce(dropwhileobject *lz) +{ + return Py_BuildValue("O(OO)l", Py_TYPE(lz), + lz->func, lz->it, lz->start); +} + +static PyObject * +dropwhile_setstate(dropwhileobject *lz, PyObject *state) +{ + int start = PyObject_IsTrue(state); + if (start == -1) + return NULL; + lz->start = start; + Py_RETURN_NONE; +} + +static PyMethodDef dropwhile_methods[] = { + {"__reduce__", (PyCFunction)dropwhile_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)dropwhile_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(dropwhile_doc, "dropwhile(predicate, iterable) --> dropwhile object\n\ \n\ @@ -949,7 +1173,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)dropwhile_next, /* tp_iternext */ - 0, /* tp_methods */ + dropwhile_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1052,6 +1276,30 @@ return NULL; } +static PyObject * +takewhile_reduce(takewhileobject *lz) +{ + return Py_BuildValue("O(OO)l", Py_TYPE(lz), + lz->func, lz->it, lz->stop); +} + +static PyObject * +takewhile_reduce_setstate(takewhileobject *lz, PyObject *state) +{ + int stop = PyObject_IsTrue(state); + if (stop == -1) + return NULL; + lz->stop = stop; + Py_RETURN_NONE; +} + +static PyMethodDef takewhile_reduce_methods[] = { + {"__reduce__", (PyCFunction)takewhile_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)takewhile_reduce_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; PyDoc_STRVAR(takewhile_doc, "takewhile(predicate, iterable) --> takewhile object\n\ \n\ @@ -1088,7 +1336,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)takewhile_next, /* tp_iternext */ - 0, /* tp_methods */ + takewhile_reduce_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1244,6 +1492,44 @@ return item; } +static PyObject * +islice_reduce(isliceobject *lz) +{ + /* When unpickled, generate a new object with the same bounds, + * then 'setstate' with the next and count + */ + PyObject *stop; + if (lz->stop == -1) { + stop = Py_None; + Py_INCREF(stop); + } else { + stop = PyLong_FromSsize_t(lz->stop); + if (stop == NULL) + return NULL; + } + return Py_BuildValue("O(OnNn)n", Py_TYPE(lz), + lz->it, lz->next, stop, lz->step, + lz->cnt); +} + +static PyObject * +islice_setstate(isliceobject *lz, PyObject *state) +{ + Py_ssize_t 
cnt = PyLong_AsSsize_t(state); + if (cnt == -1 && PyErr_Occurred()) + return NULL; + lz->cnt = cnt; + Py_RETURN_NONE; +} + +static PyMethodDef islice_methods[] = { + {"__reduce__", (PyCFunction)islice_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)islice_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(islice_doc, "islice(iterable, [start,] stop [, step]) --> islice object\n\ \n\ @@ -1284,7 +1570,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)islice_next, /* tp_iternext */ - 0, /* tp_methods */ + islice_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1379,6 +1665,19 @@ return result; } +static PyObject * +starmap_reduce(starmapobject *lz) +{ + /* Just pickle the iterator */ + return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it); +} + +static PyMethodDef starmap_methods[] = { + {"__reduce__", (PyCFunction)starmap_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(starmap_doc, "starmap(function, sequence) --> starmap object\n\ \n\ @@ -1415,7 +1714,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)starmap_next, /* tp_iternext */ - 0, /* tp_methods */ + starmap_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1534,6 +1833,41 @@ return chain_next(lz); /* recurse and use next active */ } +static PyObject * +chain_reduce(chainobject *lz) +{ + if (lz->source) { + /* we can't pickle function objects (itertools.from_iterable) so + * we must use setstate to replace the iterable. One day we + * will fix pickling of functions + */ + if (lz->active) { + return Py_BuildValue("O()(OO)", Py_TYPE(lz), lz->source, lz->active); + } else { + return Py_BuildValue("O()(O)", Py_TYPE(lz), lz->source); + } + } else { + return Py_BuildValue("O()", Py_TYPE(lz)); /* exhausted */ + } + return NULL; +} + +static PyObject * +chain_setstate(chainobject *lz, PyObject *state) +{ + PyObject *source, *active=NULL; + if (! 
PyArg_ParseTuple(state, "O|O", &source, &active))
+        return NULL;
+
+    Py_CLEAR(lz->source);
+    lz->source = source;
+    Py_INCREF(lz->source);
+    Py_CLEAR(lz->active);
+    lz->active = active;
+    Py_XINCREF(lz->active);
+    Py_RETURN_NONE;
+}
+
 PyDoc_STRVAR(chain_doc,
 "chain(*iterables) --> chain object\n\
 \n\
@@ -1550,6 +1884,10 @@
 static PyMethodDef chain_methods[] = {
     {"from_iterable", (PyCFunction) chain_new_from_iterable, METH_O | METH_CLASS,
         chain_from_iterable_doc},
+    {"__reduce__",      (PyCFunction)chain_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)chain_setstate,    METH_O,
+     setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -1790,6 +2128,83 @@
     return NULL;
 }
 
+static PyObject *
+product_reduce(productobject *lz)
+{
+    if (lz->stopped) {
+        return Py_BuildValue("O(())", Py_TYPE(lz));
+    } else if (lz->result == NULL) {
+        return Py_BuildValue("OO", Py_TYPE(lz), lz->pools);
+    } else {
+        PyObject *indices;
+        Py_ssize_t n, i;
+
+        /* we must pickle the indices use them for setstate, and
+         * additionally indicate that the iterator has started
+         */
+        n = PyTuple_GET_SIZE(lz->pools);
+        indices = PyTuple_New(n);
+        if (indices == NULL)
+            return NULL;
+        for (i=0; i<n; i++) {
+            PyObject* index = PyLong_FromSsize_t(lz->indices[i]);
+            if (!index) {
+                Py_DECREF(indices);
+                return NULL;
+            }
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+        return Py_BuildValue("OON", Py_TYPE(lz), lz->pools, indices);
+    }
+}
+
+static PyObject *
+product_setstate(productobject *lz, PyObject *state)
+{
+    PyObject *result;
+    Py_ssize_t n, i;
+
+    n = PyTuple_GET_SIZE(lz->pools);
+    if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != n) {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
+        return NULL;
+    }
+    for (i=0; i<n; i++)
+    {
+        PyObject* indexObject = PyTuple_GET_ITEM(state, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index < 0 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        /* clamp the index */
+        if (index < 0)
+            index = 0;
+        else if (index > n-1)
+            index = n-1;
+        lz->indices[i] = index;
+    }
+
+    result = PyTuple_New(n);
+    if (!result)
+        return NULL;
+    for (i=0; i<n; i++) {
+        PyObject *pool = PyTuple_GET_ITEM(lz->pools, i);
+        PyObject *element = PyTuple_GET_ITEM(pool, lz->indices[i]);
+        Py_INCREF(element);
+        PyTuple_SET_ITEM(result, i, element);
+    }
+    Py_CLEAR(lz->result);
+    lz->result = result;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef product_methods[] = {
+    {"__reduce__", (PyCFunction)product_reduce, METH_NOARGS,
+     reduce_doc},
+    {"__setstate__", (PyCFunction)product_setstate, METH_O,
+     setstate_doc},
+    {NULL,              NULL}           /* sentinel */
+};
+
 PyDoc_STRVAR(product_doc,
 "product(*iterables) --> product object\n\
 \n\
@@ -1834,7 +2249,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)product_next,         /* tp_iternext */
-    0,                                  /* tp_methods */
+    product_methods,                    /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2021,6 +2436,86 @@
     return NULL;
 }
 
+static PyObject *
+combinations_reduce(combinationsobject *lz)
+{
+    if (lz->result == NULL) {
+        return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r);
+    } else if (lz->stopped) {
+        return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r);
+    } else {
+        PyObject *indices;
+        Py_ssize_t i;
+
+        /* we must pickle the indices and use them for setstate */
+        indices = PyTuple_New(lz->r);
+        if (!indices)
+            return NULL;
+        for (i=0; i<lz->r; i++)
+        {
+            PyObject* index = PyLong_FromSsize_t(lz->indices[i]);
+            if (!index) {
+                Py_DECREF(indices);
+                return NULL;
+            }
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+
+        return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices);
+    }
+}
+
+static PyObject *
+combinations_setstate(combinationsobject *lz, PyObject *state)
+{
+    PyObject *result;
+    Py_ssize_t i;
+    Py_ssize_t n = PyTuple_GET_SIZE(lz->pool);
+
+    if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != lz->r)
+    {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
arguments"); + return NULL; + } + + for (i=0; ir; i++) + { + Py_ssize_t max; + PyObject* indexObject = PyTuple_GET_ITEM(state, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index == -1 && PyErr_Occurred()) + return NULL; /* not an integer */ + max = i + n - lz->r; + /* clamp the index (beware of negative max) */ + if (index > max) + index = max; + if (index < 0) + index = 0; + lz->indices[i] = index; + } + + result = PyTuple_New(lz->r); + if (result == NULL) + return NULL; + for (i=0; ir; i++) { + PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + + Py_CLEAR(lz->result); + lz->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef combinations_methods[] = { + {"__reduce__", (PyCFunction)combinations_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)combinations_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(combinations_doc, "combinations(iterable, r) --> combinations object\n\ \n\ @@ -2029,11 +2524,11 @@ static PyTypeObject combinations_type = { PyVarObject_HEAD_INIT(NULL, 0) - "itertools.combinations", /* tp_name */ + "itertools.combinations", /* tp_name */ sizeof(combinationsobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)combinations_dealloc, /* tp_dealloc */ + (destructor)combinations_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2050,14 +2545,14 @@ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /* tp_flags */ - combinations_doc, /* tp_doc */ - (traverseproc)combinations_traverse, /* tp_traverse */ + combinations_doc, /* tp_doc */ + (traverseproc)combinations_traverse,/* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)combinations_next, /* tp_iternext */ - 0, /* tp_methods */ + (iternextfunc)combinations_next, /* tp_iternext */ + combinations_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2067,7 +2562,7 @@ 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - combinations_new, /* tp_new */ + combinations_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ }; @@ -2266,6 +2761,82 @@ return NULL; } +static PyObject * +cwr_reduce(cwrobject *lz) +{ + if (lz->result == NULL) { + return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r); + } else if (lz->stopped) { + return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r); + } else { + PyObject *indices; + Py_ssize_t i; + + /* we must pickle the indices and use them for setstate */ + indices = PyTuple_New(lz->r); + if (!indices) + return NULL; + for (i=0; ir; i++) + { + PyObject* index = PyLong_FromSsize_t(lz->indices[i]); + if (!index) { + Py_DECREF(indices); + return NULL; + } + PyTuple_SET_ITEM(indices, i, index); + } + + return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices); + } +} + +static PyObject * +cwr_setstate(cwrobject *lz, PyObject *state) +{ + PyObject *result; + Py_ssize_t n, i; + + if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != lz->r) + { + PyErr_SetString(PyExc_ValueError, "invalid arguments"); + return NULL; + } + + n = PyTuple_GET_SIZE(lz->pool); + for (i=0; ir; i++) + { + PyObject* indexObject = PyTuple_GET_ITEM(state, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + /* clamp the index */ + if (index < 0) + index = 0; + else if (index 
> n-1) + index = n-1; + lz->indices[i] = index; + } + result = PyTuple_New(lz->r); + if (result == NULL) + return NULL; + for (i=0; i<lz->r; i++) { + PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + Py_CLEAR(lz->result); + lz->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef cwr_methods[] = { + {"__reduce__", (PyCFunction)cwr_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)cwr_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(cwr_doc, "combinations_with_replacement(iterable, r) --> combinations_with_replacement object\n\ \n\ @@ -2275,11 +2846,11 @@ static PyTypeObject cwr_type = { PyVarObject_HEAD_INIT(NULL, 0) - "itertools.combinations_with_replacement", /* tp_name */ - sizeof(cwrobject), /* tp_basicsize */ + "itertools.combinations_with_replacement", /* tp_name */ + sizeof(cwrobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)cwr_dealloc, /* tp_dealloc */ + (destructor)cwr_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2291,19 +2862,19 @@ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericGetAttr, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_BASETYPE, /* tp_flags */ - cwr_doc, /* tp_doc */ - (traverseproc)cwr_traverse, /* tp_traverse */ + Py_TPFLAGS_BASETYPE, /* tp_flags */ + cwr_doc, /* tp_doc */ + (traverseproc)cwr_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc)cwr_next, /* tp_iternext */ - 0, /* tp_methods */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)cwr_next, /* tp_iternext */ + cwr_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2313,8 +2884,8 @@ 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ - cwr_new, /* tp_new */ - PyObject_GC_Del, /* tp_free */ + cwr_new, /* tp_new */ + PyObject_GC_Del, /* tp_free */ }; @@ -2538,6 +3109,115 @@ return NULL; } +static PyObject * +permutations_reduce(permutationsobject *po) +{ + if (po->result == NULL) { + return Py_BuildValue("O(On)", Py_TYPE(po), po->pool, po->r); + } else if (po->stopped) { + return Py_BuildValue("O(()n)", Py_TYPE(po), po->r); + } else { + PyObject *indices=NULL, *cycles=NULL; + Py_ssize_t n, i; + + /* we must pickle the indices and cycles and use them for setstate */ + n = PyTuple_GET_SIZE(po->pool); + indices = PyTuple_New(n); + if (indices == NULL) + goto err; + for (i=0; i<n; i++) + { + PyObject* index = PyLong_FromSsize_t(po->indices[i]); + if (!index) + goto err; + PyTuple_SET_ITEM(indices, i, index); + } + + cycles = PyTuple_New(po->r); + if (cycles == NULL) + goto err; + for (i=0; i<po->r; i++) + { + PyObject* index = PyLong_FromSsize_t(po->cycles[i]); + if (!index) + goto err; + PyTuple_SET_ITEM(cycles, i, index); + } + return Py_BuildValue("O(On)(NN)", Py_TYPE(po), + po->pool, po->r, + indices, cycles); + err: + Py_XDECREF(indices); + Py_XDECREF(cycles); + return NULL; + } +} + +static PyObject * +permutations_setstate(permutationsobject *po, PyObject *state) +{ + PyObject *indices, *cycles, *result; + Py_ssize_t n, i; + + if (!PyArg_ParseTuple(state, "O!O!", + &PyTuple_Type, &indices, + &PyTuple_Type, &cycles)) + return NULL; + + n = PyTuple_GET_SIZE(po->pool); + if (PyTuple_GET_SIZE(indices) != n || + PyTuple_GET_SIZE(cycles) != po->r) + { + PyErr_SetString(PyExc_ValueError, "invalid
arguments"); + return NULL; + } + + for (i=0; i n-1) + index = n-1; + po->indices[i] = index; + } + + for (i=0; ir; i++) + { + PyObject* indexObject = PyTuple_GET_ITEM(cycles, i); + Py_ssize_t index = PyLong_AsSsize_t(indexObject); + if (index < 0 && PyErr_Occurred()) + return NULL; /* not an integer */ + if (index < 1) + index = 1; + else if (index > n-i) + index = n-i; + po->cycles[i] = index; + } + result = PyTuple_New(po->r); + if (result == NULL) + return NULL; + for (i=0; ir; i++) { + PyObject *element = PyTuple_GET_ITEM(po->pool, po->indices[i]); + Py_INCREF(element); + PyTuple_SET_ITEM(result, i, element); + } + Py_CLEAR(po->result); + po->result = result; + Py_RETURN_NONE; +} + +static PyMethodDef permuations_methods[] = { + {"__reduce__", (PyCFunction)permutations_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)permutations_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(permutations_doc, "permutations(iterable[, r]) --> permutations object\n\ \n\ @@ -2574,7 +3254,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)permutations_next, /* tp_iternext */ - 0, /* tp_methods */ + permuations_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2605,7 +3285,7 @@ static char *kwargs[] = {"iterable", "func", NULL}; PyObject *iterable; PyObject *it; - PyObject *binop = NULL; + PyObject *binop = Py_None; accumulateobject *lz; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:accumulate", @@ -2624,8 +3304,10 @@ return NULL; } - Py_XINCREF(binop); - lz->binop = binop; + if (binop != Py_None) { + Py_XINCREF(binop); + lz->binop = binop; + } lz->total = NULL; lz->it = it; return (PyObject *)lz; @@ -2681,6 +3363,31 @@ return newtotal; } +static PyObject * +accumulate_reduce(accumulateobject *lz) +{ + return Py_BuildValue("O(OO)O", Py_TYPE(lz), + lz->it, lz->binop?lz->binop:Py_None, + lz->total?lz->total:Py_None); + } + +static PyObject * +accumulate_setstate(accumulateobject *lz, PyObject *state) +{ + Py_CLEAR(lz->total); + lz->total = state; + Py_INCREF(lz->total); + Py_RETURN_NONE; +} + +static PyMethodDef accumulate_methods[] = { + {"__reduce__", (PyCFunction)accumulate_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)accumulate_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(accumulate_doc, "accumulate(iterable[, func]) --> accumulate object\n\ \n\ @@ -2716,7 +3423,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)accumulate_next, /* tp_iternext */ - 0, /* tp_methods */ + accumulate_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2833,6 +3540,19 @@ } } +static PyObject * +compress_reduce(compressobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->data, lz->selectors); + } + +static PyMethodDef compress_methods[] = { + {"__reduce__", (PyCFunction)compress_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(compress_doc, "compress(data, selectors) --> iterator over selected data\n\ \n\ @@ -2870,7 +3590,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)compress_next, /* tp_iternext */ - 0, /* tp_methods */ + compress_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2977,6 +3697,19 @@ } } +static PyObject * +filterfalse_reduce(filterfalseobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), + lz->func, lz->it); + } + +static PyMethodDef 
filterfalse_methods[] = { + {"__reduce__", (PyCFunction)filterfalse_reduce, METH_NOARGS, + reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(filterfalse_doc, "filterfalse(function or None, sequence) --> filterfalse object\n\ \n\ @@ -3013,7 +3746,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)filterfalse_next, /* tp_iternext */ - 0, /* tp_methods */ + filterfalse_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -3207,11 +3940,9 @@ return Py_BuildValue("O(n)", Py_TYPE(lz), lz->cnt); } -PyDoc_STRVAR(count_reduce_doc, "Return state information for pickling."); - static PyMethodDef count_methods[] = { {"__reduce__", (PyCFunction)count_reduce, METH_NOARGS, - count_reduce_doc}, + reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -3352,8 +4083,21 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +repeat_reduce(repeatobject *ro) +{ + /* unpickle this so that a new repeat iterator is constructed with an + * object; when the count is finite it is passed to the constructor too + */ + if (ro->cnt >= 0) + return Py_BuildValue("O(On)", Py_TYPE(ro), ro->element, ro->cnt); + else + return Py_BuildValue("O(O)", Py_TYPE(ro), ro->element); +} + static PyMethodDef repeat_methods[] = { {"__length_hint__", (PyCFunction)repeat_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)repeat_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -3579,6 +4323,49 @@ return result; } +static PyObject * +zip_longest_reduce(ziplongestobject *lz) +{ + + /* Create a new tuple with empty sequences where appropriate to pickle. + * Then use setstate to set the fillvalue + */ + int i; + PyObject *args = PyTuple_New(PyTuple_GET_SIZE(lz->ittuple)); + if (args == NULL) + return NULL; + for (i=0; i<PyTuple_GET_SIZE(lz->ittuple); i++) { + PyObject *elem = PyTuple_GET_ITEM(lz->ittuple, i); + if (elem == NULL) { + elem = PyTuple_New(0); + if (elem == NULL) { + Py_DECREF(args); + return NULL; + } + } else + Py_INCREF(elem); + PyTuple_SET_ITEM(args, i, elem); + } + return Py_BuildValue("ONO", Py_TYPE(lz), args, lz->fillvalue); +} + +static PyObject * +zip_longest_setstate(ziplongestobject *lz, PyObject *state) +{ + Py_CLEAR(lz->fillvalue); + lz->fillvalue = state; + Py_INCREF(lz->fillvalue); + Py_RETURN_NONE; +} + +static PyMethodDef zip_longest_methods[] = { + {"__reduce__", (PyCFunction)zip_longest_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)zip_longest_setstate, METH_O, + setstate_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(zip_longest_doc, "zip_longest(iter1 [,iter2 [...]], [fillvalue=None]) --> zip_longest object\n\ \n\ @@ -3620,7 +4407,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)zip_longest_next, /* tp_iternext */ - 0, /* tp_methods */ + zip_longest_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -3708,6 +4495,9 @@ &product_type, &repeat_type, &groupby_type, + &_grouper_type, + &tee_type, + &teedataobject_type, NULL }; @@ -3725,11 +4515,5 @@ PyModule_AddObject(m, name+1, (PyObject *)typelist[i]); } - if (PyType_Ready(&teedataobject_type) < 0) - return NULL; - if (PyType_Ready(&tee_type) < 0) - return NULL; - if (PyType_Ready(&_grouper_type) < 0) - return NULL; return m; } diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -2999,7 +2999,7 @@ } static PyObject * -bytesarrayiter_length_hint(bytesiterobject *it)
+bytearrayiter_length_hint(bytesiterobject *it) { Py_ssize_t len = 0; if (it->it_seq) @@ -3010,9 +3010,41 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +bytearrayiter_reduce(bytesiterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +static PyObject * +bytearrayiter_setstate(bytesiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef bytearrayiter_methods[] = { - {"__length_hint__", (PyCFunction)bytesarrayiter_length_hint, METH_NOARGS, + {"__length_hint__", (PyCFunction)bytearrayiter_length_hint, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)bytearrayiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)bytearrayiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3074,9 +3074,43 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +striter_reduce(striterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +striter_setstate(striterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef striter_methods[] = { {"__length_hint__", (PyCFunction)striter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)striter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)striter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2323,9 +2323,16 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +dictiter_reduce(dictiterobject *di); + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + static PyMethodDef dictiter_methods[] = { {"__length_hint__", (PyCFunction)dictiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)dictiter_reduce, METH_NOARGS, + reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -2560,6 +2567,52 @@ }; +static PyObject * +dictiter_reduce(dictiterobject *di) +{ + PyObject *list; + dictiterobject tmp; + + list = PyList_New(0); + if (!list) + return NULL; + + /* copy the itertor state */ + tmp = *di; + Py_XINCREF(tmp.di_dict); + + /* iterate the temporary into a list */ + for(;;) { + PyObject *element = 0; + if (Py_TYPE(di) == &PyDictIterItem_Type) + element = dictiter_iternextitem(&tmp); + else if 
(Py_TYPE(di) == &PyDictIterKey_Type) + element = dictiter_iternextkey(&tmp); + else if (Py_TYPE(di) == &PyDictIterValue_Type) + element = dictiter_iternextvalue(&tmp); + else + assert(0); + if (element) { + if (PyList_Append(list, element)) { + Py_DECREF(element); + Py_DECREF(list); + Py_XDECREF(tmp.di_dict); + return NULL; + } + Py_DECREF(element); + } else + break; + } + Py_XDECREF(tmp.di_dict); + /* check for error */ + if (tmp.di_dict != NULL) { + /* we have an error */ + Py_DECREF(list); + return NULL; + } + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} + /***********************************************/ /* View objects for keys(), items(), values(). */ /***********************************************/ diff --git a/Objects/enumobject.c b/Objects/enumobject.c --- a/Objects/enumobject.c +++ b/Objects/enumobject.c @@ -158,6 +158,22 @@ return result; } +static PyObject * +enum_reduce(enumobject *en) +{ + if (en->en_longindex != NULL) + return Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex); + else + return Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyMethodDef enum_methods[] = { + {"__reduce__", (PyCFunction)enum_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(enum_doc, "enumerate(iterable[, start]) -> iterator for index, value of iterable\n" "\n" @@ -197,7 +213,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)enum_next, /* tp_iternext */ - 0, /* tp_methods */ + enum_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -319,8 +335,40 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +reversed_reduce(reversedobject *ro) +{ + if (ro->seq) + return Py_BuildValue("O(O)n", Py_TYPE(ro), ro->seq, ro->index); + else + return Py_BuildValue("O(())", Py_TYPE(ro)); +} + +static PyObject * +reversed_setstate(reversedobject *ro, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (ro->seq != 0) { + Py_ssize_t n = PySequence_Size(ro->seq); + if (n < 0) + return NULL; + if (index < -1) + index = -1; + else if (index > n-1) + index = n-1; + ro->index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef reversediter_methods[] = { {"__length_hint__", (PyCFunction)reversed_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)reversed_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)reversed_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/iterobject.c b/Objects/iterobject.c --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -2,6 +2,19 @@ #include "Python.h" +/* Convenience function to get builtins.iter or builtins.reversed */ +PyObject * +_PyIter_GetBuiltin(const char *iter) +{ + PyObject *mod, *attr; + mod = PyImport_ImportModule("builtins"); + if (mod == NULL) + return NULL; + attr = PyObject_GetAttrString(mod, iter); + Py_DECREF(mod); + return attr; +} + typedef struct { PyObject_HEAD long it_index; @@ -88,8 +101,38 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +iter_reduce(seqiterobject *it) +{ + if (it->it_seq != NULL) + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + else + return 
Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +iter_setstate(seqiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + it->it_index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef seqiter_methods[] = { {"__length_hint__", (PyCFunction)iter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)iter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)iter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -195,6 +238,21 @@ return NULL; } +static PyObject * +calliter_reduce(calliterobject *it) +{ + if (it->it_callable != NULL && it->it_sentinel != NULL) + return Py_BuildValue("N(OO)", _PyIter_GetBuiltin("iter"), + it->it_callable, it->it_sentinel); + else + return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +static PyMethodDef calliter_methods[] = { + {"__reduce__", (PyCFunction)calliter_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyTypeObject PyCallIter_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "callable_iterator", /* tp_name */ @@ -224,7 +282,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)calliter_iternext, /* tp_iternext */ - 0, /* tp_methods */ + calliter_methods, /* tp_methods */ }; diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -2660,11 +2660,18 @@ static int listiter_traverse(listiterobject *, visitproc, void *); static PyObject *listiter_next(listiterobject *); static PyObject *listiter_len(listiterobject *); +static PyObject *listiter_reduce_general(void *_it, int forward); +static PyObject *listiter_reduce(listiterobject *); +static PyObject *listiter_setstate(listiterobject *, PyObject *state); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); static PyMethodDef listiter_methods[] = { {"__length_hint__", (PyCFunction)listiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)listiter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)listiter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -2771,6 +2778,27 @@ } return PyLong_FromLong(0); } + +static PyObject * +listiter_reduce(listiterobject *it) +{ + return listiter_reduce_general(it, 1); +} + +static PyObject * +listiter_setstate(listiterobject *it, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + it->it_index = index; + } + Py_RETURN_NONE; +} + /*********************** List Reverse Iterator **************************/ typedef struct { @@ -2784,9 +2812,13 @@ static int listreviter_traverse(listreviterobject *, visitproc, void *); static PyObject *listreviter_next(listreviterobject *); static PyObject *listreviter_len(listreviterobject *); +static PyObject *listreviter_reduce(listreviterobject *); +static PyObject *listreviter_setstate(listreviterobject *, PyObject *); static PyMethodDef listreviter_methods[] = { {"__length_hint__", (PyCFunction)listreviter_len, METH_NOARGS, 
length_hint_doc}, + {"__reduce__", (PyCFunction)listreviter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)listreviter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -2883,3 +2915,51 @@ len = 0; return PyLong_FromSsize_t(len); } + +static PyObject * +listreviter_reduce(listreviterobject *it) +{ + return listiter_reduce_general(it, 0); +} + +static PyObject * +listreviter_setstate(listreviterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < -1) + index = -1; + else if (index > PyList_GET_SIZE(it->it_seq) - 1) + index = PyList_GET_SIZE(it->it_seq) - 1; + it->it_index = index; + } + Py_RETURN_NONE; +} + +/* common pickling support */ + +static PyObject * +listiter_reduce_general(void *_it, int forward) +{ + PyObject *list; + + /* the objects are not the same, index is of different types! */ + if (forward) { + listiterobject *it = (listiterobject *)_it; + if (it->it_seq) + return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + listreviterobject *it = (listreviterobject *)_it; + if (it->it_seq) + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("reversed"), + it->it_seq, it->it_index); + } + /* empty iterator, create an empty list */ + list = PyList_New(0); + if (list == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -964,9 +964,59 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +rangeiter_reduce(rangeiterobject *r) +{ + PyObject *start=NULL, *stop=NULL, *step=NULL; + PyObject *range; + + /* create a range object for pickling */ + start = PyLong_FromLong(r->start); + if (start == NULL) + goto err; + stop = PyLong_FromLong(r->start + r->len * r->step); + if (stop == NULL) + goto err; + step = PyLong_FromLong(r->step); + if (step == NULL) + goto err; + range = (PyObject*)make_range_object(&PyRange_Type, + start, stop, step); + if (range == NULL) + goto err; + /* return the result */ + return Py_BuildValue("N(N)i", _PyIter_GetBuiltin("iter"), range, r->index); +err: + Py_XDECREF(start); + Py_XDECREF(stop); + Py_XDECREF(step); + return NULL; +} + +static PyObject * +rangeiter_setstate(rangeiterobject *r, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0 || index >= r->len) { + PyErr_SetString(PyExc_ValueError, "index out of range"); + return NULL; + } + r->index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef rangeiter_methods[] = { {"__length_hint__", (PyCFunction)rangeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)rangeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)rangeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -1095,9 +1145,51 @@ return PyNumber_Subtract(r->len, r->index); } +static PyObject * +longrangeiter_reduce(longrangeiterobject *r) +{ + PyObject *product, *stop=NULL; + PyObject *range; + + /* create a range object for pickling. 
Must calculate the "stop" value */ + product = PyNumber_Multiply(r->len, r->step); + if (product == NULL) + return NULL; + stop = PyNumber_Add(r->start, product); + Py_DECREF(product); + if (stop == NULL) + return NULL; + Py_INCREF(r->start); + Py_INCREF(r->step); + range = (PyObject*)make_range_object(&PyRange_Type, + r->start, stop, r->step); + if (range == NULL) { + Py_DECREF(r->start); + Py_DECREF(stop); + Py_DECREF(r->step); + return NULL; + } + + /* return the result */ + return Py_BuildValue("N(N)O", _PyIter_GetBuiltin("iter"), range, r->index); +} + +static PyObject * +longrangeiter_setstate(longrangeiterobject *r, PyObject *state) +{ + Py_CLEAR(r->index); + r->index = state; + Py_INCREF(r->index); + Py_RETURN_NONE; +} + static PyMethodDef longrangeiter_methods[] = { {"__length_hint__", (PyCFunction)longrangeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)longrangeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)longrangeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -819,8 +819,51 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject *setiter_iternext(setiterobject *si); + +static PyObject * +setiter_reduce(setiterobject *si) +{ + PyObject *list; + setiterobject tmp; + + list = PyList_New(0); + if (!list) + return NULL; + + /* copy the itertor state */ + tmp = *si; + Py_XINCREF(tmp.si_set); + + /* iterate the temporary into a list */ + for(;;) { + PyObject *element = setiter_iternext(&tmp); + if (element) { + if (PyList_Append(list, element)) { + Py_DECREF(element); + Py_DECREF(list); + Py_XDECREF(tmp.si_set); + return NULL; + } + Py_DECREF(element); + } else + break; + } + Py_XDECREF(tmp.si_set); + /* check for error */ + if (tmp.si_set != NULL) { + /* we have an error */ + Py_DECREF(list); + return NULL; + } + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), list); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + static PyMethodDef setiter_methods[] = { {"__length_hint__", (PyCFunction)setiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)setiter_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -1964,8 +2007,6 @@ return result; } -PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); - static PyObject * set_sizeof(PySetObject *so) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -967,8 +967,39 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +tupleiter_reduce(tupleiterobject *it) +{ + if (it->it_seq) + return Py_BuildValue("N(O)l", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + else + return Py_BuildValue("N(())", _PyIter_GetBuiltin("iter")); +} + +static PyObject * +tupleiter_setstate(tupleiterobject *it, PyObject *state) +{ + long index = PyLong_AsLong(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (it->it_seq != NULL) { + if (index < 0) + index = 0; + else if (it->it_seq != NULL && index > PyTuple_GET_SIZE(it->it_seq)) + index = PyTuple_GET_SIZE(it->it_seq); + it->it_index = index; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef tupleiter_methods[] = { 
{"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14382,9 +14382,43 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); +static PyObject * +unicodeiter_reduce(unicodeiterobject *it) +{ + if (it->it_seq != NULL) { + return Py_BuildValue("N(O)n", _PyIter_GetBuiltin("iter"), + it->it_seq, it->it_index); + } else { + PyObject *u = PyUnicode_FromUnicode(NULL, 0); + if (u == NULL) + return NULL; + return Py_BuildValue("N(N)", _PyIter_GetBuiltin("iter"), u); + } +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyObject * +unicodeiter_setstate(unicodeiterobject *it, PyObject *state) +{ + Py_ssize_t index = PyLong_AsSsize_t(state); + if (index == -1 && PyErr_Occurred()) + return NULL; + if (index < 0) + index = 0; + it->it_index = index; + Py_RETURN_NONE; +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + static PyMethodDef unicodeiter_methods[] = { {"__length_hint__", (PyCFunction)unicodeiter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", (PyCFunction)unicodeiter_reduce, METH_NOARGS, + reduce_doc}, + {"__setstate__", (PyCFunction)unicodeiter_setstate, METH_O, + setstate_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -438,6 +438,19 @@ } } +static PyObject * +filter_reduce(filterobject *lz) +{ + return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it); +} + +PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); + +static PyMethodDef filter_methods[] = { + {"__reduce__", (PyCFunction)filter_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(filter_doc, "filter(function or None, iterable) --> filter object\n\ \n\ @@ -474,7 +487,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)filter_next, /* tp_iternext */ - 0, /* tp_methods */ + filter_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -1054,6 +1067,31 @@ return result; } +static PyObject * +map_reduce(mapobject *lz) +{ + Py_ssize_t numargs = PyTuple_GET_SIZE(lz->iters); + PyObject *args = PyTuple_New(numargs+1); + Py_ssize_t i; + if (args == NULL) + return NULL; + Py_INCREF(lz->func); + PyTuple_SET_ITEM(args, 0, lz->func); + for (i = 0; i<numargs; i++) { + PyObject *it = PyTuple_GET_ITEM(lz->iters, i); + Py_INCREF(it); + PyTuple_SET_ITEM(args, i+1, it); + } + + return Py_BuildValue("ON", Py_TYPE(lz), args); +} + +static PyMethodDef map_methods[] = { + {"__reduce__", (PyCFunction)map_reduce, METH_NOARGS, reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + + PyDoc_STRVAR(map_doc, "map(func, *iterables) --> map object\n\ \n\ @@ -1090,7 +1128,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)map_next, /* tp_iternext */ - 0, /* tp_methods */ + map_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ @@ -2238,6 +2276,18 @@ return result; } +static PyObject * +zip_reduce(zipobject *lz) +{ + /* Just recreate the zip with the internal iterator tuple */ + return Py_BuildValue("OO", Py_TYPE(lz), lz->ittuple); +} + +static PyMethodDef zip_methods[] = { + {"__reduce__", (PyCFunction)zip_reduce, METH_NOARGS,
reduce_doc}, + {NULL, NULL} /* sentinel */ +}; + PyDoc_STRVAR(zip_doc, "zip(iter1 [,iter2 [...]]) --> zip object\n\ \n\ @@ -2276,7 +2326,7 @@ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ (iternextfunc)zip_next, /* tp_iternext */ - 0, /* tp_methods */ + zip_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */