Rietveld Code Review Tool

Side by Side Diff: Lib/test/test_hashlib.py

Issue 16113: Add SHA-3 (Keccak) support
Patch Set: Created 3 years, 1 month ago
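
For orientation, a minimal sketch of the API this patch adds and that the tests below exercise (digest values are taken from the test cases in this diff; it assumes a build where the new _sha3 module is available):

import hashlib

# Fixed-length SHA-3 digests behave like the existing constructors.
hashlib.sha3_256(b"").hexdigest()
# -> 'a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a'

# SHAKE is an extendable-output function, so digest()/hexdigest()
# take the desired output length in bytes.
hashlib.shake_128(b"").hexdigest(16)
# -> '7f9c2ba4e88f827d616045507605853e'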
1 # Test hashlib module 1 # Test hashlib module
2 # 2 #
3 # $Id$ 3 # $Id$
4 # 4 #
5 # Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org) 5 # Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
6 # Licensed to PSF under a Contributor Agreement. 6 # Licensed to PSF under a Contributor Agreement.
7 # 7 #
8 8
9 import array 9 import array
10 import hashlib 10 import hashlib
11 import itertools 11 import itertools
12 import os 12 import os
13 import sys 13 import sys
14 try: 14 try:
15 import threading 15 import threading
16 except ImportError: 16 except ImportError:
17 threading = None 17 threading = None
18 import unittest 18 import unittest
19 import warnings 19 import warnings
20 from test import support 20 from test import support
21 from test.support import _4G, bigmemtest, import_fresh_module 21 from test.support import _4G, bigmemtest, import_fresh_module
22 22
23 # Were we compiled --with-pydebug or with #define Py_DEBUG? 23 # Were we compiled --with-pydebug or with #define Py_DEBUG?
24 COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') 24 COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
25 25
26 c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) 26 c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
27 py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) 27 py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
28
29 try:
30 import _sha3
31 except ImportError:
32 _sha3 = None
33
34 requires_sha3 = unittest.skipUnless(_sha3, 'requires _sha3')
35
28 36
29 def hexstr(s): 37 def hexstr(s):
30 assert isinstance(s, bytes), repr(s) 38 assert isinstance(s, bytes), repr(s)
31 h = "0123456789abcdef" 39 h = "0123456789abcdef"
32 r = '' 40 r = ''
33 for i in s: 41 for i in s:
34 r += h[(i >> 4) & 0xF] + h[i & 0xF] 42 r += h[(i >> 4) & 0xF] + h[i & 0xF]
35 return r 43 return r
36 44
37 45
46 URL = "https://raw.githubusercontent.com/tiran/python_vectors/master/{}.txt"
47
48 def read_vectors(hash_name):
49 with support.open_urlresource(URL.format(hash_name)) as f:
50 for line in f:
51 line = line.strip()
52 if line.startswith('#') or not line:
53 continue
54 msg, md = line.split(',')
55 yield bytes.fromhex(msg), md
56
57
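A hedged illustration of how this helper is meant to be consumed (the vector test cases below do the equivalent through self.check()); the file format is inferred from the parsing code above, so the sample data line is hypothetical:

# Each non-comment, non-blank line is '<hex message>,<hex digest>', e.g.:
#   616263,3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532
for msg, md in read_vectors('sha3_256'):
    assert hashlib.sha3_256(msg).hexdigest() == md.lower()
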
38 class HashLibTestCase(unittest.TestCase): 58 class HashLibTestCase(unittest.TestCase):
39 supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', 59 supported_hash_names = {'md5', 'MD5', 'sha1', 'SHA1',
40 'sha224', 'SHA224', 'sha256', 'SHA256', 60 'sha224', 'SHA224', 'sha256', 'SHA256',
41 'sha384', 'SHA384', 'sha512', 'SHA512') 61 'sha384', 'SHA384', 'sha512', 'SHA512',
62 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
63 'shake_128', 'shake_256'}
64
65 shakes = {'shake_128', 'shake_256'}
42 66
43 # Issue #14693: fallback modules are always compiled under POSIX 67 # Issue #14693: fallback modules are always compiled under POSIX
44 _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG 68 _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG
45 69
46 def _conditional_import_module(self, module_name): 70 def _conditional_import_module(self, module_name):
47 """Import a module and return a reference to it or None on failure.""" 71 """Import a module and return a reference to it or None on failure."""
48 try: 72 try:
49 exec('import '+module_name) 73 exec('import '+module_name)
50 except ImportError as error: 74 except ImportError as error:
51 if self._warn_on_extension_import: 75 if self._warn_on_extension_import:
52 warnings.warn('Did a C extension fail to compile? %s' % error) 76 warnings.warn('Did a C extension fail to compile? %s' % error)
53 return locals().get(module_name) 77 return locals().get(module_name)
54 78
55 def __init__(self, *args, **kwargs): 79 def __init__(self, *args, **kwargs):
56 algorithms = set() 80 algorithms = set()
57 for algorithm in self.supported_hash_names: 81 for algorithm in self.supported_hash_names:
58 algorithms.add(algorithm.lower()) 82 algorithms.add(algorithm.lower())
83
59 self.constructors_to_test = {} 84 self.constructors_to_test = {}
60 for algorithm in algorithms: 85 for algorithm in algorithms:
61 self.constructors_to_test[algorithm] = set() 86 self.constructors_to_test[algorithm] = set()
62 87
63 # For each algorithm, test the direct constructor and the use 88 # For each algorithm, test the direct constructor and the use
64 # of hashlib.new given the algorithm name. 89 # of hashlib.new given the algorithm name.
65 for algorithm, constructors in self.constructors_to_test.items(): 90 for algorithm, constructors in self.constructors_to_test.items():
66 constructors.add(getattr(hashlib, algorithm)) 91 constructors.add(getattr(hashlib, algorithm))
67 def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm): 92 def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
68 if data is None: 93 if data is None:
(...skipping 23 matching lines...)
92 if _sha1: 117 if _sha1:
93 add_builtin_constructor('sha1') 118 add_builtin_constructor('sha1')
94 _sha256 = self._conditional_import_module('_sha256') 119 _sha256 = self._conditional_import_module('_sha256')
95 if _sha256: 120 if _sha256:
96 add_builtin_constructor('sha224') 121 add_builtin_constructor('sha224')
97 add_builtin_constructor('sha256') 122 add_builtin_constructor('sha256')
98 _sha512 = self._conditional_import_module('_sha512') 123 _sha512 = self._conditional_import_module('_sha512')
99 if _sha512: 124 if _sha512:
100 add_builtin_constructor('sha384') 125 add_builtin_constructor('sha384')
101 add_builtin_constructor('sha512') 126 add_builtin_constructor('sha512')
127
128 _sha3 = self._conditional_import_module('_sha3')
129 if _sha3:
130 add_builtin_constructor('sha3_224')
131 add_builtin_constructor('sha3_256')
132 add_builtin_constructor('sha3_384')
133 add_builtin_constructor('sha3_512')
134 add_builtin_constructor('shake_128')
135 add_builtin_constructor('shake_256')
102 136
103 super(HashLibTestCase, self).__init__(*args, **kwargs) 137 super(HashLibTestCase, self).__init__(*args, **kwargs)
104 138
105 @property 139 @property
106 def hash_constructors(self): 140 def hash_constructors(self):
107 constructors = self.constructors_to_test.values() 141 constructors = self.constructors_to_test.values()
108 return itertools.chain.from_iterable(constructors) 142 return itertools.chain.from_iterable(constructors)
109 143
110 def test_hash_array(self): 144 def test_hash_array(self):
111 a = array.array("b", range(10)) 145 a = array.array("b", range(10))
112 for cons in self.hash_constructors: 146 for cons in self.hash_constructors:
113 c = cons(a) 147 c = cons(a)
114 c.hexdigest() 148 if c.name in self.shakes:
149 c.hexdigest(16)
150 else:
151 c.hexdigest()
115 152
116 def test_algorithms_guaranteed(self): 153 def test_algorithms_guaranteed(self):
117 self.assertEqual(hashlib.algorithms_guaranteed, 154 self.assertEqual(hashlib.algorithms_guaranteed,
118 set(_algo for _algo in self.supported_hash_names 155 set(_algo for _algo in self.supported_hash_names
119 if _algo.islower())) 156 if _algo.islower()))
120 157
121 def test_algorithms_available(self): 158 def test_algorithms_available(self):
122 self.assertTrue(set(hashlib.algorithms_guaranteed). 159 self.assertTrue(set(hashlib.algorithms_guaranteed).
123 issubset(hashlib.algorithms_available)) 160 issubset(hashlib.algorithms_available))
124 161
(...skipping 23 matching lines...)
148 else: 185 else:
149 del sys.modules['_md5'] 186 del sys.modules['_md5']
150 self.assertRaises(TypeError, get_builtin_constructor, 3) 187 self.assertRaises(TypeError, get_builtin_constructor, 3)
151 constructor = get_builtin_constructor('md5') 188 constructor = get_builtin_constructor('md5')
152 self.assertIs(constructor, _md5.md5) 189 self.assertIs(constructor, _md5.md5)
153 self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5']) 190 self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5'])
154 191
155 def test_hexdigest(self): 192 def test_hexdigest(self):
156 for cons in self.hash_constructors: 193 for cons in self.hash_constructors:
157 h = cons() 194 h = cons()
158 self.assertIsInstance(h.digest(), bytes) 195 if h.name in self.shakes:
159 self.assertEqual(hexstr(h.digest()), h.hexdigest()) 196 self.assertIsInstance(h.digest(16), bytes)
197 self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16))
198 else:
199 self.assertIsInstance(h.digest(), bytes)
200 self.assertEqual(hexstr(h.digest()), h.hexdigest())
160 201
161 def test_name_attribute(self): 202 def test_name_attribute(self):
162 for cons in self.hash_constructors: 203 for cons in self.hash_constructors:
163 h = cons() 204 h = cons()
164 self.assertIsInstance(h.name, str) 205 self.assertIsInstance(h.name, str)
165 self.assertIn(h.name, self.supported_hash_names) 206 if h.name in self.supported_hash_names:
207 self.assertIn(h.name, self.supported_hash_names)
208 else:
209 self.assertNotIn(h.name, self.supported_hash_names)
166 self.assertEqual(h.name, hashlib.new(h.name).name) 210 self.assertEqual(h.name, hashlib.new(h.name).name)
167 211
168 def test_large_update(self): 212 def test_large_update(self):
169 aas = b'a' * 128 213 aas = b'a' * 128
170 bees = b'b' * 127 214 bees = b'b' * 127
171 cees = b'c' * 126 215 cees = b'c' * 126
172 dees = b'd' * 2048 # HASHLIB_GIL_MINSIZE 216 dees = b'd' * 2048 # HASHLIB_GIL_MINSIZE
173 217
174 for cons in self.hash_constructors: 218 for cons in self.hash_constructors:
175 m1 = cons() 219 m1 = cons()
176 m1.update(aas) 220 m1.update(aas)
177 m1.update(bees) 221 m1.update(bees)
178 m1.update(cees) 222 m1.update(cees)
179 m1.update(dees) 223 m1.update(dees)
224 if m1.name in self.shakes:
225 args = (16,)
226 else:
227 args = ()
180 228
181 m2 = cons() 229 m2 = cons()
182 m2.update(aas + bees + cees + dees) 230 m2.update(aas + bees + cees + dees)
183 self.assertEqual(m1.digest(), m2.digest()) 231 self.assertEqual(m1.digest(*args), m2.digest(*args))
184 232
185 m3 = cons(aas + bees + cees + dees) 233 m3 = cons(aas + bees + cees + dees)
186 self.assertEqual(m1.digest(), m3.digest()) 234 self.assertEqual(m1.digest(*args), m3.digest(*args))
187 235
188 # verify copy() doesn't touch original 236 # verify copy() doesn't touch original
189 m4 = cons(aas + bees + cees) 237 m4 = cons(aas + bees + cees)
190 m4_digest = m4.digest() 238 m4_digest = m4.digest(*args)
191 m4_copy = m4.copy() 239 m4_copy = m4.copy()
192 m4_copy.update(dees) 240 m4_copy.update(dees)
193 self.assertEqual(m1.digest(), m4_copy.digest()) 241 self.assertEqual(m1.digest(*args), m4_copy.digest(*args))
194 self.assertEqual(m4.digest(), m4_digest) 242 self.assertEqual(m4.digest(*args), m4_digest)
195 243
196 def check(self, name, data, hexdigest): 244 def check(self, name, data, hexdigest, shake=False):
245 length = len(hexdigest)//2
197 hexdigest = hexdigest.lower() 246 hexdigest = hexdigest.lower()
198 constructors = self.constructors_to_test[name] 247 constructors = self.constructors_to_test[name]
199 # 2 is for hashlib.name(...) and hashlib.new(name, ...) 248 # 2 is for hashlib.name(...) and hashlib.new(name, ...)
200 self.assertGreaterEqual(len(constructors), 2) 249 self.assertGreaterEqual(len(constructors), 2)
201 for hash_object_constructor in constructors: 250 for hash_object_constructor in constructors:
202 m = hash_object_constructor(data) 251 m = hash_object_constructor(data)
203 computed = m.hexdigest() 252 computed = m.hexdigest() if not shake else m.hexdigest(length)
204 self.assertEqual( 253 self.assertEqual(
205 computed, hexdigest, 254 computed, hexdigest,
206 "Hash algorithm %s constructed using %s returned hexdigest" 255 "Hash algorithm %s constructed using %s returned hexdigest"
207 " %r for %d byte input data that should have hashed to %r." 256 " %r for %d byte input data that should have hashed to %r."
208 % (name, hash_object_constructor, 257 % (name, hash_object_constructor,
209 computed, len(data), hexdigest)) 258 computed, len(data), hexdigest))
210 computed = m.digest() 259 computed = m.digest() if not shake else m.digest(length)
211 digest = bytes.fromhex(hexdigest) 260 digest = bytes.fromhex(hexdigest)
212 self.assertEqual(computed, digest) 261 self.assertEqual(computed, digest)
213 self.assertEqual(len(digest), m.digest_size) 262 if not shake:
263 self.assertEqual(len(digest), m.digest_size)
214 264
215 def check_no_unicode(self, algorithm_name): 265 def check_no_unicode(self, algorithm_name):
216 # Unicode objects are not allowed as input. 266 # Unicode objects are not allowed as input.
217 constructors = self.constructors_to_test[algorithm_name] 267 constructors = self.constructors_to_test[algorithm_name]
218 for hash_object_constructor in constructors: 268 for hash_object_constructor in constructors:
219 self.assertRaises(TypeError, hash_object_constructor, 'spam') 269 self.assertRaises(TypeError, hash_object_constructor, 'spam')
220 270
221 def test_no_unicode(self): 271 def test_no_unicode(self):
222 self.check_no_unicode('md5') 272 self.check_no_unicode('md5')
223 self.check_no_unicode('sha1') 273 self.check_no_unicode('sha1')
224 self.check_no_unicode('sha224') 274 self.check_no_unicode('sha224')
225 self.check_no_unicode('sha256') 275 self.check_no_unicode('sha256')
226 self.check_no_unicode('sha384') 276 self.check_no_unicode('sha384')
227 self.check_no_unicode('sha512') 277 self.check_no_unicode('sha512')
228 278
229 def check_blocksize_name(self, name, block_size=0, digest_size=0): 279 @requires_sha3
280 def test_no_unicode_sha3(self):
281 self.check_no_unicode('sha3_224')
282 self.check_no_unicode('sha3_256')
283 self.check_no_unicode('sha3_384')
284 self.check_no_unicode('sha3_512')
285 self.check_no_unicode('shake_128')
286 self.check_no_unicode('shake_256')
287
288 def check_blocksize_name(self, name, block_size=0, digest_size=0,
289 digest_length=None):
230 constructors = self.constructors_to_test[name] 290 constructors = self.constructors_to_test[name]
231 for hash_object_constructor in constructors: 291 for hash_object_constructor in constructors:
232 m = hash_object_constructor() 292 m = hash_object_constructor()
233 self.assertEqual(m.block_size, block_size) 293 self.assertEqual(m.block_size, block_size)
234 self.assertEqual(m.digest_size, digest_size) 294 self.assertEqual(m.digest_size, digest_size)
235 self.assertEqual(len(m.digest()), digest_size) 295 if digest_length:
296 self.assertEqual(len(m.digest(digest_length)),
297 digest_length)
298 self.assertEqual(len(m.hexdigest(digest_length)),
299 2*digest_length)
300 else:
301 self.assertEqual(len(m.digest()), digest_size)
302 self.assertEqual(len(m.hexdigest()), 2*digest_size)
236 self.assertEqual(m.name, name) 303 self.assertEqual(m.name, name)
237 # split for sha3_512 / _sha3.sha3 object 304 # split for sha3_512 / _sha3.sha3 object
238 self.assertIn(name.split("_")[0], repr(m)) 305 self.assertIn(name.split("_")[0], repr(m))
239 306
240 def test_blocksize_name(self): 307 def test_blocksize_name(self):
241 self.check_blocksize_name('md5', 64, 16) 308 self.check_blocksize_name('md5', 64, 16)
242 self.check_blocksize_name('sha1', 64, 20) 309 self.check_blocksize_name('sha1', 64, 20)
243 self.check_blocksize_name('sha224', 64, 28) 310 self.check_blocksize_name('sha224', 64, 28)
244 self.check_blocksize_name('sha256', 64, 32) 311 self.check_blocksize_name('sha256', 64, 32)
245 self.check_blocksize_name('sha384', 128, 48) 312 self.check_blocksize_name('sha384', 128, 48)
246 self.check_blocksize_name('sha512', 128, 64) 313 self.check_blocksize_name('sha512', 128, 64)
314 self.check_blocksize_name('sha3_224', 144, 28)
315 self.check_blocksize_name('sha3_256', 136, 32)
316 self.check_blocksize_name('sha3_384', 104, 48)
317 self.check_blocksize_name('sha3_512', 72, 64)
318 self.check_blocksize_name('shake_128', 168, 0, 32)
319 self.check_blocksize_name('shake_256', 136, 0, 64)
247 320
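The block sizes asserted above correspond to the Keccak sponge rate: the state is 1600 bits and the capacity is twice the digest size (256 bits for shake_128, 512 bits for shake_256), so block_size = (1600 - capacity) // 8 bytes. A quick sanity check of the constants used above:

# Rate in bytes for each variant: (1600 - capacity) // 8.
for name, capacity in [('sha3_224', 448), ('sha3_256', 512),
                       ('sha3_384', 768), ('sha3_512', 1024),
                       ('shake_128', 256), ('shake_256', 512)]:
    print(name, (1600 - capacity) // 8)
# sha3_224 144, sha3_256 136, sha3_384 104, sha3_512 72,
# shake_128 168, shake_256 136 -- matching the block sizes checked above.
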
248 def test_case_md5_0(self): 321 def test_case_md5_0(self):
249 self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') 322 self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
250 323
251 def test_case_md5_1(self): 324 def test_case_md5_1(self):
252 self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72') 325 self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72')
253 326
254 def test_case_md5_2(self): 327 def test_case_md5_2(self):
255 self.check('md5', 328 self.check('md5',
256 b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', 329 b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
(...skipping 108 matching lines...)
365 self.check('sha512', 438 self.check('sha512',
366 b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+ 439 b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
367 b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu", 440 b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
368 "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+ 441 "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
369 "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909") 442 "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")
370 443
371 def test_case_sha512_3(self): 444 def test_case_sha512_3(self):
372 self.check('sha512', b"a" * 1000000, 445 self.check('sha512', b"a" * 1000000,
373 "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+ 446 "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
374 "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b") 447 "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
448
449 @requires_sha3
450 def test_case_sha3_224_0(self):
451 self.check('sha3_224', b"",
452 "6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7")
453
454 @requires_sha3
455 def test_case_sha3_224_vector(self):
456 for msg, md in read_vectors('sha3_224'):
457 self.check('sha3_224', msg, md)
458
459 @requires_sha3
460 def test_case_sha3_256_0(self):
461 self.check('sha3_256', b"",
462 "a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a")
463
464 @requires_sha3
465 def test_case_sha3_256_vector(self):
466 for msg, md in read_vectors('sha3_256'):
467 self.check('sha3_256', msg, md)
468
469 @requires_sha3
470 def test_case_sha3_384_0(self):
471 self.check('sha3_384', b"",
472 "0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2a"+
473 "c3713831264adb47fb6bd1e058d5f004")
474
475 @requires_sha3
476 def test_case_sha3_384_vector(self):
477 for msg, md in read_vectors('sha3_384'):
478 self.check('sha3_384', msg, md)
479
480 @requires_sha3
481 def test_case_sha3_512_0(self):
482 self.check('sha3_512', b"",
483 "a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a6"+
484 "15b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26")
485
486 @requires_sha3
487 def test_case_sha3_512_vector(self):
488 for msg, md in read_vectors('sha3_512'):
489 self.check('sha3_512', msg, md)
490
491 @requires_sha3
492 def test_case_shake_128_0(self):
493 self.check('shake_128', b"",
494 "7f9c2ba4e88f827d616045507605853ed73b8093f6efbc88eb1a6eacfa66ef26",
495 True)
496 self.check('shake_128', b"", "7f9c", True)
497
498 @requires_sha3
499 def test_case_shake128_vector(self):
500 for msg, md in read_vectors('shake_128'):
501 self.check('shake_128', msg, md, True)
502
503 @requires_sha3
504 def test_case_shake_256_0(self):
505 self.check('shake_256', b"",
506 "46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
507 True)
508 self.check('shake_256', b"", "46b9", True)
509
510 @requires_sha3
511 def test_case_shake256_vector(self):
512 for msg, md in read_vectors('shake_256'):
513 self.check('shake_256', msg, md, True)
375 514
376 def test_gil(self): 515 def test_gil(self):
377 # Check things work fine with an input larger than the size required 516 # Check things work fine with an input larger than the size required
378 # for multithreaded operation (which is hardwired to 2048). 517 # for multithreaded operation (which is hardwired to 2048).
379 gil_minsize = 2048 518 gil_minsize = 2048
380 519
381 for cons in self.hash_constructors: 520 for cons in self.hash_constructors:
382 m = cons() 521 m = cons()
383 m.update(b'1') 522 m.update(b'1')
384 m.update(b'#' * gil_minsize) 523 m.update(b'#' * gil_minsize)
(...skipping 136 matching lines...)
521 self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac) 660 self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac)
522 661
523 @unittest.skipUnless(hasattr(c_hashlib, 'pbkdf2_hmac'), 662 @unittest.skipUnless(hasattr(c_hashlib, 'pbkdf2_hmac'),
524 ' test requires OpenSSL > 1.0') 663 ' test requires OpenSSL > 1.0')
525 def test_pbkdf2_hmac_c(self): 664 def test_pbkdf2_hmac_c(self):
526 self._test_pbkdf2_hmac(c_hashlib.pbkdf2_hmac) 665 self._test_pbkdf2_hmac(c_hashlib.pbkdf2_hmac)
527 666
528 667
529 if __name__ == "__main__": 668 if __name__ == "__main__":
530 unittest.main() 669 unittest.main()
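
A note on running this file against a patched build: the *_vector test cases fetch their data with support.open_urlresource(), which is gated on the urlfetch test resource, so a suggested invocation is ./python -m test -u urlfetch test_hashlib; without that resource the vector tests are skipped.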