Rietveld Code Review Tool

Unified Diff: Lib/test/test_hashlib.py

Issue 26798: add BLAKE2 to hashlib
Patch Set: Created 3 years, 1 month ago
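
The patch exposes blake2b and blake2s as hashlib constructors and tests their keyword parameters (key, salt, person, digest_size, and the tree-hashing fields). As a rough orientation for the diff below, here is a minimal usage sketch of the constructor keywords the new tests exercise; the message, key, salt, and person values are made up for illustration:

    import hashlib

    # Keyed hashing (MAC-style), as covered by the keyed test vectors below.
    mac = hashlib.blake2b(b"message data", key=b"pseudorandom key",
                          digest_size=32).hexdigest()

    # Salt and personalization stay within the limits the tests assert
    # (SALT_SIZE / PERSON_SIZE are 16 bytes for blake2b, 8 for blake2s).
    h = hashlib.blake2s(b"message data", salt=b"saltsalt", person=b"app-id")
    print(mac, h.hexdigest())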
 # Test hashlib module
 #
 # $Id$
 #
 # Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
 # Licensed to PSF under a Contributor Agreement.
 #

 import array
 import hashlib
 import itertools
 import os
 import sys
 try:
     import threading
 except ImportError:
     threading = None
 import unittest
 import warnings
 from test import support
 from test.support import _4G, bigmemtest, import_fresh_module

 # Were we compiled --with-pydebug or with #define Py_DEBUG?
 COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')

 c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
 py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
+
+try:
+    import _blake2
+except ImportError:
+    _blake2 = None
+
+requires_blake2 = unittest.skipUnless(_blake2, 'requires _blake2')
+


 def hexstr(s):
     assert isinstance(s, bytes), repr(s)
     h = "0123456789abcdef"
     r = ''
     for i in s:
         r += h[(i >> 4) & 0xF] + h[i & 0xF]
     return r
+
+
+URL = "https://raw.githubusercontent.com/tiran/python_vectors/master/{}.txt"
+
+def read_vectors(hash_name):
+    with support.open_urlresource(URL.format(hash_name)) as f:
+        for line in f:
+            line = line.strip()
+            if line.startswith('#') or not line:
+                continue
+            parts = line.split(',')
+            parts[0] = bytes.fromhex(parts[0])
+            yield parts


 class HashLibTestCase(unittest.TestCase):
     supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
                              'sha224', 'SHA224', 'sha256', 'SHA256',
-                             'sha384', 'SHA384', 'sha512', 'SHA512')
+                             'sha384', 'SHA384', 'sha512', 'SHA512',
+                             'blake2b', 'blake2s')

     # Issue #14693: fallback modules are always compiled under POSIX
     _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG

     def _conditional_import_module(self, module_name):
         """Import a module and return a reference to it or None on failure."""
         try:
             exec('import '+module_name)
         except ImportError as error:
             if self._warn_on_extension_import:
                 warnings.warn('Did a C extension fail to compile? %s' % error)
         return locals().get(module_name)

     def __init__(self, *args, **kwargs):
         algorithms = set()
         for algorithm in self.supported_hash_names:
             algorithms.add(algorithm.lower())
+
+        _blake2 = self._conditional_import_module('_blake2')
+        if _blake2:
+            algorithms.update({'blake2b', 'blake2s'})
+
         self.constructors_to_test = {}
         for algorithm in algorithms:
             self.constructors_to_test[algorithm] = set()

         # For each algorithm, test the direct constructor and the use
         # of hashlib.new given the algorithm name.
         for algorithm, constructors in self.constructors_to_test.items():
             constructors.add(getattr(hashlib, algorithm))
-            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
+            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
                 if data is None:
-                    return hashlib.new(_alg)
-                return hashlib.new(_alg, data)
+                    return hashlib.new(_alg, **kwargs)
+                return hashlib.new(_alg, data, **kwargs)
             constructors.add(_test_algorithm_via_hashlib_new)

         _hashlib = self._conditional_import_module('_hashlib')
         if _hashlib:
             # These two algorithms should always be present when this module
             # is compiled. If not, something was compiled wrong.
             self.assertTrue(hasattr(_hashlib, 'openssl_md5'))
             self.assertTrue(hasattr(_hashlib, 'openssl_sha1'))
             for algorithm, constructors in self.constructors_to_test.items():
                 constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
(...skipping 11 matching lines...)
         if _sha1:
             add_builtin_constructor('sha1')
         _sha256 = self._conditional_import_module('_sha256')
         if _sha256:
             add_builtin_constructor('sha224')
             add_builtin_constructor('sha256')
         _sha512 = self._conditional_import_module('_sha512')
         if _sha512:
             add_builtin_constructor('sha384')
             add_builtin_constructor('sha512')
+        if _blake2:
+            add_builtin_constructor('blake2s')
+            add_builtin_constructor('blake2b')

         super(HashLibTestCase, self).__init__(*args, **kwargs)

     @property
     def hash_constructors(self):
         constructors = self.constructors_to_test.values()
         return itertools.chain.from_iterable(constructors)

     def test_hash_array(self):
         a = array.array("b", range(10))
(...skipping 74 matching lines...)
         self.assertEqual(m1.digest(), m3.digest())

         # verify copy() doesn't touch original
         m4 = cons(aas + bees + cees)
         m4_digest = m4.digest()
         m4_copy = m4.copy()
         m4_copy.update(dees)
         self.assertEqual(m1.digest(), m4_copy.digest())
         self.assertEqual(m4.digest(), m4_digest)

-    def check(self, name, data, hexdigest):
+    def check(self, name, data, hexdigest, **kwargs):
         hexdigest = hexdigest.lower()
         constructors = self.constructors_to_test[name]
         # 2 is for hashlib.name(...) and hashlib.new(name, ...)
         self.assertGreaterEqual(len(constructors), 2)
         for hash_object_constructor in constructors:
-            m = hash_object_constructor(data)
+            m = hash_object_constructor(data, **kwargs)
             computed = m.hexdigest()
             self.assertEqual(
                     computed, hexdigest,
                     "Hash algorithm %s constructed using %s returned hexdigest"
                     " %r for %d byte input data that should have hashed to %r."
                     % (name, hash_object_constructor,
                        computed, len(data), hexdigest))
             computed = m.digest()
             digest = bytes.fromhex(hexdigest)
             self.assertEqual(computed, digest)
             self.assertEqual(len(digest), m.digest_size)

     def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
         constructors = self.constructors_to_test[algorithm_name]
         for hash_object_constructor in constructors:
             self.assertRaises(TypeError, hash_object_constructor, 'spam')

     def test_no_unicode(self):
         self.check_no_unicode('md5')
         self.check_no_unicode('sha1')
         self.check_no_unicode('sha224')
         self.check_no_unicode('sha256')
         self.check_no_unicode('sha384')
         self.check_no_unicode('sha512')
+
+    @requires_blake2
+    def test_no_unicode_blake2(self):
+        self.check_no_unicode('blake2b')
+        self.check_no_unicode('blake2s')

     def check_blocksize_name(self, name, block_size=0, digest_size=0):
         constructors = self.constructors_to_test[name]
         for hash_object_constructor in constructors:
             m = hash_object_constructor()
             self.assertEqual(m.block_size, block_size)
             self.assertEqual(m.digest_size, digest_size)
             self.assertEqual(len(m.digest()), digest_size)
             self.assertEqual(m.name, name)
             # split for sha3_512 / _sha3.sha3 object
             self.assertIn(name.split("_")[0], repr(m))

     def test_blocksize_name(self):
         self.check_blocksize_name('md5', 64, 16)
         self.check_blocksize_name('sha1', 64, 20)
         self.check_blocksize_name('sha224', 64, 28)
         self.check_blocksize_name('sha256', 64, 32)
         self.check_blocksize_name('sha384', 128, 48)
         self.check_blocksize_name('sha512', 128, 64)
+
+    @requires_blake2
+    def test_blocksize_name_blake2(self):
+        self.check_blocksize_name('blake2b', 128, 64)
+        self.check_blocksize_name('blake2s', 64, 32)

     def test_case_md5_0(self):
         self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')

     def test_case_md5_1(self):
         self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72')

     def test_case_md5_2(self):
         self.check('md5',
                    b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
(...skipping 108 matching lines...)
         self.check('sha512',
                    b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
                    b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
           "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
           "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")

     def test_case_sha512_3(self):
         self.check('sha512', b"a" * 1000000,
           "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
           "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
+
+    def check_blake2(self, constructor, salt_size, person_size, key_size,
+                     digest_size, max_offset):
+        self.assertEqual(constructor.SALT_SIZE, salt_size)
+        for i in range(salt_size + 1):
+            constructor(salt=b'a' * i)
+        salt = b'a' * (salt_size + 1)
+        self.assertRaises(ValueError, constructor, salt=salt)
+
+        self.assertEqual(constructor.PERSON_SIZE, person_size)
+        for i in range(person_size+1):
+            constructor(person=b'a' * i)
+        person = b'a' * (person_size + 1)
+        self.assertRaises(ValueError, constructor, person=person)
+
+        self.assertEqual(constructor.MAX_DIGEST_SIZE, digest_size)
+        for i in range(1, digest_size + 1):
+            constructor(digest_size=i)
+        self.assertRaises(ValueError, constructor, digest_size=-1)
+        self.assertRaises(ValueError, constructor, digest_size=0)
+        self.assertRaises(ValueError, constructor, digest_size=digest_size+1)
+
+        self.assertEqual(constructor.MAX_KEY_SIZE, key_size)
+        for i in range(key_size+1):
+            constructor(key=b'a' * i)
+        key = b'a' * (key_size + 1)
+        self.assertRaises(ValueError, constructor, key=key)
+        self.assertEqual(constructor().hexdigest(),
+                         constructor(key=b'').hexdigest())
+
+        for i in range(0, 256):
+            constructor(fanout=i)
+        self.assertRaises(ValueError, constructor, fanout=-1)
+        self.assertRaises(ValueError, constructor, fanout=256)
+
+        for i in range(1, 256):
+            constructor(depth=i)
+        self.assertRaises(ValueError, constructor, depth=-1)
+        self.assertRaises(ValueError, constructor, depth=0)
+        self.assertRaises(ValueError, constructor, depth=256)
+
+        for i in range(0, 256):
+            constructor(node_depth=i)
+        self.assertRaises(ValueError, constructor, node_depth=-1)
+        self.assertRaises(ValueError, constructor, node_depth=256)
+
+        for i in range(0, digest_size + 1):
+            constructor(inner_size=i)
+        self.assertRaises(ValueError, constructor, inner_size=-1)
+        self.assertRaises(ValueError, constructor, inner_size=digest_size+1)
+
+        constructor(leaf_size=0)
+        constructor(leaf_size=(1<<32)-1)
+        self.assertRaises(OverflowError, constructor, leaf_size=-1)
+        self.assertRaises(OverflowError, constructor, leaf_size=1<<32)
+
+        constructor(node_offset=0)
+        constructor(node_offset=max_offset)
+        self.assertRaises(OverflowError, constructor, node_offset=-1)
+        self.assertRaises(OverflowError, constructor, node_offset=max_offset+1)
+
+        constructor(
+            string=b'',
+            key=b'',
+            salt=b'',
+            person=b'',
+            digest_size=17,
+            fanout=1,
+            depth=1,
+            leaf_size=256,
+            node_offset=512,
+            node_depth=1,
+            inner_size=7,
+            last_node=True
+        )
+
+    def blake2_rfc7693(self, constructor, md_len, in_len):
+        def selftest_seq(length, seed):
+            mask = (1<<32)-1
+            a = (0xDEAD4BAD * seed) & mask
+            b = 1
+            out = bytearray(length)
+            for i in range(length):
+                t = (a + b) & mask
+                a, b = b, t
+                out[i] = (t >> 24) & 0xFF
+            return out
+        outer = constructor(digest_size=32)
+        for outlen in md_len:
+            for inlen in in_len:
+                indata = selftest_seq(inlen, inlen)
+                key = selftest_seq(outlen, outlen)
+                unkeyed = constructor(indata, digest_size=outlen)
+                outer.update(unkeyed.digest())
+                keyed = constructor(indata, key=key, digest_size=outlen)
+                outer.update(keyed.digest())
+        return outer.hexdigest()
+
+    @requires_blake2
+    def test_blake2b(self):
+        self.check_blake2(hashlib.blake2b, 16, 16, 64, 64, (1<<64)-1)
+        b2b_md_len = [20, 32, 48, 64]
+        b2b_in_len = [0, 3, 128, 129, 255, 1024]
+        self.assertEqual(
+            self.blake2_rfc7693(hashlib.blake2b, b2b_md_len, b2b_in_len),
+            "c23a7800d98123bd10f506c61e29da5603d763b8bbad2e737f5e765a7bccd475")
+
+    @requires_blake2
+    def test_case_blake2b_0(self):
+        self.check('blake2b', b"",
+            "786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419"+
+            "d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce")
+
+    @requires_blake2
+    def test_case_blake2b_1(self):
+        self.check('blake2b', b"abc",
+            "ba80a53f981c4d0d6a2797b69f12f6e94c212f14685ac4b74b12bb6fdbffa2d1"+
+            "7d87c5392aab792dc252d5de4533cc9518d38aa8dbf1925ab92386edd4009923")
+
+    @requires_blake2
+    def test_blake2b_vectors(self):
+        for msg, key, md in read_vectors('blake2b'):
+            key = bytes.fromhex(key)
+            self.check('blake2b', msg, md, key=key)
+
+    @requires_blake2
+    def test_blake2s(self):
+        self.check_blake2(hashlib.blake2s, 8, 8, 32, 32, (1<<48)-1)
+        b2s_md_len = [16, 20, 28, 32]
+        b2s_in_len = [0, 3, 64, 65, 255, 1024]
+        self.assertEqual(
+            self.blake2_rfc7693(hashlib.blake2s, b2s_md_len, b2s_in_len),
+            "6a411f08ce25adcdfb02aba641451cec53c598b24f4fc787fbdc88797f4c1dfe")
+
+    @requires_blake2
+    def test_case_blake2s_0(self):
+        self.check('blake2s', b"",
+            "69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9")
+
+    @requires_blake2
+    def test_case_blake2s_1(self):
+        self.check('blake2s', b"abc",
+            "508c5e8c327c14e2e1a72ba34eeb452f37458b209ed63a294d999b4c86675982")
+
+    @requires_blake2
+    def test_blake2s_vectors(self):
+        for msg, key, md in read_vectors('blake2s'):
+            key = bytes.fromhex(key)
+            self.check('blake2s', msg, md, key=key)

     def test_gil(self):
         # Check things work fine with an input larger than the size required
         # for multithreaded operation (which is hardwired to 2048).
         gil_minsize = 2048

         for cons in self.hash_constructors:
             m = cons()
             m.update(b'1')
             m.update(b'#' * gil_minsize)
(...skipping 136 matching lines...)
         self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac)

     @unittest.skipUnless(hasattr(c_hashlib, 'pbkdf2_hmac'),
                          ' test requires OpenSSL > 1.0')
     def test_pbkdf2_hmac_c(self):
         self._test_pbkdf2_hmac(c_hashlib.pbkdf2_hmac)


 if __name__ == "__main__":
     unittest.main()
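
The blake2b and blake2s vector tests fetch comma-separated files (message hex, key hex, expected digest, the layout implied by read_vectors() and test_blake2b_vectors above) from the python_vectors URL via support.open_urlresource, so they only run when the test suite is allowed to download resources. A small sketch of the same parse-and-check loop on an inline sample; the sample line is illustrative, with the digest taken from test_case_blake2s_1 above (empty key equals no key, as check_blake2 asserts):

    import hashlib

    # One vector line in the msg_hex,key_hex,digest_hex layout yielded by read_vectors().
    sample = "616263,,508c5e8c327c14e2e1a72ba34eeb452f37458b209ed63a294d999b4c86675982"
    msg_hex, key_hex, md = sample.split(',')
    msg, key = bytes.fromhex(msg_hex), bytes.fromhex(key_hex)
    assert hashlib.blake2s(msg, key=key).hexdigest() == md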