diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -648,7 +648,7 @@
     STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
     open as tokenize_open, Untokenizer)
 from io import BytesIO
-from unittest import TestCase
+from unittest import TestCase, mock
 import os, sys, glob
 import token
 
@@ -1060,6 +1060,17 @@
         ins = Bunk(lines, path)
         detect_encoding(ins.readline)
 
+    def test_close_file_when_exception(self):
+        # Verify tokenize.open() closes the underlying binary buffer
+        # when detect_encoding() rejects an unrecognized coding cookie.
+        m = mock.mock_open(read_data=b'#coding:xxx')
+        with mock.patch('tokenize._builtin_open', m):
+            try:
+                tokenize_open('foobar')
+            # detect_encoding() raises SyntaxError for an unknown encoding;
+            # catch only that so unrelated test bugs are not swallowed.
+            except SyntaxError:
+                pass
+        self.assertTrue(m().close.called)
+
 
 class TestTokenize(TestCase):
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -435,9 +435,15 @@
     detect_encoding().
     """
     buffer = _builtin_open(filename, 'rb')
-    encoding, lines = detect_encoding(buffer.readline)
-    buffer.seek(0)
-    text = TextIOWrapper(buffer, encoding, line_buffering=True)
-    text.mode = 'r'
-    return text
+    try:
+        encoding, lines = detect_encoding(buffer.readline)
+        buffer.seek(0)
+        text = TextIOWrapper(buffer, encoding, line_buffering=True)
+        text.mode = 'r'
+        return text
+    # One try block covers everything after the open: seek() or the
+    # TextIOWrapper construction can fail too, not just detect_encoding().
+    # Deliberately bare except: close the fd on ANY exception (including
+    # KeyboardInterrupt), then re-raise unchanged.
+    except:
+        buffer.close()
+        raise