import multiprocessing.pool, zipfile

# Create a ZipFile which contains 1000 files which contain 10000 0-bytes
with zipfile.ZipFile("test.zip", "w", zipfile.ZIP_STORED) as z:
    files = [(str(i), b"0"*10000) for i in range(1000)]
    for name, data in files:
        z.writestr(name, data)

# Read files in ZipFile with two threads at once, crashes in seconds
with zipfile.ZipFile("test.zip", "r") as z:
    pool = multiprocessing.pool.ThreadPool(2)

    def read_file(args):
        name, data = args
        assert(data == z.read(name))

    while True:
        pool.map(read_file, files)

"""
Traceback (most recent call last):
  File "test.py", line 18, in <module>
    pool.map(read_file, files)
  File "/usr/lib/python3.8/multiprocessing/pool.py", line 364, in map
    return self._map_async(func, iterable, mapstar, chunksize).get()
  File "/usr/lib/python3.8/multiprocessing/pool.py", line 771, in get
    raise self._value
  File "/usr/lib/python3.8/multiprocessing/pool.py", line 125, in worker
    result = (True, func(*args, **kwds))
  File "/usr/lib/python3.8/multiprocessing/pool.py", line 48, in mapstar
    return list(map(*args))
  File "test.py", line 15, in read_file
    assert(data == z.read(name))
  File "/usr/lib/python3.8/zipfile.py", line 1476, in read
    return fp.read()
  File "/usr/lib/python3.8/zipfile.py", line 926, in read
    buf += self._read1(self.MAX_N)
  File "/usr/lib/python3.8/zipfile.py", line 1030, in _read1
    self._update_crc(data)
  File "/usr/lib/python3.8/zipfile.py", line 958, in _update_crc
    raise BadZipFile("Bad CRC-32 for file %r" % self.name)
zipfile.BadZipFile: Bad CRC-32 for file '127'
"""
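
For reference, a minimal workaround sketch, not part of the original report: the reproduction shares a single ZipFile object (`z`) across both worker threads, and the CRC errors disappear if each thread reads through its own ZipFile handle, since concurrent reads then never interleave on the same underlying file object. The names `local` and `read_file_safe` below are illustrative assumptions, and the snippet expects the test.zip created above to exist.

import multiprocessing.pool, threading, zipfile

# Workaround sketch (assumption, not from the report): one ZipFile per thread.
local = threading.local()

def read_file_safe(args):
    name, data = args
    # Lazily open a private ZipFile handle for the current thread and reuse it,
    # so no two threads ever share the same file position.
    if not hasattr(local, "zf"):
        local.zf = zipfile.ZipFile("test.zip", "r")
    assert data == local.zf.read(name)

files = [(str(i), b"0" * 10000) for i in range(1000)]
pool = multiprocessing.pool.ThreadPool(2)
pool.map(read_file_safe, files)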