#!/usr/bin/python3

import time
import inspect
import zipfile
import textwrap
import os
import shutil
import struct
from contextlib import contextmanager
from zipfile import (ZipInfo, ZIP_LZMA, _get_compressor, ZIP64_LIMIT, crc32,
                     ZIP_BZIP2)


def write(self, filename, arcname=None, compress_type=None):
    """This is zipfile.ZipFile.write as it should be."""
    zinfo = ZipInfo.from_file(filename, arcname)

    if zinfo.is_dir():
        zinfo.compress_size = 0
        zinfo.CRC = 0

        with self._lock:
            if self._seekable:
                self.fp.seek(self.start_dir)
            zinfo.header_offset = self.fp.tell()  # Start of header bytes

            self._writecheck(zinfo)
            self._didModify = True

            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader(False))
            self.start_dir = self.fp.tell()
        return

    # TODO: the file is opened twice, once in ZipInfo.from_file and once here.
    with open(filename, 'rb') as src:
        with self.writestream(zinfo, compress_type) as dst:
            shutil.copyfileobj(src, dst)


@contextmanager
def writestream(self, zinfo_or_arcname, compress_type=None, mtime=None,
                mode=0o600):
    """This is a modified copy of zipfile.ZipFile.write."""
    if not self.fp:
        raise RuntimeError(
            "Attempt to write to ZIP archive that was already closed")

    if isinstance(zinfo_or_arcname, ZipInfo):
        zinfo = zinfo_or_arcname
        if mtime is not None:
            zinfo.date_time = mtime[0:6]
    else:
        if mtime is None:
            mtime = time.localtime()
        # Create ZipInfo instance to store file information
        arcname = os.path.normpath(os.path.splitdrive(zinfo_or_arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        zinfo = ZipInfo(arcname, mtime[0:6])
        zinfo.external_attr = (mode & 0o7777) << 16  # Unix attributes
        zinfo.file_size = 0
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type

    class Writer:
        """File-like object yielded to the caller; counts, checksums and
        compresses everything written to it."""

        def __init__(self, fp):
            self.fp = fp

        def write(self, buf):
            nonlocal file_size
            nonlocal CRC
            nonlocal cmpr
            nonlocal compress_size
            nonlocal zip64
            if isinstance(buf, str):
                buf = buf.encode('utf-8')
            file_size += len(buf)
            CRC = crc32(buf, CRC) & 0xffffffff
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size += len(buf)
            if not zip64 and (file_size > ZIP64_LIMIT or
                              compress_size > ZIP64_LIMIT):
                raise RuntimeError(
                    '64-bit extension is not allowed, but too much data passed')
            self.fp.write(buf)

    zinfo.flag_bits = 0x00

    with self._lock:
        if self._seekable:
            self.fp.seek(self.start_dir)
        zinfo.header_offset = self.fp.tell()  # Start of header bytes
        if zinfo.compress_type == ZIP_LZMA:
            # Compressed data includes an end-of-stream (EOS) marker
            zinfo.flag_bits |= 0x02

        self._writecheck(zinfo)
        self._didModify = True

        cmpr = _get_compressor(zinfo.compress_type)
        if not self._seekable:
            zinfo.flag_bits |= 0x08

        # Must overwrite CRC and sizes with correct data later
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        # Compressed size can be larger than uncompressed size, and neither
        # is known in advance here, so the zip64 extension is required.
        if not self._allowZip64:
            raise RuntimeError('Stream mode requires zip64')
        zip64 = self._allowZip64
        self.fp.write(zinfo.FileHeader(zip64))
        file_size = 0

        # TODO: recover from errors correctly:
        #   on streaming: terminate the stream and do NOT write the entry
        #   to the central directory;
        #   on a seekable file: truncate the file back to where we started.
        yield Writer(self.fp)

        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        if not zip64 and (file_size > ZIP64_LIMIT or
                          compress_size > ZIP64_LIMIT):
            raise RuntimeError(
                '64-bit extension is not allowed, but too much data passed')

        if zinfo.flag_bits & 0x08:
            # Write CRC and file sizes after the file data (data descriptor),
            # as zipfile.ZipFile.write does for unseekable output.
            fmt = '<LQQ' if zip64 else '<LLL'
            self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
                                      zinfo.file_size))
            self.start_dir = self.fp.tell()
        else:
            # Seek backwards and rewrite the file header, which now contains
            # the correct CRC and sizes.
            self.start_dir = self.fp.tell()  # Preserve current position
            self.fp.seek(zinfo.header_offset)
            self.fp.write(zinfo.FileHeader(zip64))
            self.fp.seek(self.start_dir)

        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
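

# Usage sketch (not part of the original module, just an illustration under
# the assumption that the two functions above are meant to be bound onto
# zipfile.ZipFile as methods). The archive name 'streamed.zip' and the entry
# name 'data.txt' are placeholders.
if __name__ == '__main__':
    zipfile.ZipFile.write = write
    zipfile.ZipFile.writestream = writestream

    with zipfile.ZipFile('streamed.zip', 'w', zipfile.ZIP_DEFLATED,
                         allowZip64=True) as zf:
        # Stream an entry whose size is not known in advance.
        with zf.writestream('data.txt') as out:
            for i in range(3):
                out.write('line %d\n' % i)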