pile_js / EFForg__https-everywhere.jsonl
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"is_zipfile","parameters":"(filename)","argument_list":"","return_statement":"return result","docstring":"Quickly see if a file is a ZIP file by checking the magic number.\n\n The filename argument may be a file or file-like object too.","docstring_summary":"Quickly see if a file is a ZIP file by checking the magic number.","docstring_tokens":["Quickly","see","if","a","file","is","a","ZIP","file","by","checking","the","magic","number","."],"function":"def is_zipfile(filename):\n \"\"\"Quickly see if a file is a ZIP file by checking the magic number.\n\n The filename argument may be a file or file-like object too.\n \"\"\"\n result = False\n try:\n if hasattr(filename, \"read\"):\n result = _check_zipfile(fp=filename)\n else:\n with open(filename, \"rb\") as fp:\n result = _check_zipfile(fp)\n except OSError:\n pass\n return result","function_tokens":["def","is_zipfile","(","filename",")",":","result","=","False","try",":","if","hasattr","(","filename",",","\"read\"",")",":","result","=","_check_zipfile","(","fp","=","filename",")","else",":","with","open","(","filename",",","\"rb\"",")","as","fp",":","result","=","_check_zipfile","(","fp",")","except","OSError",":","pass","return","result"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L181-L195"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"_EndRecData64","parameters":"(fpin, offset, endrec)","argument_list":"","return_statement":"return endrec","docstring":"Read the ZIP64 end-of-archive records and use that to update endrec","docstring_summary":"Read the ZIP64 end-of-archive records and use that to update endrec","docstring_tokens":["Read","the","ZIP64","end","-","of","-","archive","records","and","use","that","to","update","endrec"],"function":"def _EndRecData64(fpin, offset, endrec):\n \"\"\"\n Read the ZIP64 end-of-archive records and use that to update endrec\n \"\"\"\n try:\n fpin.seek(offset - sizeEndCentDir64Locator, 2)\n except OSError:\n # If the seek fails, the file is not large enough to contain a ZIP64\n # end-of-archive record, so just return the end record we were given.\n return endrec\n\n data = fpin.read(sizeEndCentDir64Locator)\n if len(data) != sizeEndCentDir64Locator:\n return endrec\n sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)\n if sig != stringEndArchive64Locator:\n return endrec\n\n if diskno != 0 or disks != 1:\n raise BadZipFile(\"zipfiles that span multiple disks are not supported\")\n\n # Assume no 'zip64 extensible data'\n fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)\n data = fpin.read(sizeEndCentDir64)\n if len(data) != sizeEndCentDir64:\n return endrec\n sig, sz, create_version, read_version, disk_num, disk_dir, \\\n dircount, dircount2, dirsize, diroffset = \\\n struct.unpack(structEndArchive64, data)\n if sig != stringEndArchive64:\n return endrec\n\n # Update the original endrec using data from the ZIP64 record\n endrec[_ECD_SIGNATURE] = sig\n endrec[_ECD_DISK_NUMBER] = disk_num\n endrec[_ECD_DISK_START] = disk_dir\n endrec[_ECD_ENTRIES_THIS_DISK] = dircount\n endrec[_ECD_ENTRIES_TOTAL] = dircount2\n endrec[_ECD_SIZE] = dirsize\n endrec[_ECD_OFFSET] = diroffset\n return endrec","function_tokens":["def","_EndRecData64","(","fpin",",","offset",",","endrec",")",":","try",":","fpin",".","seek","(","offset","-","sizeEndCentDir64Locator",",","2",")","except","OSError",":","# If the seek fails, the file is not large enough to contain a ZIP64","# end-of-archive record, so just return the end record we were given.","return","endrec","data","=","fpin",".","read","(","sizeEndCentDir64Locator",")","if","len","(","data",")","!=","sizeEndCentDir64Locator",":","return","endrec","sig",",","diskno",",","reloff",",","disks","=","struct",".","unpack","(","structEndArchive64Locator",",","data",")","if","sig","!=","stringEndArchive64Locator",":","return","endrec","if","diskno","!=","0","or","disks","!=","1",":","raise","BadZipFile","(","\"zipfiles that span multiple disks are not supported\"",")","# Assume no 'zip64 extensible data'","fpin",".","seek","(","offset","-","sizeEndCentDir64Locator","-","sizeEndCentDir64",",","2",")","data","=","fpin",".","read","(","sizeEndCentDir64",")","if","len","(","data",")","!=","sizeEndCentDir64",":","return","endrec","sig",",","sz",",","create_version",",","read_version",",","disk_num",",","disk_dir",",","dircount",",","dircount2",",","dirsize",",","diroffset","=","struct",".","unpack","(","structEndArchive64",",","data",")","if","sig","!=","stringEndArchive64",":","return","endrec","# Update the original endrec using data from the ZIP64 
record","endrec","[","_ECD_SIGNATURE","]","=","sig","endrec","[","_ECD_DISK_NUMBER","]","=","disk_num","endrec","[","_ECD_DISK_START","]","=","disk_dir","endrec","[","_ECD_ENTRIES_THIS_DISK","]","=","dircount","endrec","[","_ECD_ENTRIES_TOTAL","]","=","dircount2","endrec","[","_ECD_SIZE","]","=","dirsize","endrec","[","_ECD_OFFSET","]","=","diroffset","return","endrec"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L197-L237"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"_EndRecData","parameters":"(fpin)","argument_list":"","return_statement":"return None","docstring":"Return data from the \"End of Central Directory\" record, or None.\n\n The data is a list of the nine items in the ZIP \"End of central dir\"\n record followed by a tenth item, the file seek offset of this record.","docstring_summary":"Return data from the \"End of Central Directory\" record, or None.","docstring_tokens":["Return","data","from","the","End","of","Central","Directory","record","or","None","."],"function":"def _EndRecData(fpin):\n \"\"\"Return data from the \"End of Central Directory\" record, or None.\n\n The data is a list of the nine items in the ZIP \"End of central dir\"\n record followed by a tenth item, the file seek offset of this record.\"\"\"\n\n # Determine file size\n fpin.seek(0, 2)\n filesize = fpin.tell()\n\n # Check to see if this is ZIP file with no archive comment (the\n # \"end of central directory\" structure should be the last item in the\n # file if this is the case).\n try:\n fpin.seek(-sizeEndCentDir, 2)\n except OSError:\n return None\n data = fpin.read()\n if (len(data) == sizeEndCentDir and\n data[0:4] == stringEndArchive and\n data[-2:] == b\"\\000\\000\"):\n # the signature is correct and there's no comment, unpack structure\n endrec = struct.unpack(structEndArchive, data)\n endrec=list(endrec)\n\n # Append a blank comment and record start offset\n endrec.append(b\"\")\n endrec.append(filesize - sizeEndCentDir)\n\n # Try to read the \"Zip64 end of central directory\" structure\n return _EndRecData64(fpin, -sizeEndCentDir, endrec)\n\n # Either this is not a ZIP file, or it is a ZIP file with an archive\n # comment. Search the end of the file for the \"end of central directory\"\n # record signature. The comment is the last item in the ZIP file and may be\n # up to 64K long. 
It is assumed that the \"end of central directory\" magic\n # number does not appear in the comment.\n maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)\n fpin.seek(maxCommentStart, 0)\n data = fpin.read()\n start = data.rfind(stringEndArchive)\n if start >= 0:\n # found the magic number; attempt to unpack and interpret\n recData = data[start:start+sizeEndCentDir]\n if len(recData) != sizeEndCentDir:\n # Zip file is corrupted.\n return None\n endrec = list(struct.unpack(structEndArchive, recData))\n commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file\n comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]\n endrec.append(comment)\n endrec.append(maxCommentStart + start)\n\n # Try to read the \"Zip64 end of central directory\" structure\n return _EndRecData64(fpin, maxCommentStart + start - filesize,\n endrec)\n\n # Unable to find a valid end of central directory structure\n return None","function_tokens":["def","_EndRecData","(","fpin",")",":","# Determine file size","fpin",".","seek","(","0",",","2",")","filesize","=","fpin",".","tell","(",")","# Check to see if this is ZIP file with no archive comment (the","# \"end of central directory\" structure should be the last item in the","# file if this is the case).","try",":","fpin",".","seek","(","-","sizeEndCentDir",",","2",")","except","OSError",":","return","None","data","=","fpin",".","read","(",")","if","(","len","(","data",")","==","sizeEndCentDir","and","data","[","0",":","4","]","==","stringEndArchive","and","data","[","-","2",":","]","==","b\"\\000\\000\"",")",":","# the signature is correct and there's no comment, unpack structure","endrec","=","struct",".","unpack","(","structEndArchive",",","data",")","endrec","=","list","(","endrec",")","# Append a blank comment and record start offset","endrec",".","append","(","b\"\"",")","endrec",".","append","(","filesize","-","sizeEndCentDir",")","# Try to read the \"Zip64 end of central directory\" structure","return","_EndRecData64","(","fpin",",","-","sizeEndCentDir",",","endrec",")","# Either this is not a ZIP file, or it is a ZIP file with an archive","# comment. Search the end of the file for the \"end of central directory\"","# record signature. The comment is the last item in the ZIP file and may be","# up to 64K long. 
It is assumed that the \"end of central directory\" magic","# number does not appear in the comment.","maxCommentStart","=","max","(","filesize","-","(","1","<<","16",")","-","sizeEndCentDir",",","0",")","fpin",".","seek","(","maxCommentStart",",","0",")","data","=","fpin",".","read","(",")","start","=","data",".","rfind","(","stringEndArchive",")","if","start",">=","0",":","# found the magic number; attempt to unpack and interpret","recData","=","data","[","start",":","start","+","sizeEndCentDir","]","if","len","(","recData",")","!=","sizeEndCentDir",":","# Zip file is corrupted.","return","None","endrec","=","list","(","struct",".","unpack","(","structEndArchive",",","recData",")",")","commentSize","=","endrec","[","_ECD_COMMENT_SIZE","]","#as claimed by the zip file","comment","=","data","[","start","+","sizeEndCentDir",":","start","+","sizeEndCentDir","+","commentSize","]","endrec",".","append","(","comment",")","endrec",".","append","(","maxCommentStart","+","start",")","# Try to read the \"Zip64 end of central directory\" structure","return","_EndRecData64","(","fpin",",","maxCommentStart","+","start","-","filesize",",","endrec",")","# Unable to find a valid end of central directory structure","return","None"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L240-L298"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipInfo.FileHeader","parameters":"(self, zip64=None)","argument_list":"","return_statement":"return header + filename + extra","docstring":"Return the per-file header as a string.","docstring_summary":"Return the per-file header as a string.","docstring_tokens":["Return","the","per","-","file","header","as","a","string","."],"function":"def FileHeader(self, zip64=None):\n \"\"\"Return the per-file header as a string.\"\"\"\n dt = self.date_time\n dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]\n dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] \/\/ 2)\n if self.flag_bits & 0x08:\n # Set these to zero because we write them after the file data\n CRC = compress_size = file_size = 0\n else:\n CRC = self.CRC\n compress_size = self.compress_size\n file_size = self.file_size\n\n extra = self.extra\n\n min_version = 0\n if zip64 is None:\n zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT\n if zip64:\n fmt = '<HHQQ'\n extra = extra + struct.pack(fmt,\n 1, struct.calcsize(fmt)-4, file_size, compress_size)\n if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:\n if not zip64:\n raise LargeZipFile(\"Filesize would require ZIP64 extensions\")\n # File is larger than what fits into a 4 byte integer,\n # fall back to the ZIP64 extension\n file_size = 0xffffffff\n compress_size = 0xffffffff\n min_version = ZIP64_VERSION\n\n if self.compress_type == ZIP_BZIP2:\n min_version = max(BZIP2_VERSION, min_version)\n elif self.compress_type == ZIP_LZMA:\n min_version = max(LZMA_VERSION, min_version)\n\n self.extract_version = max(min_version, self.extract_version)\n self.create_version = max(min_version, self.create_version)\n filename, flag_bits = self._encodeFilenameFlags()\n header = struct.pack(structFileHeader, stringFileHeader,\n self.extract_version, self.reserved, flag_bits,\n self.compress_type, dostime, dosdate, CRC,\n compress_size, file_size,\n len(filename), len(extra))\n return header + filename + extra","function_tokens":["def","FileHeader","(","self",",","zip64","=","None",")",":","dt","=","self",".","date_time","dosdate","=","(","dt","[","0","]","-","1980",")","<<","9","|","dt","[","1","]","<<","5","|","dt","[","2","]","dostime","=","dt","[","3","]","<<","11","|","dt","[","4","]","<<","5","|","(","dt","[","5","]","\/\/","2",")","if","self",".","flag_bits","&","0x08",":","# Set these to zero because we write them after the file data","CRC","=","compress_size","=","file_size","=","0","else",":","CRC","=","self",".","CRC","compress_size","=","self",".","compress_size","file_size","=","self",".","file_size","extra","=","self",".","extra","min_version","=","0","if","zip64","is","None",":","zip64","=","file_size",">","ZIP64_LIMIT","or","compress_size",">","ZIP64_LIMIT","if","zip64",":","fmt","=","'<HHQQ'","extra","=","extra","+","struct",".","pack","(","fmt",",","1",",","struct",".","calcsize","(","fmt",")","-","4",",","file_size",",","compress_size",")","if","file_size",">","ZIP64_LIMIT","or","compress_size",">","ZIP64_LIMIT",":","if","not","zip64",":","raise","LargeZipFile","(","\"Filesize would require ZIP64 extensions\"",")","# File is larger than what fits into a 4 byte integer,","# fall back to the ZIP64 
extension","file_size","=","0xffffffff","compress_size","=","0xffffffff","min_version","=","ZIP64_VERSION","if","self",".","compress_type","==","ZIP_BZIP2",":","min_version","=","max","(","BZIP2_VERSION",",","min_version",")","elif","self",".","compress_type","==","ZIP_LZMA",":","min_version","=","max","(","LZMA_VERSION",",","min_version",")","self",".","extract_version","=","max","(","min_version",",","self",".","extract_version",")","self",".","create_version","=","max","(","min_version",",","self",".","create_version",")","filename",",","flag_bits","=","self",".","_encodeFilenameFlags","(",")","header","=","struct",".","pack","(","structFileHeader",",","stringFileHeader",",","self",".","extract_version",",","self",".","reserved",",","flag_bits",",","self",".","compress_type",",","dostime",",","dosdate",",","CRC",",","compress_size",",","file_size",",","len","(","filename",")",",","len","(","extra",")",")","return","header","+","filename","+","extra"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L387-L431"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipInfo.from_file","parameters":"(cls, filename, arcname=None, date_time=DEFAULT_DATE)","argument_list":"","return_statement":"return zinfo","docstring":"Construct an appropriate ZipInfo for a file on the filesystem.\n\n filename should be the path to a file or directory on the filesystem.\n\n arcname is the name which it will have within the archive (by default,\n this will be the same as filename, but without a drive letter and with\n leading path separators removed).","docstring_summary":"Construct an appropriate ZipInfo for a file on the filesystem.","docstring_tokens":["Construct","an","appropriate","ZipInfo","for","a","file","on","the","filesystem","."],"function":"def from_file(cls, filename, arcname=None, date_time=DEFAULT_DATE):\n \"\"\"Construct an appropriate ZipInfo for a file on the filesystem.\n\n filename should be the path to a file or directory on the filesystem.\n\n arcname is the name which it will have within the archive (by default,\n this will be the same as filename, but without a drive letter and with\n leading path separators removed).\n \"\"\"\n if isinstance(filename, os.PathLike):\n filename = os.fspath(filename)\n st = os.stat(filename)\n isdir = stat.S_ISDIR(st.st_mode)\n # Create ZipInfo instance to store file information\n if arcname is None:\n arcname = filename\n arcname = os.path.normpath(os.path.splitdrive(arcname)[1])\n while arcname[0] in (os.sep, os.altsep):\n arcname = arcname[1:]\n if isdir:\n arcname += '\/'\n zinfo = cls(arcname, date_time)\n zinfo.external_attr = 25165824 # Unix attributes, hard-codeds\n if isdir:\n zinfo.file_size = 0\n zinfo.external_attr |= 0x10 # MS-DOS directory flag\n else:\n zinfo.file_size = st.st_size\n\n return zinfo","function_tokens":["def","from_file","(","cls",",","filename",",","arcname","=","None",",","date_time","=","DEFAULT_DATE",")",":","if","isinstance","(","filename",",","os",".","PathLike",")",":","filename","=","os",".","fspath","(","filename",")","st","=","os",".","stat","(","filename",")","isdir","=","stat",".","S_ISDIR","(","st",".","st_mode",")","# Create ZipInfo instance to store file information","if","arcname","is","None",":","arcname","=","filename","arcname","=","os",".","path",".","normpath","(","os",".","path",".","splitdrive","(","arcname",")","[","1","]",")","while","arcname","[","0","]","in","(","os",".","sep",",","os",".","altsep",")",":","arcname","=","arcname","[","1",":","]","if","isdir",":","arcname","+=","'\/'","zinfo","=","cls","(","arcname",",","date_time",")","zinfo",".","external_attr","=","25165824","# Unix attributes, hard-codeds","if","isdir",":","zinfo",".","file_size","=","0","zinfo",".","external_attr","|=","0x10","# MS-DOS directory flag","else",":","zinfo",".","file_size","=","st",".","st_size","return","zinfo"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L476-L505"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipInfo.is_dir","parameters":"(self)","argument_list":"","return_statement":"return self.filename[-1] == '\/'","docstring":"Return True if this archive member is a directory.","docstring_summary":"Return True if this archive member is a directory.","docstring_tokens":["Return","True","if","this","archive","member","is","a","directory","."],"function":"def is_dir(self):\n \"\"\"Return True if this archive member is a directory.\"\"\"\n return self.filename[-1] == '\/'","function_tokens":["def","is_dir","(","self",")",":","return","self",".","filename","[","-","1","]","==","'\/'"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L507-L509"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"_ZipDecrypter._GenerateCRCTable","parameters":"()","argument_list":"","return_statement":"return table","docstring":"Generate a CRC-32 table.\n\n ZIP encryption uses the CRC32 one-byte primitive for scrambling some\n internal keys. We noticed that a direct implementation is faster than\n relying on binascii.crc32().","docstring_summary":"Generate a CRC-32 table.","docstring_tokens":["Generate","a","CRC","-","32","table","."],"function":"def _GenerateCRCTable():\n \"\"\"Generate a CRC-32 table.\n\n ZIP encryption uses the CRC32 one-byte primitive for scrambling some\n internal keys. We noticed that a direct implementation is faster than\n relying on binascii.crc32().\n \"\"\"\n poly = 0xedb88320\n table = [0] * 256\n for i in range(256):\n crc = i\n for j in range(8):\n if crc & 1:\n crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly\n else:\n crc = ((crc >> 1) & 0x7FFFFFFF)\n table[i] = crc\n return table","function_tokens":["def","_GenerateCRCTable","(",")",":","poly","=","0xedb88320","table","=","[","0","]","*","256","for","i","in","range","(","256",")",":","crc","=","i","for","j","in","range","(","8",")",":","if","crc","&","1",":","crc","=","(","(","crc",">>","1",")","&","0x7FFFFFFF",")","^","poly","else",":","crc","=","(","(","crc",">>","1",")","&","0x7FFFFFFF",")","table","[","i","]","=","crc","return","table"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L525-L542"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"_ZipDecrypter._crc32","parameters":"(self, ch, crc)","argument_list":"","return_statement":"return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ch) & 0xff]","docstring":"Compute the CRC32 primitive on one byte.","docstring_summary":"Compute the CRC32 primitive on one byte.","docstring_tokens":["Compute","the","CRC32","primitive","on","one","byte","."],"function":"def _crc32(self, ch, crc):\n \"\"\"Compute the CRC32 primitive on one byte.\"\"\"\n return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ch) & 0xff]","function_tokens":["def","_crc32","(","self",",","ch",",","crc",")",":","return","(","(","crc",">>","8",")","&","0xffffff",")","^","self",".","crctable","[","(","crc","^","ch",")","&","0xff","]"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L545-L547"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"_ZipDecrypter.__call__","parameters":"(self, c)","argument_list":"","return_statement":"return c","docstring":"Decrypt a single character.","docstring_summary":"Decrypt a single character.","docstring_tokens":["Decrypt","a","single","character","."],"function":"def __call__(self, c):\n \"\"\"Decrypt a single character.\"\"\"\n assert isinstance(c, int)\n k = self.key2 | 2\n c = c ^ (((k * (k^1)) >> 8) & 255)\n self._UpdateKeys(c)\n return c","function_tokens":["def","__call__","(","self",",","c",")",":","assert","isinstance","(","c",",","int",")","k","=","self",".","key2","|","2","c","=","c","^","(","(","(","k","*","(","k","^","1",")",")",">>","8",")","&","255",")","self",".","_UpdateKeys","(","c",")","return","c"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L564-L570"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipExtFile.readline","parameters":"(self, limit=-1)","argument_list":"","return_statement":"return io.BufferedIOBase.readline(self, limit)","docstring":"Read and return a line from the stream.\n\n If limit is specified, at most limit bytes will be read.","docstring_summary":"Read and return a line from the stream.","docstring_tokens":["Read","and","return","a","line","from","the","stream","."],"function":"def readline(self, limit=-1):\n \"\"\"Read and return a line from the stream.\n\n If limit is specified, at most limit bytes will be read.\n \"\"\"\n\n if limit < 0:\n # Shortcut common case - newline found in buffer.\n i = self._readbuffer.find(b'\\n', self._offset) + 1\n if i > 0:\n line = self._readbuffer[self._offset: i]\n self._offset = i\n return line\n\n return io.BufferedIOBase.readline(self, limit)","function_tokens":["def","readline","(","self",",","limit","=","-","1",")",":","if","limit","<","0",":","# Shortcut common case - newline found in buffer.","i","=","self",".","_readbuffer",".","find","(","b'\\n'",",","self",".","_offset",")","+","1","if","i",">","0",":","line","=","self",".","_readbuffer","[","self",".","_offset",":","i","]","self",".","_offset","=","i","return","line","return","io",".","BufferedIOBase",".","readline","(","self",",","limit",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L795-L809"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipExtFile.peek","parameters":"(self, n=1)","argument_list":"","return_statement":"return self._readbuffer[self._offset: self._offset + 512]","docstring":"Returns buffered bytes without advancing the position.","docstring_summary":"Returns buffered bytes without advancing the position.","docstring_tokens":["Returns","buffered","bytes","without","advancing","the","position","."],"function":"def peek(self, n=1):\n \"\"\"Returns buffered bytes without advancing the position.\"\"\"\n if n > len(self._readbuffer) - self._offset:\n chunk = self.read(n)\n if len(chunk) > self._offset:\n self._readbuffer = chunk + self._readbuffer[self._offset:]\n self._offset = 0\n else:\n self._offset -= len(chunk)\n\n # Return up to 512 bytes to reduce allocation overhead for tight loops.\n return self._readbuffer[self._offset: self._offset + 512]","function_tokens":["def","peek","(","self",",","n","=","1",")",":","if","n",">","len","(","self",".","_readbuffer",")","-","self",".","_offset",":","chunk","=","self",".","read","(","n",")","if","len","(","chunk",")",">","self",".","_offset",":","self",".","_readbuffer","=","chunk","+","self",".","_readbuffer","[","self",".","_offset",":","]","self",".","_offset","=","0","else",":","self",".","_offset","-=","len","(","chunk",")","# Return up to 512 bytes to reduce allocation overhead for tight loops.","return","self",".","_readbuffer","[","self",".","_offset",":","self",".","_offset","+","512","]"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L811-L822"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipExtFile.read","parameters":"(self, n=-1)","argument_list":"","return_statement":"return buf","docstring":"Read and return up to n bytes.\n If the argument is omitted, None, or negative, data is read and returned until EOF is reached..","docstring_summary":"Read and return up to n bytes.\n If the argument is omitted, None, or negative, data is read and returned until EOF is reached..","docstring_tokens":["Read","and","return","up","to","n","bytes",".","If","the","argument","is","omitted","None","or","negative","data","is","read","and","returned","until","EOF","is","reached",".."],"function":"def read(self, n=-1):\n \"\"\"Read and return up to n bytes.\n If the argument is omitted, None, or negative, data is read and returned until EOF is reached..\n \"\"\"\n if n is None or n < 0:\n buf = self._readbuffer[self._offset:]\n self._readbuffer = b''\n self._offset = 0\n while not self._eof:\n buf += self._read1(self.MAX_N)\n return buf\n\n end = n + self._offset\n if end < len(self._readbuffer):\n buf = self._readbuffer[self._offset:end]\n self._offset = end\n return buf\n\n n = end - len(self._readbuffer)\n buf = self._readbuffer[self._offset:]\n self._readbuffer = b''\n self._offset = 0\n while n > 0 and not self._eof:\n data = self._read1(n)\n if n < len(data):\n self._readbuffer = data\n self._offset = n\n buf += data[:n]\n break\n buf += data\n n -= len(data)\n return buf","function_tokens":["def","read","(","self",",","n","=","-","1",")",":","if","n","is","None","or","n","<","0",":","buf","=","self",".","_readbuffer","[","self",".","_offset",":","]","self",".","_readbuffer","=","b''","self",".","_offset","=","0","while","not","self",".","_eof",":","buf","+=","self",".","_read1","(","self",".","MAX_N",")","return","buf","end","=","n","+","self",".","_offset","if","end","<","len","(","self",".","_readbuffer",")",":","buf","=","self",".","_readbuffer","[","self",".","_offset",":","end","]","self",".","_offset","=","end","return","buf","n","=","end","-","len","(","self",".","_readbuffer",")","buf","=","self",".","_readbuffer","[","self",".","_offset",":","]","self",".","_readbuffer","=","b''","self",".","_offset","=","0","while","n",">","0","and","not","self",".","_eof",":","data","=","self",".","_read1","(","n",")","if","n","<","len","(","data",")",":","self",".","_readbuffer","=","data","self",".","_offset","=","n","buf","+=","data","[",":","n","]","break","buf","+=","data","n","-=","len","(","data",")","return","buf"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L827-L858"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipExtFile.read1","parameters":"(self, n)","argument_list":"","return_statement":"return buf","docstring":"Read up to n bytes with at most one read() system call.","docstring_summary":"Read up to n bytes with at most one read() system call.","docstring_tokens":["Read","up","to","n","bytes","with","at","most","one","read","()","system","call","."],"function":"def read1(self, n):\n \"\"\"Read up to n bytes with at most one read() system call.\"\"\"\n\n if n is None or n < 0:\n buf = self._readbuffer[self._offset:]\n self._readbuffer = b''\n self._offset = 0\n while not self._eof:\n data = self._read1(self.MAX_N)\n if data:\n buf += data\n break\n return buf\n\n end = n + self._offset\n if end < len(self._readbuffer):\n buf = self._readbuffer[self._offset:end]\n self._offset = end\n return buf\n\n n = end - len(self._readbuffer)\n buf = self._readbuffer[self._offset:]\n self._readbuffer = b''\n self._offset = 0\n if n > 0:\n while not self._eof:\n data = self._read1(n)\n if n < len(data):\n self._readbuffer = data\n self._offset = n\n buf += data[:n]\n break\n if data:\n buf += data\n break\n return buf","function_tokens":["def","read1","(","self",",","n",")",":","if","n","is","None","or","n","<","0",":","buf","=","self",".","_readbuffer","[","self",".","_offset",":","]","self",".","_readbuffer","=","b''","self",".","_offset","=","0","while","not","self",".","_eof",":","data","=","self",".","_read1","(","self",".","MAX_N",")","if","data",":","buf","+=","data","break","return","buf","end","=","n","+","self",".","_offset","if","end","<","len","(","self",".","_readbuffer",")",":","buf","=","self",".","_readbuffer","[","self",".","_offset",":","end","]","self",".","_offset","=","end","return","buf","n","=","end","-","len","(","self",".","_readbuffer",")","buf","=","self",".","_readbuffer","[","self",".","_offset",":","]","self",".","_readbuffer","=","b''","self",".","_offset","=","0","if","n",">","0",":","while","not","self",".","_eof",":","data","=","self",".","_read1","(","n",")","if","n","<","len","(","data",")",":","self",".","_readbuffer","=","data","self",".","_offset","=","n","buf","+=","data","[",":","n","]","break","if","data",":","buf","+=","data","break","return","buf"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L870-L905"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.__init__","parameters":"(self, file, mode=\"r\", compression=ZIP_STORED, allowZip64=True)","argument_list":"","return_statement":"","docstring":"Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',\n or append 'a'.","docstring_summary":"Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',\n or append 'a'.","docstring_tokens":["Open","the","ZIP","file","with","mode","read","r","write","w","exclusive","create","x","or","append","a","."],"function":"def __init__(self, file, mode=\"r\", compression=ZIP_STORED, allowZip64=True):\n \"\"\"Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',\n or append 'a'.\"\"\"\n if mode not in ('r', 'w', 'x', 'a'):\n raise ValueError(\"ZipFile requires mode 'r', 'w', 'x', or 'a'\")\n\n _check_compression(compression)\n\n self._allowZip64 = allowZip64\n self._didModify = False\n self.debug = 0 # Level of printing: 0 through 3\n self.NameToInfo = {} # Find file info given name\n self.filelist = [] # List of ZipInfo instances for archive\n self.compression = compression # Method of compression\n self.mode = mode\n self.pwd = None\n self._comment = b''\n\n # Check if we were passed a file-like object\n if isinstance(file, os.PathLike):\n file = os.fspath(file)\n if isinstance(file, str):\n # No, it's a filename\n self._filePassed = 0\n self.filename = file\n modeDict = {'r' : 'rb', 'w': 'w+b', 'x': 'x+b', 'a' : 'r+b',\n 'r+b': 'w+b', 'w+b': 'wb', 'x+b': 'xb'}\n filemode = modeDict[mode]\n while True:\n try:\n self.fp = io.open(file, filemode)\n except OSError:\n if filemode in modeDict:\n filemode = modeDict[filemode]\n continue\n raise\n break\n else:\n self._filePassed = 1\n self.fp = file\n self.filename = getattr(file, 'name', None)\n self._fileRefCnt = 1\n self._lock = threading.RLock()\n self._seekable = True\n self._writing = False\n\n try:\n if mode == 'r':\n self._RealGetContents()\n elif mode in ('w', 'x'):\n # set the modified flag so central directory gets written\n # even if no files are added to the archive\n self._didModify = True\n try:\n self.start_dir = self.fp.tell()\n except (AttributeError, OSError):\n self.fp = _Tellable(self.fp)\n self.start_dir = 0\n self._seekable = False\n else:\n # Some file-like objects can provide tell() but not seek()\n try:\n self.fp.seek(self.start_dir)\n except (AttributeError, OSError):\n self._seekable = False\n elif mode == 'a':\n try:\n # See if file is a zip file\n self._RealGetContents()\n # seek to start of directory and overwrite\n self.fp.seek(self.start_dir)\n except BadZipFile:\n # file is not a zip file, just append\n self.fp.seek(0, 2)\n\n # set the modified flag so central directory gets written\n # even if no files are added to the archive\n self._didModify = True\n self.start_dir = self.fp.tell()\n else:\n raise ValueError(\"Mode must be 'r', 'w', 'x', or 'a'\")\n except:\n fp = self.fp\n self.fp = None\n self._fpclose(fp)\n raise","function_tokens":["def","__init__","(","self",",","file",",","mode","=","\"r\"",",","compression","=","ZIP_STORED",",","allowZip64","=","True",")",":","if","mode","not","in","(","'r'",",","'w'",",","'x'",",","'a'",")",":","raise","ValueError","(","\"ZipFile requires mode 'r', 'w', 'x', or 'a'\"",")","_check_compression","(","compression",")","self",".","_allowZip64","=","allowZip64","self",".","_didModify","=","False","self",".","debug","=","0","# Level 
of printing: 0 through 3","self",".","NameToInfo","=","{","}","# Find file info given name","self",".","filelist","=","[","]","# List of ZipInfo instances for archive","self",".","compression","=","compression","# Method of compression","self",".","mode","=","mode","self",".","pwd","=","None","self",".","_comment","=","b''","# Check if we were passed a file-like object","if","isinstance","(","file",",","os",".","PathLike",")",":","file","=","os",".","fspath","(","file",")","if","isinstance","(","file",",","str",")",":","# No, it's a filename","self",".","_filePassed","=","0","self",".","filename","=","file","modeDict","=","{","'r'",":","'rb'",",","'w'",":","'w+b'",",","'x'",":","'x+b'",",","'a'",":","'r+b'",",","'r+b'",":","'w+b'",",","'w+b'",":","'wb'",",","'x+b'",":","'xb'","}","filemode","=","modeDict","[","mode","]","while","True",":","try",":","self",".","fp","=","io",".","open","(","file",",","filemode",")","except","OSError",":","if","filemode","in","modeDict",":","filemode","=","modeDict","[","filemode","]","continue","raise","break","else",":","self",".","_filePassed","=","1","self",".","fp","=","file","self",".","filename","=","getattr","(","file",",","'name'",",","None",")","self",".","_fileRefCnt","=","1","self",".","_lock","=","threading",".","RLock","(",")","self",".","_seekable","=","True","self",".","_writing","=","False","try",":","if","mode","==","'r'",":","self",".","_RealGetContents","(",")","elif","mode","in","(","'w'",",","'x'",")",":","# set the modified flag so central directory gets written","# even if no files are added to the archive","self",".","_didModify","=","True","try",":","self",".","start_dir","=","self",".","fp",".","tell","(",")","except","(","AttributeError",",","OSError",")",":","self",".","fp","=","_Tellable","(","self",".","fp",")","self",".","start_dir","=","0","self",".","_seekable","=","False","else",":","# Some file-like objects can provide tell() but not seek()","try",":","self",".","fp",".","seek","(","self",".","start_dir",")","except","(","AttributeError",",","OSError",")",":","self",".","_seekable","=","False","elif","mode","==","'a'",":","try",":","# See if file is a zip file","self",".","_RealGetContents","(",")","# seek to start of directory and overwrite","self",".","fp",".","seek","(","self",".","start_dir",")","except","BadZipFile",":","# file is not a zip file, just append","self",".","fp",".","seek","(","0",",","2",")","# set the modified flag so central directory gets written","# even if no files are added to the archive","self",".","_didModify","=","True","self",".","start_dir","=","self",".","fp",".","tell","(",")","else",":","raise","ValueError","(","\"Mode must be 'r', 'w', 'x', or 'a'\"",")","except",":","fp","=","self",".","fp","self",".","fp","=","None","self",".","_fpclose","(","fp",")","raise"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1061-L1146"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile._RealGetContents","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Read in the table of contents for the ZIP file.","docstring_summary":"Read in the table of contents for the ZIP file.","docstring_tokens":["Read","in","the","table","of","contents","for","the","ZIP","file","."],"function":"def _RealGetContents(self):\n \"\"\"Read in the table of contents for the ZIP file.\"\"\"\n fp = self.fp\n try:\n endrec = _EndRecData(fp)\n except OSError:\n raise BadZipFile(\"File is not a zip file\")\n if not endrec:\n raise BadZipFile(\"File is not a zip file\")\n if self.debug > 1:\n print(endrec)\n size_cd = endrec[_ECD_SIZE] # bytes in central directory\n offset_cd = endrec[_ECD_OFFSET] # offset of central directory\n self._comment = endrec[_ECD_COMMENT] # archive comment\n\n # \"concat\" is zero, unless zip was concatenated to another file\n concat = endrec[_ECD_LOCATION] - size_cd - offset_cd\n if endrec[_ECD_SIGNATURE] == stringEndArchive64:\n # If Zip64 extension structures are present, account for them\n concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)\n\n if self.debug > 2:\n inferred = concat + offset_cd\n print(\"given, inferred, offset\", offset_cd, inferred, concat)\n # self.start_dir: Position of start of central directory\n self.start_dir = offset_cd + concat\n fp.seek(self.start_dir, 0)\n data = fp.read(size_cd)\n fp = io.BytesIO(data)\n total = 0\n while total < size_cd:\n centdir = fp.read(sizeCentralDir)\n if len(centdir) != sizeCentralDir:\n raise BadZipFile(\"Truncated central directory\")\n centdir = struct.unpack(structCentralDir, centdir)\n if centdir[_CD_SIGNATURE] != stringCentralDir:\n raise BadZipFile(\"Bad magic number for central directory\")\n if self.debug > 2:\n print(centdir)\n filename = fp.read(centdir[_CD_FILENAME_LENGTH])\n flags = centdir[5]\n if flags & 0x800:\n # UTF-8 file names extension\n filename = filename.decode('utf-8')\n else:\n # Historical ZIP filename encoding\n filename = filename.decode('cp437')\n # Create ZipInfo instance to store file information\n x = ZipInfo(filename)\n x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])\n x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])\n x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]\n (x.create_version, x.create_system, x.extract_version, x.reserved,\n x.flag_bits, x.compress_type, t, d,\n x.CRC, x.compress_size, x.file_size) = centdir[1:12]\n if x.extract_version > MAX_EXTRACT_VERSION:\n raise NotImplementedError(\"zip file version %.1f\" %\n (x.extract_version \/ 10))\n x.volume, x.internal_attr, x.external_attr = centdir[15:18]\n # Convert date\/time code to (year, month, day, hour, min, sec)\n x._raw_time = t\n x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,\n t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )\n\n x._decodeExtra()\n x.header_offset = x.header_offset + concat\n self.filelist.append(x)\n self.NameToInfo[x.filename] = x\n\n # update total bytes read from central directory\n total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]\n + centdir[_CD_EXTRA_FIELD_LENGTH]\n + centdir[_CD_COMMENT_LENGTH])\n\n if self.debug > 2:\n print(\"total\", total)","function_tokens":["def","_RealGetContents","(","self",")",":","fp","=","self",".","fp","try",":","endrec","=","_EndRecData","(","fp",")","except","OSError",":","raise","BadZipFile","(","\"File is not a zip 
file\"",")","if","not","endrec",":","raise","BadZipFile","(","\"File is not a zip file\"",")","if","self",".","debug",">","1",":","print","(","endrec",")","size_cd","=","endrec","[","_ECD_SIZE","]","# bytes in central directory","offset_cd","=","endrec","[","_ECD_OFFSET","]","# offset of central directory","self",".","_comment","=","endrec","[","_ECD_COMMENT","]","# archive comment","# \"concat\" is zero, unless zip was concatenated to another file","concat","=","endrec","[","_ECD_LOCATION","]","-","size_cd","-","offset_cd","if","endrec","[","_ECD_SIGNATURE","]","==","stringEndArchive64",":","# If Zip64 extension structures are present, account for them","concat","-=","(","sizeEndCentDir64","+","sizeEndCentDir64Locator",")","if","self",".","debug",">","2",":","inferred","=","concat","+","offset_cd","print","(","\"given, inferred, offset\"",",","offset_cd",",","inferred",",","concat",")","# self.start_dir: Position of start of central directory","self",".","start_dir","=","offset_cd","+","concat","fp",".","seek","(","self",".","start_dir",",","0",")","data","=","fp",".","read","(","size_cd",")","fp","=","io",".","BytesIO","(","data",")","total","=","0","while","total","<","size_cd",":","centdir","=","fp",".","read","(","sizeCentralDir",")","if","len","(","centdir",")","!=","sizeCentralDir",":","raise","BadZipFile","(","\"Truncated central directory\"",")","centdir","=","struct",".","unpack","(","structCentralDir",",","centdir",")","if","centdir","[","_CD_SIGNATURE","]","!=","stringCentralDir",":","raise","BadZipFile","(","\"Bad magic number for central directory\"",")","if","self",".","debug",">","2",":","print","(","centdir",")","filename","=","fp",".","read","(","centdir","[","_CD_FILENAME_LENGTH","]",")","flags","=","centdir","[","5","]","if","flags","&","0x800",":","# UTF-8 file names extension","filename","=","filename",".","decode","(","'utf-8'",")","else",":","# Historical ZIP filename encoding","filename","=","filename",".","decode","(","'cp437'",")","# Create ZipInfo instance to store file information","x","=","ZipInfo","(","filename",")","x",".","extra","=","fp",".","read","(","centdir","[","_CD_EXTRA_FIELD_LENGTH","]",")","x",".","comment","=","fp",".","read","(","centdir","[","_CD_COMMENT_LENGTH","]",")","x",".","header_offset","=","centdir","[","_CD_LOCAL_HEADER_OFFSET","]","(","x",".","create_version",",","x",".","create_system",",","x",".","extract_version",",","x",".","reserved",",","x",".","flag_bits",",","x",".","compress_type",",","t",",","d",",","x",".","CRC",",","x",".","compress_size",",","x",".","file_size",")","=","centdir","[","1",":","12","]","if","x",".","extract_version",">","MAX_EXTRACT_VERSION",":","raise","NotImplementedError","(","\"zip file version %.1f\"","%","(","x",".","extract_version","\/","10",")",")","x",".","volume",",","x",".","internal_attr",",","x",".","external_attr","=","centdir","[","15",":","18","]","# Convert date\/time code to (year, month, day, hour, min, sec)","x",".","_raw_time","=","t","x",".","date_time","=","(","(","d",">>","9",")","+","1980",",","(","d",">>","5",")","&","0xF",",","d","&","0x1F",",","t",">>","11",",","(","t",">>","5",")","&","0x3F",",","(","t","&","0x1F",")","*","2",")","x",".","_decodeExtra","(",")","x",".","header_offset","=","x",".","header_offset","+","concat","self",".","filelist",".","append","(","x",")","self",".","NameToInfo","[","x",".","filename","]","=","x","# update total bytes read from central 
directory","total","=","(","total","+","sizeCentralDir","+","centdir","[","_CD_FILENAME_LENGTH","]","+","centdir","[","_CD_EXTRA_FIELD_LENGTH","]","+","centdir","[","_CD_COMMENT_LENGTH","]",")","if","self",".","debug",">","2",":","print","(","\"total\"",",","total",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1168-L1243"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.namelist","parameters":"(self)","argument_list":"","return_statement":"return [data.filename for data in self.filelist]","docstring":"Return a list of file names in the archive.","docstring_summary":"Return a list of file names in the archive.","docstring_tokens":["Return","a","list","of","file","names","in","the","archive","."],"function":"def namelist(self):\n \"\"\"Return a list of file names in the archive.\"\"\"\n return [data.filename for data in self.filelist]","function_tokens":["def","namelist","(","self",")",":","return","[","data",".","filename","for","data","in","self",".","filelist","]"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1246-L1248"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.infolist","parameters":"(self)","argument_list":"","return_statement":"return self.filelist","docstring":"Return a list of class ZipInfo instances for files in the\n archive.","docstring_summary":"Return a list of class ZipInfo instances for files in the\n archive.","docstring_tokens":["Return","a","list","of","class","ZipInfo","instances","for","files","in","the","archive","."],"function":"def infolist(self):\n \"\"\"Return a list of class ZipInfo instances for files in the\n archive.\"\"\"\n return self.filelist","function_tokens":["def","infolist","(","self",")",":","return","self",".","filelist"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1250-L1253"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.printdir","parameters":"(self, file=None)","argument_list":"","return_statement":"","docstring":"Print a table of contents for the zip file.","docstring_summary":"Print a table of contents for the zip file.","docstring_tokens":["Print","a","table","of","contents","for","the","zip","file","."],"function":"def printdir(self, file=None):\n \"\"\"Print a table of contents for the zip file.\"\"\"\n print(\"%-46s %19s %12s\" % (\"File Name\", \"Modified \", \"Size\"),\n file=file)\n for zinfo in self.filelist:\n date = \"%d-%02d-%02d %02d:%02d:%02d\" % zinfo.date_time[:6]\n print(\"%-46s %s %12d\" % (zinfo.filename, date, zinfo.file_size),\n file=file)","function_tokens":["def","printdir","(","self",",","file","=","None",")",":","print","(","\"%-46s %19s %12s\"","%","(","\"File Name\"",",","\"Modified \"",",","\"Size\"",")",",","file","=","file",")","for","zinfo","in","self",".","filelist",":","date","=","\"%d-%02d-%02d %02d:%02d:%02d\"","%","zinfo",".","date_time","[",":","6","]","print","(","\"%-46s %s %12d\"","%","(","zinfo",".","filename",",","date",",","zinfo",".","file_size",")",",","file","=","file",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1255-L1262"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.testzip","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Read all the files and check the CRC.","docstring_summary":"Read all the files and check the CRC.","docstring_tokens":["Read","all","the","files","and","check","the","CRC","."],"function":"def testzip(self):\n \"\"\"Read all the files and check the CRC.\"\"\"\n chunk_size = 2 ** 20\n for zinfo in self.filelist:\n try:\n # Read by chunks, to avoid an OverflowError or a\n # MemoryError with very large embedded files.\n with self.open(zinfo.filename, \"r\") as f:\n while f.read(chunk_size): # Check CRC-32\n pass\n except BadZipFile:\n return zinfo.filename","function_tokens":["def","testzip","(","self",")",":","chunk_size","=","2","**","20","for","zinfo","in","self",".","filelist",":","try",":","# Read by chunks, to avoid an OverflowError or a","# MemoryError with very large embedded files.","with","self",".","open","(","zinfo",".","filename",",","\"r\"",")","as","f",":","while","f",".","read","(","chunk_size",")",":","# Check CRC-32","pass","except","BadZipFile",":","return","zinfo",".","filename"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1264-L1275"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.getinfo","parameters":"(self, name)","argument_list":"","return_statement":"return info","docstring":"Return the instance of ZipInfo given 'name'.","docstring_summary":"Return the instance of ZipInfo given 'name'.","docstring_tokens":["Return","the","instance","of","ZipInfo","given","name","."],"function":"def getinfo(self, name):\n \"\"\"Return the instance of ZipInfo given 'name'.\"\"\"\n info = self.NameToInfo.get(name)\n if info is None:\n raise KeyError(\n 'There is no item named %r in the archive' % name)\n\n return info","function_tokens":["def","getinfo","(","self",",","name",")",":","info","=","self",".","NameToInfo",".","get","(","name",")","if","info","is","None",":","raise","KeyError","(","'There is no item named %r in the archive'","%","name",")","return","info"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1277-L1284"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.setpassword","parameters":"(self, pwd)","argument_list":"","return_statement":"","docstring":"Set default password for encrypted files.","docstring_summary":"Set default password for encrypted files.","docstring_tokens":["Set","default","password","for","encrypted","files","."],"function":"def setpassword(self, pwd):\n \"\"\"Set default password for encrypted files.\"\"\"\n if pwd and not isinstance(pwd, bytes):\n raise TypeError(\"pwd: expected bytes, got %s\" % type(pwd).__name__)\n if pwd:\n self.pwd = pwd\n else:\n self.pwd = None","function_tokens":["def","setpassword","(","self",",","pwd",")",":","if","pwd","and","not","isinstance","(","pwd",",","bytes",")",":","raise","TypeError","(","\"pwd: expected bytes, got %s\"","%","type","(","pwd",")",".","__name__",")","if","pwd",":","self",".","pwd","=","pwd","else",":","self",".","pwd","=","None"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1286-L1293"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.comment","parameters":"(self)","argument_list":"","return_statement":"return self._comment","docstring":"The comment text associated with the ZIP file.","docstring_summary":"The comment text associated with the ZIP file.","docstring_tokens":["The","comment","text","associated","with","the","ZIP","file","."],"function":"def comment(self):\n \"\"\"The comment text associated with the ZIP file.\"\"\"\n return self._comment","function_tokens":["def","comment","(","self",")",":","return","self",".","_comment"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1296-L1298"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.read","parameters":"(self, name, pwd=None)","argument_list":"","return_statement":"","docstring":"Return file bytes (as a string) for name.","docstring_summary":"Return file bytes (as a string) for name.","docstring_tokens":["Return","file","bytes","(","as","a","string",")","for","name","."],"function":"def read(self, name, pwd=None):\n \"\"\"Return file bytes (as a string) for name.\"\"\"\n with self.open(name, \"r\", pwd) as fp:\n return fp.read()","function_tokens":["def","read","(","self",",","name",",","pwd","=","None",")",":","with","self",".","open","(","name",",","\"r\"",",","pwd",")","as","fp",":","return","fp",".","read","(",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1313-L1316"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.open","parameters":"(self, name, mode=\"r\", pwd=None, *, force_zip64=False)","argument_list":"","return_statement":"","docstring":"Return file-like object for 'name'.\n\n name is a string for the file name within the ZIP file, or a ZipInfo\n object.\n\n mode should be 'r' to read a file already in the ZIP file, or 'w' to\n write to a file newly added to the archive.\n\n pwd is the password to decrypt files (only used for reading).\n\n When writing, if the file size is not known in advance but may exceed\n 2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large\n files. If the size is known in advance, it is best to pass a ZipInfo\n instance for name, with zinfo.file_size set.","docstring_summary":"Return file-like object for 'name'.","docstring_tokens":["Return","file","-","like","object","for","name","."],"function":"def open(self, name, mode=\"r\", pwd=None, *, force_zip64=False):\n \"\"\"Return file-like object for 'name'.\n\n name is a string for the file name within the ZIP file, or a ZipInfo\n object.\n\n mode should be 'r' to read a file already in the ZIP file, or 'w' to\n write to a file newly added to the archive.\n\n pwd is the password to decrypt files (only used for reading).\n\n When writing, if the file size is not known in advance but may exceed\n 2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large\n files. If the size is known in advance, it is best to pass a ZipInfo\n instance for name, with zinfo.file_size set.\n \"\"\"\n if mode not in {\"r\", \"w\"}:\n raise ValueError('open() requires mode \"r\" or \"w\"')\n if pwd and not isinstance(pwd, bytes):\n raise TypeError(\"pwd: expected bytes, got %s\" % type(pwd).__name__)\n if pwd and (mode == \"w\"):\n raise ValueError(\"pwd is only supported for reading files\")\n if not self.fp:\n raise ValueError(\n \"Attempt to use ZIP archive that was already closed\")\n\n # Make sure we have an info object\n if isinstance(name, ZipInfo):\n # 'name' is already an info object\n zinfo = name\n elif mode == 'w':\n zinfo = ZipInfo(name)\n zinfo.compress_type = self.compression\n else:\n # Get info object for name\n zinfo = self.getinfo(name)\n\n if mode == 'w':\n return self._open_to_write(zinfo, force_zip64=force_zip64)\n\n if self._writing:\n raise ValueError(\"Can't read from the ZIP file while there \"\n \"is an open writing handle on it. 
\"\n \"Close the writing handle before trying to read.\")\n\n # Open for reading:\n self._fileRefCnt += 1\n zef_file = _SharedFile(self.fp, zinfo.header_offset,\n self._fpclose, self._lock, lambda: self._writing)\n try:\n # Skip the file header:\n fheader = zef_file.read(sizeFileHeader)\n if len(fheader) != sizeFileHeader:\n raise BadZipFile(\"Truncated file header\")\n fheader = struct.unpack(structFileHeader, fheader)\n if fheader[_FH_SIGNATURE] != stringFileHeader:\n raise BadZipFile(\"Bad magic number for file header\")\n\n fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])\n if fheader[_FH_EXTRA_FIELD_LENGTH]:\n zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])\n\n if zinfo.flag_bits & 0x20:\n # Zip 2.7: compressed patched data\n raise NotImplementedError(\"compressed patched data (flag bit 5)\")\n\n if zinfo.flag_bits & 0x40:\n # strong encryption\n raise NotImplementedError(\"strong encryption (flag bit 6)\")\n\n if zinfo.flag_bits & 0x800:\n # UTF-8 filename\n fname_str = fname.decode(\"utf-8\")\n else:\n fname_str = fname.decode(\"cp437\")\n\n if fname_str != zinfo.orig_filename:\n raise BadZipFile(\n 'File name in directory %r and header %r differ.'\n % (zinfo.orig_filename, fname))\n\n # check for encrypted flag & handle password\n is_encrypted = zinfo.flag_bits & 0x1\n zd = None\n if is_encrypted:\n if not pwd:\n pwd = self.pwd\n if not pwd:\n raise RuntimeError(\"File %r is encrypted, password \"\n \"required for extraction\" % name)\n\n zd = _ZipDecrypter(pwd)\n # The first 12 bytes in the cypher stream is an encryption header\n # used to strengthen the algorithm. The first 11 bytes are\n # completely random, while the 12th contains the MSB of the CRC,\n # or the MSB of the file time depending on the header type\n # and is used to check the correctness of the password.\n header = zef_file.read(12)\n h = list(map(zd, header[0:12]))\n if zinfo.flag_bits & 0x8:\n # compare against the file type from extended local headers\n check_byte = (zinfo._raw_time >> 8) & 0xff\n else:\n # compare against the CRC otherwise\n check_byte = (zinfo.CRC >> 24) & 0xff\n if h[11] != check_byte:\n raise RuntimeError(\"Bad password for file %r\" % name)\n\n return ZipExtFile(zef_file, mode, zinfo, zd, True)\n except:\n zef_file.close()\n raise","function_tokens":["def","open","(","self",",","name",",","mode","=","\"r\"",",","pwd","=","None",",","*",",","force_zip64","=","False",")",":","if","mode","not","in","{","\"r\"",",","\"w\"","}",":","raise","ValueError","(","'open() requires mode \"r\" or \"w\"'",")","if","pwd","and","not","isinstance","(","pwd",",","bytes",")",":","raise","TypeError","(","\"pwd: expected bytes, got %s\"","%","type","(","pwd",")",".","__name__",")","if","pwd","and","(","mode","==","\"w\"",")",":","raise","ValueError","(","\"pwd is only supported for reading files\"",")","if","not","self",".","fp",":","raise","ValueError","(","\"Attempt to use ZIP archive that was already closed\"",")","# Make sure we have an info object","if","isinstance","(","name",",","ZipInfo",")",":","# 'name' is already an info object","zinfo","=","name","elif","mode","==","'w'",":","zinfo","=","ZipInfo","(","name",")","zinfo",".","compress_type","=","self",".","compression","else",":","# Get info object for name","zinfo","=","self",".","getinfo","(","name",")","if","mode","==","'w'",":","return","self",".","_open_to_write","(","zinfo",",","force_zip64","=","force_zip64",")","if","self",".","_writing",":","raise","ValueError","(","\"Can't read from the ZIP file while there \"","\"is an open writing handle 
on it. \"","\"Close the writing handle before trying to read.\"",")","# Open for reading:","self",".","_fileRefCnt","+=","1","zef_file","=","_SharedFile","(","self",".","fp",",","zinfo",".","header_offset",",","self",".","_fpclose",",","self",".","_lock",",","lambda",":","self",".","_writing",")","try",":","# Skip the file header:","fheader","=","zef_file",".","read","(","sizeFileHeader",")","if","len","(","fheader",")","!=","sizeFileHeader",":","raise","BadZipFile","(","\"Truncated file header\"",")","fheader","=","struct",".","unpack","(","structFileHeader",",","fheader",")","if","fheader","[","_FH_SIGNATURE","]","!=","stringFileHeader",":","raise","BadZipFile","(","\"Bad magic number for file header\"",")","fname","=","zef_file",".","read","(","fheader","[","_FH_FILENAME_LENGTH","]",")","if","fheader","[","_FH_EXTRA_FIELD_LENGTH","]",":","zef_file",".","read","(","fheader","[","_FH_EXTRA_FIELD_LENGTH","]",")","if","zinfo",".","flag_bits","&","0x20",":","# Zip 2.7: compressed patched data","raise","NotImplementedError","(","\"compressed patched data (flag bit 5)\"",")","if","zinfo",".","flag_bits","&","0x40",":","# strong encryption","raise","NotImplementedError","(","\"strong encryption (flag bit 6)\"",")","if","zinfo",".","flag_bits","&","0x800",":","# UTF-8 filename","fname_str","=","fname",".","decode","(","\"utf-8\"",")","else",":","fname_str","=","fname",".","decode","(","\"cp437\"",")","if","fname_str","!=","zinfo",".","orig_filename",":","raise","BadZipFile","(","'File name in directory %r and header %r differ.'","%","(","zinfo",".","orig_filename",",","fname",")",")","# check for encrypted flag & handle password","is_encrypted","=","zinfo",".","flag_bits","&","0x1","zd","=","None","if","is_encrypted",":","if","not","pwd",":","pwd","=","self",".","pwd","if","not","pwd",":","raise","RuntimeError","(","\"File %r is encrypted, password \"","\"required for extraction\"","%","name",")","zd","=","_ZipDecrypter","(","pwd",")","# The first 12 bytes in the cypher stream is an encryption header","# used to strengthen the algorithm. The first 11 bytes are","# completely random, while the 12th contains the MSB of the CRC,","# or the MSB of the file time depending on the header type","# and is used to check the correctness of the password.","header","=","zef_file",".","read","(","12",")","h","=","list","(","map","(","zd",",","header","[","0",":","12","]",")",")","if","zinfo",".","flag_bits","&","0x8",":","# compare against the file type from extended local headers","check_byte","=","(","zinfo",".","_raw_time",">>","8",")","&","0xff","else",":","# compare against the CRC otherwise","check_byte","=","(","zinfo",".","CRC",">>","24",")","&","0xff","if","h","[","11","]","!=","check_byte",":","raise","RuntimeError","(","\"Bad password for file %r\"","%","name",")","return","ZipExtFile","(","zef_file",",","mode",",","zinfo",",","zd",",","True",")","except",":","zef_file",".","close","(",")","raise"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1318-L1429"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.extract","parameters":"(self, member, path=None, pwd=None)","argument_list":"","return_statement":"return self._extract_member(member, path, pwd)","docstring":"Extract a member from the archive to the current working directory,\n using its full name. Its file information is extracted as accurately\n as possible. `member' may be a filename or a ZipInfo object. You can\n specify a different directory using `path'.","docstring_summary":"Extract a member from the archive to the current working directory,\n using its full name. Its file information is extracted as accurately\n as possible. `member' may be a filename or a ZipInfo object. You can\n specify a different directory using `path'.","docstring_tokens":["Extract","a","member","from","the","archive","to","the","current","working","directory","using","its","full","name",".","Its","file","information","is","extracted","as","accurately","as","possible",".","member","may","be","a","filename","or","a","ZipInfo","object",".","You","can","specify","a","different","directory","using","path","."],"function":"def extract(self, member, path=None, pwd=None):\n \"\"\"Extract a member from the archive to the current working directory,\n using its full name. Its file information is extracted as accurately\n as possible. `member' may be a filename or a ZipInfo object. You can\n specify a different directory using `path'.\n \"\"\"\n if path is None:\n path = os.getcwd()\n else:\n path = os.fspath(path)\n\n return self._extract_member(member, path, pwd)","function_tokens":["def","extract","(","self",",","member",",","path","=","None",",","pwd","=","None",")",":","if","path","is","None",":","path","=","os",".","getcwd","(",")","else",":","path","=","os",".","fspath","(","path",")","return","self",".","_extract_member","(","member",",","path",",","pwd",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1474-L1485"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.extractall","parameters":"(self, path=None, members=None, pwd=None)","argument_list":"","return_statement":"","docstring":"Extract all members from the archive to the current working\n directory. `path' specifies a different directory to extract to.\n `members' is optional and must be a subset of the list returned\n by namelist().","docstring_summary":"Extract all members from the archive to the current working\n directory. `path' specifies a different directory to extract to.\n `members' is optional and must be a subset of the list returned\n by namelist().","docstring_tokens":["Extract","all","members","from","the","archive","to","the","current","working","directory",".","path","specifies","a","different","directory","to","extract","to",".","members","is","optional","and","must","be","a","subset","of","the","list","returned","by","namelist","()","."],"function":"def extractall(self, path=None, members=None, pwd=None):\n \"\"\"Extract all members from the archive to the current working\n directory. `path' specifies a different directory to extract to.\n `members' is optional and must be a subset of the list returned\n by namelist().\n \"\"\"\n if members is None:\n members = self.namelist()\n\n if path is None:\n path = os.getcwd()\n else:\n path = os.fspath(path)\n\n for zipinfo in members:\n self._extract_member(zipinfo, path, pwd)","function_tokens":["def","extractall","(","self",",","path","=","None",",","members","=","None",",","pwd","=","None",")",":","if","members","is","None",":","members","=","self",".","namelist","(",")","if","path","is","None",":","path","=","os",".","getcwd","(",")","else",":","path","=","os",".","fspath","(","path",")","for","zipinfo","in","members",":","self",".","_extract_member","(","zipinfo",",","path",",","pwd",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1487-L1502"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile._sanitize_windows_name","parameters":"(cls, arcname, pathsep)","argument_list":"","return_statement":"return arcname","docstring":"Replace bad characters and remove trailing dots from parts.","docstring_summary":"Replace bad characters and remove trailing dots from parts.","docstring_tokens":["Replace","bad","characters","and","remove","trailing","dots","from","parts","."],"function":"def _sanitize_windows_name(cls, arcname, pathsep):\n \"\"\"Replace bad characters and remove trailing dots from parts.\"\"\"\n table = cls._windows_illegal_name_trans_table\n if not table:\n illegal = ':<>|\"?*'\n table = str.maketrans(illegal, '_' * len(illegal))\n cls._windows_illegal_name_trans_table = table\n arcname = arcname.translate(table)\n # remove trailing dots\n arcname = (x.rstrip('.') for x in arcname.split(pathsep))\n # rejoin, removing empty parts.\n arcname = pathsep.join(x for x in arcname if x)\n return arcname","function_tokens":["def","_sanitize_windows_name","(","cls",",","arcname",",","pathsep",")",":","table","=","cls",".","_windows_illegal_name_trans_table","if","not","table",":","illegal","=","':<>|\"?*'","table","=","str",".","maketrans","(","illegal",",","'_'","*","len","(","illegal",")",")","cls",".","_windows_illegal_name_trans_table","=","table","arcname","=","arcname",".","translate","(","table",")","# remove trailing dots","arcname","=","(","x",".","rstrip","(","'.'",")","for","x","in","arcname",".","split","(","pathsep",")",")","# rejoin, removing empty parts.","arcname","=","pathsep",".","join","(","x","for","x","in","arcname","if","x",")","return","arcname"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1505-L1517"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile._extract_member","parameters":"(self, member, targetpath, pwd)","argument_list":"","return_statement":"return targetpath","docstring":"Extract the ZipInfo object 'member' to a physical\n file on the path targetpath.","docstring_summary":"Extract the ZipInfo object 'member' to a physical\n file on the path targetpath.","docstring_tokens":["Extract","the","ZipInfo","object","member","to","a","physical","file","on","the","path","targetpath","."],"function":"def _extract_member(self, member, targetpath, pwd):\n \"\"\"Extract the ZipInfo object 'member' to a physical\n file on the path targetpath.\n \"\"\"\n if not isinstance(member, ZipInfo):\n member = self.getinfo(member)\n\n # build the destination pathname, replacing\n # forward slashes to platform specific separators.\n arcname = member.filename.replace('\/', os.path.sep)\n\n if os.path.altsep:\n arcname = arcname.replace(os.path.altsep, os.path.sep)\n # interpret absolute pathname as relative, remove drive letter or\n # UNC path, redundant separators, \".\" and \"..\" components.\n arcname = os.path.splitdrive(arcname)[1]\n invalid_path_parts = ('', os.path.curdir, os.path.pardir)\n arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)\n if x not in invalid_path_parts)\n if os.path.sep == '\\\\':\n # filter illegal characters on Windows\n arcname = self._sanitize_windows_name(arcname, os.path.sep)\n\n targetpath = os.path.join(targetpath, arcname)\n targetpath = os.path.normpath(targetpath)\n\n # Create all upper directories if necessary.\n upperdirs = os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n\n if member.is_dir():\n if not os.path.isdir(targetpath):\n os.mkdir(targetpath)\n return targetpath\n\n with self.open(member, pwd=pwd) as source, \\\n open(targetpath, \"wb\") as target:\n shutil.copyfileobj(source, target)\n\n return targetpath","function_tokens":["def","_extract_member","(","self",",","member",",","targetpath",",","pwd",")",":","if","not","isinstance","(","member",",","ZipInfo",")",":","member","=","self",".","getinfo","(","member",")","# build the destination pathname, replacing","# forward slashes to platform specific separators.","arcname","=","member",".","filename",".","replace","(","'\/'",",","os",".","path",".","sep",")","if","os",".","path",".","altsep",":","arcname","=","arcname",".","replace","(","os",".","path",".","altsep",",","os",".","path",".","sep",")","# interpret absolute pathname as relative, remove drive letter or","# UNC path, redundant separators, \".\" and \"..\" components.","arcname","=","os",".","path",".","splitdrive","(","arcname",")","[","1","]","invalid_path_parts","=","(","''",",","os",".","path",".","curdir",",","os",".","path",".","pardir",")","arcname","=","os",".","path",".","sep",".","join","(","x","for","x","in","arcname",".","split","(","os",".","path",".","sep",")","if","x","not","in","invalid_path_parts",")","if","os",".","path",".","sep","==","'\\\\'",":","# filter illegal characters on Windows","arcname","=","self",".","_sanitize_windows_name","(","arcname",",","os",".","path",".","sep",")","targetpath","=","os",".","path",".","join","(","targetpath",",","arcname",")","targetpath","=","os",".","path",".","normpath","(","targetpath",")","# Create all upper directories if 
necessary.","upperdirs","=","os",".","path",".","dirname","(","targetpath",")","if","upperdirs","and","not","os",".","path",".","exists","(","upperdirs",")",":","os",".","makedirs","(","upperdirs",")","if","member",".","is_dir","(",")",":","if","not","os",".","path",".","isdir","(","targetpath",")",":","os",".","mkdir","(","targetpath",")","return","targetpath","with","self",".","open","(","member",",","pwd","=","pwd",")","as","source",",","open","(","targetpath",",","\"wb\"",")","as","target",":","shutil",".","copyfileobj","(","source",",","target",")","return","targetpath"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1519-L1559"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile._writecheck","parameters":"(self, zinfo)","argument_list":"","return_statement":"","docstring":"Check for errors before writing a file to the archive.","docstring_summary":"Check for errors before writing a file to the archive.","docstring_tokens":["Check","for","errors","before","writing","a","file","to","the","archive","."],"function":"def _writecheck(self, zinfo):\n \"\"\"Check for errors before writing a file to the archive.\"\"\"\n if zinfo.filename in self.NameToInfo:\n import warnings\n warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)\n if self.mode not in ('w', 'x', 'a'):\n raise ValueError(\"write() requires mode 'w', 'x', or 'a'\")\n if not self.fp:\n raise ValueError(\n \"Attempt to write ZIP archive that was already closed\")\n _check_compression(zinfo.compress_type)\n if not self._allowZip64:\n requires_zip64 = None\n if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:\n requires_zip64 = \"Files count\"\n elif zinfo.file_size > ZIP64_LIMIT:\n requires_zip64 = \"Filesize\"\n elif zinfo.header_offset > ZIP64_LIMIT:\n requires_zip64 = \"Zipfile size\"\n if requires_zip64:\n raise LargeZipFile(requires_zip64 +\n \" would require ZIP64 extensions\")","function_tokens":["def","_writecheck","(","self",",","zinfo",")",":","if","zinfo",".","filename","in","self",".","NameToInfo",":","import","warnings","warnings",".","warn","(","'Duplicate name: %r'","%","zinfo",".","filename",",","stacklevel","=","3",")","if","self",".","mode","not","in","(","'w'",",","'x'",",","'a'",")",":","raise","ValueError","(","\"write() requires mode 'w', 'x', or 'a'\"",")","if","not","self",".","fp",":","raise","ValueError","(","\"Attempt to write ZIP archive that was already closed\"",")","_check_compression","(","zinfo",".","compress_type",")","if","not","self",".","_allowZip64",":","requires_zip64","=","None","if","len","(","self",".","filelist",")",">=","ZIP_FILECOUNT_LIMIT",":","requires_zip64","=","\"Files count\"","elif","zinfo",".","file_size",">","ZIP64_LIMIT",":","requires_zip64","=","\"Filesize\"","elif","zinfo",".","header_offset",">","ZIP64_LIMIT",":","requires_zip64","=","\"Zipfile size\"","if","requires_zip64",":","raise","LargeZipFile","(","requires_zip64","+","\" would require ZIP64 extensions\"",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1561-L1582"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.write","parameters":"(self, filename, arcname=None, compress_type=None, date_time=DEFAULT_DATE)","argument_list":"","return_statement":"","docstring":"Put the bytes from filename into the archive under the name\n arcname.","docstring_summary":"Put the bytes from filename into the archive under the name\n arcname.","docstring_tokens":["Put","the","bytes","from","filename","into","the","archive","under","the","name","arcname","."],"function":"def write(self, filename, arcname=None, compress_type=None, date_time=DEFAULT_DATE):\n \"\"\"Put the bytes from filename into the archive under the name\n arcname.\"\"\"\n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists\"\n )\n\n zinfo = ZipInfo.from_file(filename, arcname, date_time)\n\n if zinfo.is_dir():\n zinfo.compress_size = 0\n zinfo.CRC = 0\n else:\n if compress_type is not None:\n zinfo.compress_type = compress_type\n else:\n zinfo.compress_type = self.compression\n\n if zinfo.is_dir():\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset = self.fp.tell() # Start of header bytes\n if zinfo.compress_type == ZIP_LZMA:\n # Compressed data includes an end-of-stream (EOS) marker\n zinfo.flag_bits |= 0x02\n\n self._writecheck(zinfo)\n self._didModify = True\n\n self.filelist.append(zinfo)\n self.NameToInfo[zinfo.filename] = zinfo\n self.fp.write(zinfo.FileHeader(False))\n self.start_dir = self.fp.tell()\n else:\n with open(filename, \"rb\") as src, self.open(zinfo, 'w') as dest:\n shutil.copyfileobj(src, dest, 1024*8)","function_tokens":["def","write","(","self",",","filename",",","arcname","=","None",",","compress_type","=","None",",","date_time","=","DEFAULT_DATE",")",":","if","not","self",".","fp",":","raise","ValueError","(","\"Attempt to write to ZIP archive that was already closed\"",")","if","self",".","_writing",":","raise","ValueError","(","\"Can't write to ZIP archive while an open writing handle exists\"",")","zinfo","=","ZipInfo",".","from_file","(","filename",",","arcname",",","date_time",")","if","zinfo",".","is_dir","(",")",":","zinfo",".","compress_size","=","0","zinfo",".","CRC","=","0","else",":","if","compress_type","is","not","None",":","zinfo",".","compress_type","=","compress_type","else",":","zinfo",".","compress_type","=","self",".","compression","if","zinfo",".","is_dir","(",")",":","with","self",".","_lock",":","if","self",".","_seekable",":","self",".","fp",".","seek","(","self",".","start_dir",")","zinfo",".","header_offset","=","self",".","fp",".","tell","(",")","# Start of header bytes","if","zinfo",".","compress_type","==","ZIP_LZMA",":","# Compressed data includes an end-of-stream (EOS) 
marker","zinfo",".","flag_bits","|=","0x02","self",".","_writecheck","(","zinfo",")","self",".","_didModify","=","True","self",".","filelist",".","append","(","zinfo",")","self",".","NameToInfo","[","zinfo",".","filename","]","=","zinfo","self",".","fp",".","write","(","zinfo",".","FileHeader","(","False",")",")","self",".","start_dir","=","self",".","fp",".","tell","(",")","else",":","with","open","(","filename",",","\"rb\"",")","as","src",",","self",".","open","(","zinfo",",","'w'",")","as","dest",":","shutil",".","copyfileobj","(","src",",","dest",",","1024","*","8",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1584-L1624"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.writestr","parameters":"(self, zinfo_or_arcname, data, compress_type=None, date_time=DEFAULT_DATE)","argument_list":"","return_statement":"","docstring":"Write a file into the archive. The contents is 'data', which\n may be either a 'str' or a 'bytes' instance; if it is a 'str',\n it is encoded as UTF-8 first.\n 'zinfo_or_arcname' is either a ZipInfo instance or\n the name of the file in the archive.","docstring_summary":"Write a file into the archive. The contents is 'data', which\n may be either a 'str' or a 'bytes' instance; if it is a 'str',\n it is encoded as UTF-8 first.\n 'zinfo_or_arcname' is either a ZipInfo instance or\n the name of the file in the archive.","docstring_tokens":["Write","a","file","into","the","archive",".","The","contents","is","data","which","may","be","either","a","str","or","a","bytes","instance",";","if","it","is","a","str","it","is","encoded","as","UTF","-","8","first",".","zinfo_or_arcname","is","either","a","ZipInfo","instance","or","the","name","of","the","file","in","the","archive","."],"function":"def writestr(self, zinfo_or_arcname, data, compress_type=None, date_time=DEFAULT_DATE):\n \"\"\"Write a file into the archive. The contents is 'data', which\n may be either a 'str' or a 'bytes' instance; if it is a 'str',\n it is encoded as UTF-8 first.\n 'zinfo_or_arcname' is either a ZipInfo instance or\n the name of the file in the archive.\"\"\"\n if isinstance(data, str):\n data = data.encode(\"utf-8\")\n if not isinstance(zinfo_or_arcname, ZipInfo):\n zinfo = ZipInfo(filename=zinfo_or_arcname,\n date_time=date_time)\n zinfo.compress_type = self.compression\n if zinfo.filename[-1] == '\/':\n zinfo.external_attr = 0o40775 << 16 # drwxrwxr-x\n zinfo.external_attr |= 0x10 # MS-DOS directory flag\n else:\n zinfo.external_attr = 0o600 << 16 # ?rw-------\n else:\n zinfo = zinfo_or_arcname\n\n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists.\"\n )\n\n if compress_type is not None:\n zinfo.compress_type = compress_type\n\n zinfo.file_size = len(data) # Uncompressed size\n with self._lock:\n with self.open(zinfo, mode='w') as dest:\n dest.write(data)","function_tokens":["def","writestr","(","self",",","zinfo_or_arcname",",","data",",","compress_type","=","None",",","date_time","=","DEFAULT_DATE",")",":","if","isinstance","(","data",",","str",")",":","data","=","data",".","encode","(","\"utf-8\"",")","if","not","isinstance","(","zinfo_or_arcname",",","ZipInfo",")",":","zinfo","=","ZipInfo","(","filename","=","zinfo_or_arcname",",","date_time","=","date_time",")","zinfo",".","compress_type","=","self",".","compression","if","zinfo",".","filename","[","-","1","]","==","'\/'",":","zinfo",".","external_attr","=","0o40775","<<","16","# drwxrwxr-x","zinfo",".","external_attr","|=","0x10","# MS-DOS directory flag","else",":","zinfo",".","external_attr","=","0o600","<<","16","# ?rw-------","else",":","zinfo","=","zinfo_or_arcname","if","not","self",".","fp",":","raise","ValueError","(","\"Attempt to write to ZIP archive that was already closed\"",")","if","self",".","_writing",":","raise","ValueError","(","\"Can't write to ZIP archive while an open writing handle 
exists.\"",")","if","compress_type","is","not","None",":","zinfo",".","compress_type","=","compress_type","zinfo",".","file_size","=","len","(","data",")","# Uncompressed size","with","self",".","_lock",":","with","self",".","open","(","zinfo",",","mode","=","'w'",")","as","dest",":","dest",".","write","(","data",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1626-L1660"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.write_from_directory","parameters":"(self, directory, exclusions=None,\n compress_type=None, date_time=DEFAULT_DATE)","argument_list":"","return_statement":"","docstring":"Create a ZIP package deterministically from a directory.\n We need to sort the files in an OS-independent way before adding to the archive.","docstring_summary":"Create a ZIP package deterministically from a directory.\n We need to sort the files in an OS-independent way before adding to the archive.","docstring_tokens":["Create","a","ZIP","package","deterministically","from","a","directory",".","We","need","to","sort","the","files","in","an","OS","-","independent","way","before","adding","to","the","archive","."],"function":"def write_from_directory(self, directory, exclusions=None,\n compress_type=None, date_time=DEFAULT_DATE):\n \"\"\"\n Create a ZIP package deterministically from a directory.\n We need to sort the files in an OS-independent way before adding to the archive.\n \"\"\"\n file_dict = {}\n for root,subfolders,files in os.walk(directory):\n path_fragments = root.split(os.sep)\n path_nested = [os.path.join(*path_fragments[:x + 1]) for x, _ in enumerate(path_fragments)]\n\n if not set(path_nested) & set(exclusions):\n for fi in files:\n filename = os.path.join(root, fi)\n if filename not in exclusions:\n file_dict.update({filename: filename})\n for new_filename, old_filename in sorted(file_dict.items()):\n self.write(old_filename, compress_type=compress_type, date_time=date_time)","function_tokens":["def","write_from_directory","(","self",",","directory",",","exclusions","=","None",",","compress_type","=","None",",","date_time","=","DEFAULT_DATE",")",":","file_dict","=","{","}","for","root",",","subfolders",",","files","in","os",".","walk","(","directory",")",":","path_fragments","=","root",".","split","(","os",".","sep",")","path_nested","=","[","os",".","path",".","join","(","*","path_fragments","[",":","x","+","1","]",")","for","x",",","_","in","enumerate","(","path_fragments",")","]","if","not","set","(","path_nested",")","&","set","(","exclusions",")",":","for","fi","in","files",":","filename","=","os",".","path",".","join","(","root",",","fi",")","if","filename","not","in","exclusions",":","file_dict",".","update","(","{","filename",":","filename","}",")","for","new_filename",",","old_filename","in","sorted","(","file_dict",".","items","(",")",")",":","self",".","write","(","old_filename",",","compress_type","=","compress_type",",","date_time","=","date_time",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1662-L1679"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.__del__","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Call the \"close()\" method in case the user forgot.","docstring_summary":"Call the \"close()\" method in case the user forgot.","docstring_tokens":["Call","the","close","()","method","in","case","the","user","forgot","."],"function":"def __del__(self):\n \"\"\"Call the \"close()\" method in case the user forgot.\"\"\"\n self.close()","function_tokens":["def","__del__","(","self",")",":","self",".","close","(",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1682-L1684"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"ZipFile.close","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Close the file, and for mode 'w', 'x' and 'a' write the ending\n records.","docstring_summary":"Close the file, and for mode 'w', 'x' and 'a' write the ending\n records.","docstring_tokens":["Close","the","file","and","for","mode","w","x","and","a","write","the","ending","records","."],"function":"def close(self):\n \"\"\"Close the file, and for mode 'w', 'x' and 'a' write the ending\n records.\"\"\"\n if self.fp is None:\n return\n\n if self._writing:\n raise ValueError(\"Can't close the ZIP file while there is \"\n \"an open writing handle on it. \"\n \"Close the writing handle before closing the zip.\")\n\n try:\n if self.mode in ('w', 'x', 'a') and self._didModify: # write ending records\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n self._write_end_record()\n finally:\n fp = self.fp\n self.fp = None\n self._fpclose(fp)","function_tokens":["def","close","(","self",")",":","if","self",".","fp","is","None",":","return","if","self",".","_writing",":","raise","ValueError","(","\"Can't close the ZIP file while there is \"","\"an open writing handle on it. \"","\"Close the writing handle before closing the zip.\"",")","try",":","if","self",".","mode","in","(","'w'",",","'x'",",","'a'",")","and","self",".","_didModify",":","# write ending records","with","self",".","_lock",":","if","self",".","_seekable",":","self",".","fp",".","seek","(","self",".","start_dir",")","self",".","_write_end_record","(",")","finally",":","fp","=","self",".","fp","self",".","fp","=","None","self",".","_fpclose","(","fp",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1686-L1706"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"PyZipFile.writepy","parameters":"(self, pathname, basename=\"\", filterfunc=None)","argument_list":"","return_statement":"","docstring":"Add all files from \"pathname\" to the ZIP archive.\n\n If pathname is a package directory, search the directory and\n all package subdirectories recursively for all *.py and enter\n the modules into the archive. If pathname is a plain\n directory, listdir *.py and enter all modules. Else, pathname\n must be a Python *.py file and the module will be put into the\n archive. Added modules are always module.pyc.\n This method will compile the module.py into module.pyc if\n necessary.\n If filterfunc(pathname) is given, it is called with every argument.\n When it is False, the file or directory is skipped.","docstring_summary":"Add all files from \"pathname\" to the ZIP archive.","docstring_tokens":["Add","all","files","from","pathname","to","the","ZIP","archive","."],"function":"def writepy(self, pathname, basename=\"\", filterfunc=None):\n \"\"\"Add all files from \"pathname\" to the ZIP archive.\n\n If pathname is a package directory, search the directory and\n all package subdirectories recursively for all *.py and enter\n the modules into the archive. If pathname is a plain\n directory, listdir *.py and enter all modules. Else, pathname\n must be a Python *.py file and the module will be put into the\n archive. Added modules are always module.pyc.\n This method will compile the module.py into module.pyc if\n necessary.\n If filterfunc(pathname) is given, it is called with every argument.\n When it is False, the file or directory is skipped.\n \"\"\"\n pathname = os.fspath(pathname)\n if filterfunc and not filterfunc(pathname):\n if self.debug:\n label = 'path' if os.path.isdir(pathname) else 'file'\n print('%s %r skipped by filterfunc' % (label, pathname))\n return\n dir, name = os.path.split(pathname)\n if os.path.isdir(pathname):\n initname = os.path.join(pathname, \"__init__.py\")\n if os.path.isfile(initname):\n # This is a package directory, add it\n if basename:\n basename = \"%s\/%s\" % (basename, name)\n else:\n basename = name\n if self.debug:\n print(\"Adding package in\", pathname, \"as\", basename)\n fname, arcname = self._get_codename(initname[0:-3], basename)\n if self.debug:\n print(\"Adding\", arcname)\n self.write(fname, arcname)\n dirlist = os.listdir(pathname)\n dirlist.remove(\"__init__.py\")\n # Add all *.py files and package subdirectories\n for filename in dirlist:\n path = os.path.join(pathname, filename)\n root, ext = os.path.splitext(filename)\n if os.path.isdir(path):\n if os.path.isfile(os.path.join(path, \"__init__.py\")):\n # This is a package directory, add it\n self.writepy(path, basename,\n filterfunc=filterfunc) # Recursive call\n elif ext == \".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc' % path)\n continue\n fname, arcname = self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\", arcname)\n self.write(fname, arcname)\n else:\n # This is NOT a package directory, add its files at top level\n if self.debug:\n print(\"Adding files from directory\", pathname)\n for filename in os.listdir(pathname):\n path = os.path.join(pathname, filename)\n root, ext = os.path.splitext(filename)\n if ext == \".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by 
filterfunc' % path)\n continue\n fname, arcname = self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\", arcname)\n self.write(fname, arcname)\n else:\n if pathname[-3:] != \".py\":\n raise RuntimeError(\n 'Files added with writepy() must end with \".py\"')\n fname, arcname = self._get_codename(pathname[0:-3], basename)\n if self.debug:\n print(\"Adding file\", arcname)\n self.write(fname, arcname)","function_tokens":["def","writepy","(","self",",","pathname",",","basename","=","\"\"",",","filterfunc","=","None",")",":","pathname","=","os",".","fspath","(","pathname",")","if","filterfunc","and","not","filterfunc","(","pathname",")",":","if","self",".","debug",":","label","=","'path'","if","os",".","path",".","isdir","(","pathname",")","else","'file'","print","(","'%s %r skipped by filterfunc'","%","(","label",",","pathname",")",")","return","dir",",","name","=","os",".","path",".","split","(","pathname",")","if","os",".","path",".","isdir","(","pathname",")",":","initname","=","os",".","path",".","join","(","pathname",",","\"__init__.py\"",")","if","os",".","path",".","isfile","(","initname",")",":","# This is a package directory, add it","if","basename",":","basename","=","\"%s\/%s\"","%","(","basename",",","name",")","else",":","basename","=","name","if","self",".","debug",":","print","(","\"Adding package in\"",",","pathname",",","\"as\"",",","basename",")","fname",",","arcname","=","self",".","_get_codename","(","initname","[","0",":","-","3","]",",","basename",")","if","self",".","debug",":","print","(","\"Adding\"",",","arcname",")","self",".","write","(","fname",",","arcname",")","dirlist","=","os",".","listdir","(","pathname",")","dirlist",".","remove","(","\"__init__.py\"",")","# Add all *.py files and package subdirectories","for","filename","in","dirlist",":","path","=","os",".","path",".","join","(","pathname",",","filename",")","root",",","ext","=","os",".","path",".","splitext","(","filename",")","if","os",".","path",".","isdir","(","path",")",":","if","os",".","path",".","isfile","(","os",".","path",".","join","(","path",",","\"__init__.py\"",")",")",":","# This is a package directory, add it","self",".","writepy","(","path",",","basename",",","filterfunc","=","filterfunc",")","# Recursive call","elif","ext","==","\".py\"",":","if","filterfunc","and","not","filterfunc","(","path",")",":","if","self",".","debug",":","print","(","'file %r skipped by filterfunc'","%","path",")","continue","fname",",","arcname","=","self",".","_get_codename","(","path","[","0",":","-","3","]",",","basename",")","if","self",".","debug",":","print","(","\"Adding\"",",","arcname",")","self",".","write","(","fname",",","arcname",")","else",":","# This is NOT a package directory, add its files at top level","if","self",".","debug",":","print","(","\"Adding files from directory\"",",","pathname",")","for","filename","in","os",".","listdir","(","pathname",")",":","path","=","os",".","path",".","join","(","pathname",",","filename",")","root",",","ext","=","os",".","path",".","splitext","(","filename",")","if","ext","==","\".py\"",":","if","filterfunc","and","not","filterfunc","(","path",")",":","if","self",".","debug",":","print","(","'file %r skipped by 
filterfunc'","%","path",")","continue","fname",",","arcname","=","self",".","_get_codename","(","path","[","0",":","-","3","]",",","basename",")","if","self",".","debug",":","print","(","\"Adding\"",",","arcname",")","self",".","write","(","fname",",","arcname",")","else",":","if","pathname","[","-","3",":","]","!=","\".py\"",":","raise","RuntimeError","(","'Files added with writepy() must end with \".py\"'",")","fname",",","arcname","=","self",".","_get_codename","(","pathname","[","0",":","-","3","]",",","basename",")","if","self",".","debug",":","print","(","\"Adding file\"",",","arcname",")","self",".","write","(","fname",",","arcname",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1825-L1905"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/zipfile_deterministic.py","language":"python","identifier":"PyZipFile._get_codename","parameters":"(self, pathname, basename)","argument_list":"","return_statement":"return (fname, archivename)","docstring":"Return (filename, archivename) for the path.\n\n Given a module name path, return the correct file path and\n archive name, compiling if necessary. For example, given\n \/python\/lib\/string, return (\/python\/lib\/string.pyc, string).","docstring_summary":"Return (filename, archivename) for the path.","docstring_tokens":["Return","(","filename","archivename",")","for","the","path","."],"function":"def _get_codename(self, pathname, basename):\n \"\"\"Return (filename, archivename) for the path.\n\n Given a module name path, return the correct file path and\n archive name, compiling if necessary. For example, given\n \/python\/lib\/string, return (\/python\/lib\/string.pyc, string).\n \"\"\"\n def _compile(file, optimize=-1):\n import py_compile\n if self.debug:\n print(\"Compiling\", file)\n try:\n py_compile.compile(file, doraise=True, optimize=optimize)\n except py_compile.PyCompileError as err:\n print(err.msg)\n return False\n return True\n\n file_py = pathname + \".py\"\n file_pyc = pathname + \".pyc\"\n pycache_opt0 = importlib.util.cache_from_source(file_py, optimization='')\n pycache_opt1 = importlib.util.cache_from_source(file_py, optimization=1)\n pycache_opt2 = importlib.util.cache_from_source(file_py, optimization=2)\n if self._optimize == -1:\n # legacy mode: use whatever file is present\n if (os.path.isfile(file_pyc) and\n os.stat(file_pyc).st_mtime >= os.stat(file_py).st_mtime):\n # Use .pyc file.\n arcname = fname = file_pyc\n elif (os.path.isfile(pycache_opt0) and\n os.stat(pycache_opt0).st_mtime >= os.stat(file_py).st_mtime):\n # Use the __pycache__\/*.pyc file, but write it to the legacy pyc\n # file name in the archive.\n fname = pycache_opt0\n arcname = file_pyc\n elif (os.path.isfile(pycache_opt1) and\n os.stat(pycache_opt1).st_mtime >= os.stat(file_py).st_mtime):\n # Use the __pycache__\/*.pyc file, but write it to the legacy pyc\n # file name in the archive.\n fname = pycache_opt1\n arcname = file_pyc\n elif (os.path.isfile(pycache_opt2) and\n os.stat(pycache_opt2).st_mtime >= os.stat(file_py).st_mtime):\n # Use the __pycache__\/*.pyc file, but write it to the legacy pyc\n # file name in the archive.\n fname = pycache_opt2\n arcname = file_pyc\n else:\n # Compile py into PEP 3147 pyc file.\n if _compile(file_py):\n if sys.flags.optimize == 0:\n fname = pycache_opt0\n elif sys.flags.optimize == 1:\n fname = pycache_opt1\n else:\n fname = pycache_opt2\n arcname = file_pyc\n else:\n fname = arcname = file_py\n else:\n # new mode: use given optimization level\n if self._optimize == 0:\n fname = pycache_opt0\n arcname = file_pyc\n else:\n arcname = file_pyc\n if self._optimize == 1:\n fname = pycache_opt1\n elif self._optimize == 2:\n fname = pycache_opt2\n else:\n msg = \"invalid value for 'optimize': {!r}\".format(self._optimize)\n raise ValueError(msg)\n if not (os.path.isfile(fname) and\n os.stat(fname).st_mtime >= os.stat(file_py).st_mtime):\n if not _compile(file_py, optimize=self._optimize):\n fname = arcname = file_py\n archivename = os.path.split(arcname)[1]\n if basename:\n archivename = \"%s\/%s\" % (basename, archivename)\n return (fname, 
archivename)","function_tokens":["def","_get_codename","(","self",",","pathname",",","basename",")",":","def","_compile","(","file",",","optimize","=","-","1",")",":","import","py_compile","if","self",".","debug",":","print","(","\"Compiling\"",",","file",")","try",":","py_compile",".","compile","(","file",",","doraise","=","True",",","optimize","=","optimize",")","except","py_compile",".","PyCompileError","as","err",":","print","(","err",".","msg",")","return","False","return","True","file_py","=","pathname","+","\".py\"","file_pyc","=","pathname","+","\".pyc\"","pycache_opt0","=","importlib",".","util",".","cache_from_source","(","file_py",",","optimization","=","''",")","pycache_opt1","=","importlib",".","util",".","cache_from_source","(","file_py",",","optimization","=","1",")","pycache_opt2","=","importlib",".","util",".","cache_from_source","(","file_py",",","optimization","=","2",")","if","self",".","_optimize","==","-","1",":","# legacy mode: use whatever file is present","if","(","os",".","path",".","isfile","(","file_pyc",")","and","os",".","stat","(","file_pyc",")",".","st_mtime",">=","os",".","stat","(","file_py",")",".","st_mtime",")",":","# Use .pyc file.","arcname","=","fname","=","file_pyc","elif","(","os",".","path",".","isfile","(","pycache_opt0",")","and","os",".","stat","(","pycache_opt0",")",".","st_mtime",">=","os",".","stat","(","file_py",")",".","st_mtime",")",":","# Use the __pycache__\/*.pyc file, but write it to the legacy pyc","# file name in the archive.","fname","=","pycache_opt0","arcname","=","file_pyc","elif","(","os",".","path",".","isfile","(","pycache_opt1",")","and","os",".","stat","(","pycache_opt1",")",".","st_mtime",">=","os",".","stat","(","file_py",")",".","st_mtime",")",":","# Use the __pycache__\/*.pyc file, but write it to the legacy pyc","# file name in the archive.","fname","=","pycache_opt1","arcname","=","file_pyc","elif","(","os",".","path",".","isfile","(","pycache_opt2",")","and","os",".","stat","(","pycache_opt2",")",".","st_mtime",">=","os",".","stat","(","file_py",")",".","st_mtime",")",":","# Use the __pycache__\/*.pyc file, but write it to the legacy pyc","# file name in the archive.","fname","=","pycache_opt2","arcname","=","file_pyc","else",":","# Compile py into PEP 3147 pyc file.","if","_compile","(","file_py",")",":","if","sys",".","flags",".","optimize","==","0",":","fname","=","pycache_opt0","elif","sys",".","flags",".","optimize","==","1",":","fname","=","pycache_opt1","else",":","fname","=","pycache_opt2","arcname","=","file_pyc","else",":","fname","=","arcname","=","file_py","else",":","# new mode: use given optimization level","if","self",".","_optimize","==","0",":","fname","=","pycache_opt0","arcname","=","file_pyc","else",":","arcname","=","file_pyc","if","self",".","_optimize","==","1",":","fname","=","pycache_opt1","elif","self",".","_optimize","==","2",":","fname","=","pycache_opt2","else",":","msg","=","\"invalid value for 'optimize': 
{!r}\"",".","format","(","self",".","_optimize",")","raise","ValueError","(","msg",")","if","not","(","os",".","path",".","isfile","(","fname",")","and","os",".","stat","(","fname",")",".","st_mtime",">=","os",".","stat","(","file_py",")",".","st_mtime",")",":","if","not","_compile","(","file_py",",","optimize","=","self",".","_optimize",")",":","fname","=","arcname","=","file_py","archivename","=","os",".","path",".","split","(","arcname",")","[","1","]","if","basename",":","archivename","=","\"%s\/%s\"","%","(","basename",",","archivename",")","return","(","fname",",","archivename",")"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/zipfile_deterministic.py#L1907-L1987"}
{"nwo":"EFForg\/https-everywhere","sha":"5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16","path":"utils\/merge-rulesets.py","language":"python","identifier":"normalize","parameters":"(f)","argument_list":"","return_statement":"return f","docstring":"OSX and Linux filesystems encode composite characters differently in\n filenames. We should normalize to NFC: http:\/\/unicode.org\/reports\/tr15\/","docstring_summary":"OSX and Linux filesystems encode composite characters differently in\n filenames. We should normalize to NFC: http:\/\/unicode.org\/reports\/tr15\/","docstring_tokens":["OSX","and","Linux","filesystems","encode","composite","characters","differently","in","filenames",".","We","should","normalize","to","NFC",":","http",":","\/\/","unicode",".","org","\/","reports","\/","tr15","\/"],"function":"def normalize(f):\n \"\"\"\n OSX and Linux filesystems encode composite characters differently in\n filenames. We should normalize to NFC: http:\/\/unicode.org\/reports\/tr15\/\n \"\"\"\n f = unicodedata.normalize(\"NFC\", f)\n return f","function_tokens":["def","normalize","(","f",")",":","f","=","unicodedata",".","normalize","(","\"NFC\"",",","f",")","return","f"],"url":"https:\/\/github.com\/EFForg\/https-everywhere\/blob\/5fe2d3b24ca028b02a0079bc2ec5e555e9ce4c16\/utils\/merge-rulesets.py#L17-L23"}