#!/usr/bin/env python3
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""

version = "0.9.0"
__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."

#---------
# Imports
#---------
from builtins import open as bltn_open
import sys
import os
import io
import shutil
import stat
import time
import struct
import copy
import re

try:
    import pwd
except ImportError:
    pwd = None
try:
    import grp
except ImportError:
    grp = None

# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
    # OSError (winerror=1314) will be raised if the caller does not hold the
    # SeCreateSymbolicLinkPrivilege privilege
    symlink_exception += (OSError,)
except NameError:
    pass

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
           "CompressionError", "StreamError", "ExtractError", "HeaderError",
           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
           "DEFAULT_FORMAT", "open"]

#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name == "nt":
    ENCODING = "utf-8"
else:
    ENCODING = sys.getfilesystemencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------

def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.
    """
    s = s.encode(encoding, errors)
    return s[:length] + (length - len(s)) * NUL

def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.
    """
    p = s.find(b"\0")
    if p != -1:
        s = s[:p]
    return s.decode(encoding, errors)

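# Illustrative sketch (not part of the original module): stn() pads or
# truncates to the fixed field size and nts() reverses it by cutting at the
# first NUL, e.g.
#
#     stn("foo", 8, "utf-8", "strict")                    -> b"foo\x00\x00\x00\x00\x00"
#     nts(b"foo\x00\x00\x00\x00\x00", "utf-8", "strict")  -> "foo"
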
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] in (0o200, 0o377):
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += s[i + 1]
        if s[0] == 0o377:
            n = -(256 ** (len(s) - 1) - n)
    else:
        try:
            s = nts(s, "ascii", "strict")
            n = int(s.strip() or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 or 0o377 byte indicates this
    # particular encoding, the following digits-1 bytes are a big-endian
    # base-256 representation. This allows values up to (256**(digits-1))-1.
    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
    # number.
    if 0 <= n < 8 ** (digits - 1):
        s = bytes("%0*o" % (digits - 1, int(n)), "ascii") + NUL
    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
        if n >= 0:
            s = bytearray([0o200])
        else:
            s = bytearray([0o377])
            n = 256 ** digits + n

        for i in range(digits - 1):
            s.insert(1, n & 0o377)
            n >>= 8
    else:
        raise ValueError("overflow in number field")

    return s

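# Illustrative sketch (not part of the original module): small values get the
# plain octal encoding, while the GNU extension switches to base-256 for
# values that do not fit or are negative, e.g.
#
#     itn(501)                    -> b"0000765\x00"   # octal digits + NUL
#     itn(-1, 8, GNU_FORMAT)      -> b"\xff" * 8      # 0o377 marker, base-256
#     nti(itn(-1, 8, GNU_FORMAT)) -> -1               # nti() reverses both forms
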
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
    return unsigned_chksum, signed_chksum

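# Illustrative note (an assumption spelled out, not in the original module):
# the format "148B8x356B" covers one 512-byte header block -- 148 bytes before
# the chksum field, the 8-byte chksum field itself (skipped), and the
# remaining 356 bytes.  The leading 256 stands in for the skipped field,
# counted as eight ASCII spaces (8 * 0x20 == 256).
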
def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.
    """
    bufsize = bufsize or 16 * 1024
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst, bufsize)
        return

    blocks, remainder = divmod(length, bufsize)
    for b in range(blocks):
        buf = src.read(bufsize)
        if len(buf) < bufsize:
            raise exception("unexpected end of data")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise exception("unexpected end of data")
        dst.write(buf)
    return

def filemode(mode):
    """Deprecated in this location; use stat.filemode."""
    import warnings
    warnings.warn("deprecated in favor of stat.filemode",
                  DeprecationWarning, 2)
    return stat.filemode(mode)

def _safe_print(s):
    encoding = getattr(sys.stdout, 'encoding', None)
    if encoding is not None:
        s = s.encode(encoding, 'backslashreplace').decode(encoding)
    print(s, end=' ')


class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass

#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0o666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)

class _Stream:
    """Class that serves as an adapter between TarFile and
       a stream-like object. The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise. Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = b""
        self.pos = 0
        self.closed = False

        try:
            if comptype == "gz":
                try:
                    import zlib
                except ImportError:
                    raise CompressionError("zlib module is not available")
                self.zlib = zlib
                self.crc = zlib.crc32(b"")
                if mode == "r":
                    self._init_read_gz()
                    self.exception = zlib.error
                else:
                    self._init_write_gz()

            elif comptype == "bz2":
                try:
                    import bz2
                except ImportError:
                    raise CompressionError("bz2 module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = bz2.BZ2Decompressor()
                    self.exception = OSError
                else:
                    self.cmp = bz2.BZ2Compressor()

            elif comptype == "xz":
                try:
                    import lzma
                except ImportError:
                    raise CompressionError("lzma module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = lzma.LZMADecompressor()
                    self.exception = lzma.LZMAError
                else:
                    self.cmp = lzma.LZMACompressor()

            elif comptype != "tar":
                raise CompressionError("unknown compression type %r" % comptype)

        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", int(time.time()))
        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
           is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return

        self.closed = True
        try:
            if self.mode == "w" and self.comptype != "tar":
                self.buf += self.cmp.flush()

            if self.mode == "w" and self.buf:
                self.fileobj.write(self.buf)
                self.buf = b""
                if self.comptype == "gz":
                    self.fileobj.write(struct.pack("<L", self.crc))
                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
        finally:
            if not self._extfileobj:
                self.fileobj.close()

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = b""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != b"\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != b"\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)

        if flag & 4:
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
           is forbidden.
        """
        if pos - self.pos >= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
           If size is not defined, return all bytes of the stream
           up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = b"".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except self.exception:
                raise ReadError("invalid compressed data")
            self.dbuf += buf
            c += len(buf)
        buf = self.dbuf[:size]
        self.dbuf = self.dbuf[size:]
        return buf

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            self.buf += buf
            c += len(buf)
        buf = self.buf[:size]
        self.buf = self.buf[size:]
        return buf
# class _Stream

class _StreamProxy(object):
    """Small proxy class that enables transparent compression
       detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        if self.buf.startswith(b"\x1f\x8b\x08"):
            return "gz"
        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
            return "bz2"
        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
            return "xz"
        else:
            return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy

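# Illustrative sketch (not part of the original module): getcomptype() only
# looks at the magic bytes of the buffered first block, so for example a
# buffer produced by the standard gzip module is detected as "gz":
#
#     import gzip, io
#     _StreamProxy(io.BytesIO(gzip.compress(b"data"))).getcomptype()  -> "gz"
#     _StreamProxy(io.BytesIO(b"anything else")).getcomptype()        -> "tar"
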
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.position = 0
        self.name = getattr(fileobj, "name", None)
        self.closed = False

        if blockinfo is None:
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks.
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            self.map.append((False, lastpos, self.size, None))

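    # Illustrative sketch (not part of the original module): for a sparse
    # member with data at offsets 0 (3 bytes) and 10 (2 bytes) of a 12-byte
    # file, blockinfo=[(0, 3), (10, 2)] produces
    #
    #     [(True, 0, 3, offset), (False, 3, 10, None), (True, 10, 12, offset + 3)]
    #
    # i.e. (is_data, start, stop, position_in_archive) tuples, where offset is
    # the constructor argument; the False entries read back as NUL bytes in
    # read() below.
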
    def flush(self):
        pass

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position, whence=io.SEEK_SET):
        """Seek to a position in the file.
        """
        if whence == io.SEEK_SET:
            self.position = min(max(position, 0), self.size)
        elif whence == io.SEEK_CUR:
            if position < 0:
                self.position = max(self.position + position, 0)
            else:
                self.position = min(self.position + position, self.size)
        elif whence == io.SEEK_END:
            self.position = max(min(self.size + position, self.size), 0)
        else:
            raise ValueError("Invalid argument")
        return self.position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                self.fileobj.seek(offset + (self.position - start))
                b = self.fileobj.read(length)
                if len(b) != length:
                    raise ReadError("unexpected end of data")
                buf += b
            else:
                buf += NUL * length
            size -= length
            self.position += length
        return buf

    def readinto(self, b):
        buf = self.read(len(b))
        b[:len(buf)] = buf
        return len(buf)

    def close(self):
        self.closed = True
#class _FileInFile

class ExFileObject(io.BufferedReader):

    def __init__(self, tarfile, tarinfo):
        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
                              tarinfo.size, tarinfo.sparse)
        super().__init__(fileobj)
#class ExFileObject

#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
       TarInfo objects are returned by TarFile.getmember(),
       TarFile.getmembers() and TarFile.gettarinfo() and are
       usually created internally.
    """

    __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
                 "chksum", "type", "linkname", "uname", "gname",
                 "devmajor", "devminor",
                 "offset", "offset_data", "pax_headers", "sparse",
                 "tarfile", "_sparse_structs", "_link_target")

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
           of the member.
        """
        self.name = name        # member name
        self.mode = 0o644       # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = ""         # user name
        self.gname = ""         # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.sparse = None      # sparse member information
        self.pax_headers = {}   # pax header information

    # In pax headers the "name" and "linkname" fields are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))

    def get_info(self):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 0o7777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info()

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info, encoding, errors)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info, encoding, errors)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding)
        else:
            raise ValueError("invalid format")
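
    # Illustrative sketch (not part of the original module): for short names
    # every format yields at least one complete 512-byte block, e.g.
    #
    #     t = TarInfo("hello.txt")
    #     t.size = 5
    #     len(t.tobuf())  -> 512   (a single GNU-format header block)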

    def create_ustar_header(self, info, encoding, errors):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)

        return self._create_header(info, USTAR_FORMAT, encoding, errors)

    def create_gnu_header(self, info, encoding, errors):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = b""
        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)

        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)

        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)

    def create_pax_header(self, info, encoding):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplemental information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or values
        # that have to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
        else:
            buf = b""

        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")

    def _posix_split_name(self, name, encoding, errors):
        """Split a name longer than 100 chars into a prefix
           and a name part.
        """
        components = name.split("/")
        for i in range(1, len(components)):
            prefix = "/".join(components[:i])
            name = "/".join(components[i:])
            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
                break
        else:
            raise ValueError("name is too long")

        return prefix, name

    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ",            # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
        return buf
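
    # Illustrative note (an assumption spelled out, not in the original
    # module): parts[] lays the fields out at the standard ustar offsets, so
    # the chksum field occupies bytes 148-155 of the block.  buf[:-364] is
    # buf[:148] and buf[-357:] is buf[155:], so the slicing above overwrites
    # seven of the eight placeholder spaces with six octal digits plus a NUL
    # and leaves the trailing space in place.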

    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
           up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type, encoding, errors):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
           for name.
        """
        name = name.encode(encoding, errors) + NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
               cls._create_payload(name)

    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be strings.
        """
        # Check if one of the fields contains surrogate characters and thereby
        # forces hdrcharset=BINARY, see _proc_pax() for more information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf-8", "strict")
            except UnicodeEncodeError:
                binary = True
                break

        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"

        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf-8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Needless to say, the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf-8")

            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
               cls._create_payload(records)
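
    # Illustrative sketch (not part of the original module): the length field
    # of a pax record counts the whole record, including its own digits, which
    # is what the small fixed-point loop above computes.  For keyword "path"
    # and value "foo", l = 4 + 3 + 3 = 10 and the loop settles on p = 12,
    # giving the record b"12 path=foo\n" -- exactly 12 bytes.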

    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj

    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
           tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
           the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
           or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs

        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[504])
        self.sparse = structs

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize
        return self

    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
           POSIX.1-2008.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Check if the pax header contains a hdrcharset field. This tells us
| 1201 | n/a | # the encoding of the path, linkpath, uname and gname fields. Normally, |
|---|
| 1202 | n/a | # these fields are UTF-8 encoded, but since POSIX.1-2008, tar
|---|
| 1203 | n/a | # implementations are allowed to store them as raw binary strings if |
|---|
| 1204 | n/a | # the translation to UTF-8 fails. |
|---|
| 1205 | n/a | match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) |
|---|
| 1206 | n/a | if match is not None: |
|---|
| 1207 | n/a | pax_headers["hdrcharset"] = match.group(1).decode("utf-8") |
|---|
| 1208 | n/a | |
|---|
| 1209 | n/a | # For the time being, we don't care about anything other than "BINARY". |
|---|
| 1210 | n/a | # The only other value that is currently allowed by the standard is |
|---|
| 1211 | n/a | # "ISO-IR 10646 2000 UTF-8", in other words UTF-8.
|---|
| 1212 | n/a | hdrcharset = pax_headers.get("hdrcharset") |
|---|
| 1213 | n/a | if hdrcharset == "BINARY": |
|---|
| 1214 | n/a | encoding = tarfile.encoding |
|---|
| 1215 | n/a | else: |
|---|
| 1216 | n/a | encoding = "utf-8" |
|---|
| 1217 | n/a | |
|---|
| 1218 | n/a | # Parse pax header information. A record looks like this:
|---|
| 1219 | n/a | # "%d %s=%s\n" % (length, keyword, value). length is the size |
|---|
| 1220 | n/a | # of the complete record including the length field itself and |
|---|
| 1221 | n/a | # the newline. keyword and value are both UTF-8 encoded strings. |
|---|
| 1222 | n/a | regex = re.compile(br"(\d+) ([^=]+)=") |
|---|
| 1223 | n/a | pos = 0 |
|---|
| 1224 | n/a | while True: |
|---|
| 1225 | n/a | match = regex.match(buf, pos) |
|---|
| 1226 | n/a | if not match: |
|---|
| 1227 | n/a | break |
|---|
| 1228 | n/a | |
|---|
| 1229 | n/a | length, keyword = match.groups() |
|---|
| 1230 | n/a | length = int(length) |
|---|
| 1231 | n/a | value = buf[match.end(2) + 1:match.start(1) + length - 1] |
|---|
| 1232 | n/a | |
|---|
| 1233 | n/a | # Normally, we could just use "utf-8" as the encoding and "strict" |
|---|
| 1234 | n/a | # as the error handler, but we'd better not take the risk. For
|---|
| 1235 | n/a | # example, GNU tar <= 1.23 is known to store filenames it cannot |
|---|
| 1236 | n/a | # translate to UTF-8 as raw strings (unfortunately without a |
|---|
| 1237 | n/a | # hdrcharset=BINARY header). |
|---|
| 1238 | n/a | # We first try the strict standard encoding, and if that fails we |
|---|
| 1239 | n/a | # fall back on the user's encoding and error handler. |
|---|
| 1240 | n/a | keyword = self._decode_pax_field(keyword, "utf-8", "utf-8", |
|---|
| 1241 | n/a | tarfile.errors) |
|---|
| 1242 | n/a | if keyword in PAX_NAME_FIELDS: |
|---|
| 1243 | n/a | value = self._decode_pax_field(value, encoding, tarfile.encoding, |
|---|
| 1244 | n/a | tarfile.errors) |
|---|
| 1245 | n/a | else: |
|---|
| 1246 | n/a | value = self._decode_pax_field(value, "utf-8", "utf-8", |
|---|
| 1247 | n/a | tarfile.errors) |
|---|
| 1248 | n/a | |
|---|
| 1249 | n/a | pax_headers[keyword] = value |
|---|
| 1250 | n/a | pos += length |
|---|
| 1251 | n/a | |
|---|
| 1252 | n/a | # Fetch the next header. |
|---|
| 1253 | n/a | try: |
|---|
| 1254 | n/a | next = self.fromtarfile(tarfile) |
|---|
| 1255 | n/a | except HeaderError: |
|---|
| 1256 | n/a | raise SubsequentHeaderError("missing or bad subsequent header") |
|---|
| 1257 | n/a | |
|---|
| 1258 | n/a | # Process GNU sparse information. |
|---|
| 1259 | n/a | if "GNU.sparse.map" in pax_headers: |
|---|
| 1260 | n/a | # GNU extended sparse format version 0.1. |
|---|
| 1261 | n/a | self._proc_gnusparse_01(next, pax_headers) |
|---|
| 1262 | n/a | |
|---|
| 1263 | n/a | elif "GNU.sparse.size" in pax_headers: |
|---|
| 1264 | n/a | # GNU extended sparse format version 0.0. |
|---|
| 1265 | n/a | self._proc_gnusparse_00(next, pax_headers, buf) |
|---|
| 1266 | n/a | |
|---|
| 1267 | n/a | elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": |
|---|
| 1268 | n/a | # GNU extended sparse format version 1.0. |
|---|
| 1269 | n/a | self._proc_gnusparse_10(next, pax_headers, tarfile) |
|---|
| 1270 | n/a | |
|---|
| 1271 | n/a | if self.type in (XHDTYPE, SOLARIS_XHDTYPE): |
|---|
| 1272 | n/a | # Patch the TarInfo object with the extended header info. |
|---|
| 1273 | n/a | next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) |
|---|
| 1274 | n/a | next.offset = self.offset |
|---|
| 1275 | n/a | |
|---|
| 1276 | n/a | if "size" in pax_headers: |
|---|
| 1277 | n/a | # If the extended header replaces the size field, |
|---|
| 1278 | n/a | # we need to recalculate the offset where the next |
|---|
| 1279 | n/a | # header starts. |
|---|
| 1280 | n/a | offset = next.offset_data |
|---|
| 1281 | n/a | if next.isreg() or next.type not in SUPPORTED_TYPES: |
|---|
| 1282 | n/a | offset += next._block(next.size) |
|---|
| 1283 | n/a | tarfile.offset = offset |
|---|
| 1284 | n/a | |
|---|
| 1285 | n/a | return next |
|---|
| 1286 | n/a | |
|---|
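
A minimal sketch of the record layout parsed here. `make_pax_record()` is a hypothetical helper, not part of the module; the self-including length field is computed by iterating to a fixed point, and the slice bounds mirror the parsing loop above for a record starting at position 0:

```python
import re

def make_pax_record(keyword: str, value: str) -> bytes:
    """Hypothetical helper: build one pax record "%d %s=%s\n"."""
    body = (" %s=%s\n" % (keyword, value)).encode("utf-8")
    # The length field counts the whole record, its own digits included,
    # so iterate until the digit count stabilises.
    length = len(body)
    while len(str(length)) + len(body) != length:
        length = len(str(length)) + len(body)
    return str(length).encode("ascii") + body

record = make_pax_record("path", "a/very/long/filename.txt")
# The leading decimal equals the total record length, newline included.
assert len(record) == int(record.split(b" ", 1)[0])

# The same regex used above recovers keyword and value
# (the record starts at position 0, so match.start(1) == 0).
match = re.match(br"(\d+) ([^=]+)=", record)
keyword = match.group(2).decode("utf-8")
value = record[match.end(2) + 1:int(match.group(1)) - 1].decode("utf-8")
assert (keyword, value) == ("path", "a/very/long/filename.txt")
```
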
| 1287 | n/a | def _proc_gnusparse_00(self, next, pax_headers, buf): |
|---|
| 1288 | n/a | """Process a GNU tar extended sparse header, version 0.0. |
|---|
| 1289 | n/a | """ |
|---|
| 1290 | n/a | offsets = [] |
|---|
| 1291 | n/a | for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): |
|---|
| 1292 | n/a | offsets.append(int(match.group(1))) |
|---|
| 1293 | n/a | numbytes = [] |
|---|
| 1294 | n/a | for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): |
|---|
| 1295 | n/a | numbytes.append(int(match.group(1))) |
|---|
| 1296 | n/a | next.sparse = list(zip(offsets, numbytes)) |
|---|
| 1297 | n/a | |
|---|
| 1298 | n/a | def _proc_gnusparse_01(self, next, pax_headers): |
|---|
| 1299 | n/a | """Process a GNU tar extended sparse header, version 0.1. |
|---|
| 1300 | n/a | """ |
|---|
| 1301 | n/a | sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] |
|---|
| 1302 | n/a | next.sparse = list(zip(sparse[::2], sparse[1::2])) |
|---|
| 1303 | n/a | |
|---|
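
For illustration, the "GNU.sparse.map" value is a flat comma-separated list of alternating offsets and byte counts; the same stride slicing used in _proc_gnusparse_01() recovers the pairs (the map below is made up):

```python
# A made-up version 0.1 sparse map: two data regions of 512 bytes each,
# at offsets 0 and 4096 of the restored file.
pax_headers = {"GNU.sparse.map": "0,512,4096,512"}

values = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
sparse = list(zip(values[::2], values[1::2]))
assert sparse == [(0, 512), (4096, 512)]
```
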
| 1304 | n/a | def _proc_gnusparse_10(self, next, pax_headers, tarfile): |
|---|
| 1305 | n/a | """Process a GNU tar extended sparse header, version 1.0. |
|---|
| 1306 | n/a | """ |
|---|
| 1307 | n/a | fields = None |
|---|
| 1308 | n/a | sparse = [] |
|---|
| 1309 | n/a | buf = tarfile.fileobj.read(BLOCKSIZE) |
|---|
| 1310 | n/a | fields, buf = buf.split(b"\n", 1) |
|---|
| 1311 | n/a | fields = int(fields) |
|---|
| 1312 | n/a | while len(sparse) < fields * 2: |
|---|
| 1313 | n/a | if b"\n" not in buf: |
|---|
| 1314 | n/a | buf += tarfile.fileobj.read(BLOCKSIZE) |
|---|
| 1315 | n/a | number, buf = buf.split(b"\n", 1) |
|---|
| 1316 | n/a | sparse.append(int(number)) |
|---|
| 1317 | n/a | next.offset_data = tarfile.fileobj.tell() |
|---|
| 1318 | n/a | next.sparse = list(zip(sparse[::2], sparse[1::2])) |
|---|
| 1319 | n/a | |
|---|
| 1320 | n/a | def _apply_pax_info(self, pax_headers, encoding, errors): |
|---|
| 1321 | n/a | """Replace fields with supplemental information from a previous |
|---|
| 1322 | n/a | pax extended or global header. |
|---|
| 1323 | n/a | """ |
|---|
| 1324 | n/a | for keyword, value in pax_headers.items(): |
|---|
| 1325 | n/a | if keyword == "GNU.sparse.name": |
|---|
| 1326 | n/a | setattr(self, "path", value) |
|---|
| 1327 | n/a | elif keyword == "GNU.sparse.size": |
|---|
| 1328 | n/a | setattr(self, "size", int(value)) |
|---|
| 1329 | n/a | elif keyword == "GNU.sparse.realsize": |
|---|
| 1330 | n/a | setattr(self, "size", int(value)) |
|---|
| 1331 | n/a | elif keyword in PAX_FIELDS: |
|---|
| 1332 | n/a | if keyword in PAX_NUMBER_FIELDS: |
|---|
| 1333 | n/a | try: |
|---|
| 1334 | n/a | value = PAX_NUMBER_FIELDS[keyword](value) |
|---|
| 1335 | n/a | except ValueError: |
|---|
| 1336 | n/a | value = 0 |
|---|
| 1337 | n/a | if keyword == "path": |
|---|
| 1338 | n/a | value = value.rstrip("/") |
|---|
| 1339 | n/a | setattr(self, keyword, value) |
|---|
| 1340 | n/a | |
|---|
| 1341 | n/a | self.pax_headers = pax_headers.copy() |
|---|
| 1342 | n/a | |
|---|
| 1343 | n/a | def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): |
|---|
| 1344 | n/a | """Decode a single field from a pax record. |
|---|
| 1345 | n/a | """ |
|---|
| 1346 | n/a | try: |
|---|
| 1347 | n/a | return value.decode(encoding, "strict") |
|---|
| 1348 | n/a | except UnicodeDecodeError: |
|---|
| 1349 | n/a | return value.decode(fallback_encoding, fallback_errors) |
|---|
| 1350 | n/a | |
|---|
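
A small sketch of the try-strict-then-fall-back behaviour, assuming for simplicity that both the standard and the fallback encoding are UTF-8 and that the archive was opened with the default errors="surrogateescape" handler:

```python
raw = b"caf\xe9.txt"   # e.g. a Latin-1 filename that is not valid UTF-8

try:
    name = raw.decode("utf-8", "strict")
except UnicodeDecodeError:
    # Fall back the way _decode_pax_field() does with the user's handler.
    name = raw.decode("utf-8", "surrogateescape")

assert name == "caf\udce9.txt"                       # raw byte survives
assert name.encode("utf-8", "surrogateescape") == raw  # and round-trips
```
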
| 1351 | n/a | def _block(self, count): |
|---|
| 1352 | n/a | """Round up a byte count by BLOCKSIZE and return it, |
|---|
| 1353 | n/a | e.g. _block(834) => 1024. |
|---|
| 1354 | n/a | """ |
|---|
| 1355 | n/a | blocks, remainder = divmod(count, BLOCKSIZE) |
|---|
| 1356 | n/a | if remainder: |
|---|
| 1357 | n/a | blocks += 1 |
|---|
| 1358 | n/a | return blocks * BLOCKSIZE |
|---|
| 1359 | n/a | |
|---|
| 1360 | n/a | def isreg(self): |
|---|
| 1361 | n/a | return self.type in REGULAR_TYPES |
|---|
| 1362 | n/a | def isfile(self): |
|---|
| 1363 | n/a | return self.isreg() |
|---|
| 1364 | n/a | def isdir(self): |
|---|
| 1365 | n/a | return self.type == DIRTYPE |
|---|
| 1366 | n/a | def issym(self): |
|---|
| 1367 | n/a | return self.type == SYMTYPE |
|---|
| 1368 | n/a | def islnk(self): |
|---|
| 1369 | n/a | return self.type == LNKTYPE |
|---|
| 1370 | n/a | def ischr(self): |
|---|
| 1371 | n/a | return self.type == CHRTYPE |
|---|
| 1372 | n/a | def isblk(self): |
|---|
| 1373 | n/a | return self.type == BLKTYPE |
|---|
| 1374 | n/a | def isfifo(self): |
|---|
| 1375 | n/a | return self.type == FIFOTYPE |
|---|
| 1376 | n/a | def issparse(self): |
|---|
| 1377 | n/a | return self.sparse is not None |
|---|
| 1378 | n/a | def isdev(self): |
|---|
| 1379 | n/a | return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) |
|---|
| 1380 | n/a | # class TarInfo |
|---|
| 1381 | n/a | |
|---|
| 1382 | n/a | class TarFile(object): |
|---|
| 1383 | n/a | """The TarFile Class provides an interface to tar archives. |
|---|
| 1384 | n/a | """ |
|---|
| 1385 | n/a | |
|---|
| 1386 | n/a | debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) |
|---|
| 1387 | n/a | |
|---|
| 1388 | n/a | dereference = False # If true, add content of linked file to the |
|---|
| 1389 | n/a | # tar file, else the link. |
|---|
| 1390 | n/a | |
|---|
| 1391 | n/a | ignore_zeros = False # If true, skips empty or invalid blocks and |
|---|
| 1392 | n/a | # continues processing. |
|---|
| 1393 | n/a | |
|---|
| 1394 | n/a | errorlevel = 1 # If 0, fatal errors only appear in debug |
|---|
| 1395 | n/a | # messages (if debug >= 0). If > 0, errors |
|---|
| 1396 | n/a | # are passed to the caller as exceptions. |
|---|
| 1397 | n/a | |
|---|
| 1398 | n/a | format = DEFAULT_FORMAT # The format to use when creating an archive. |
|---|
| 1399 | n/a | |
|---|
| 1400 | n/a | encoding = ENCODING # Encoding for 8-bit character strings. |
|---|
| 1401 | n/a | |
|---|
| 1402 | n/a | errors = None # Error handler for unicode conversion. |
|---|
| 1403 | n/a | |
|---|
| 1404 | n/a | tarinfo = TarInfo # The default TarInfo class to use. |
|---|
| 1405 | n/a | |
|---|
| 1406 | n/a | fileobject = ExFileObject # The file-object for extractfile(). |
|---|
| 1407 | n/a | |
|---|
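
These class attributes are only defaults: constructor keyword arguments override them per instance, and a subclass can change them project-wide. A brief sketch (the archive name in the commented call is hypothetical):

```python
import tarfile

# Per-call overrides via keyword arguments:
# tf = tarfile.open("example.tar", "r",
#                   ignore_zeros=True,   # tolerate stray NUL blocks
#                   errorlevel=2,        # raise ExtractError as well
#                   debug=1)

# Project-wide defaults via a subclass; unset constructor arguments
# fall back to these class attributes.
class LenientTarFile(tarfile.TarFile):
    ignore_zeros = True
    errorlevel = 0
```
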
| 1408 | n/a | def __init__(self, name=None, mode="r", fileobj=None, format=None, |
|---|
| 1409 | n/a | tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, |
|---|
| 1410 | n/a | errors="surrogateescape", pax_headers=None, debug=None, |
|---|
| 1411 | n/a | errorlevel=None, copybufsize=None): |
|---|
| 1412 | n/a | """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
|---|
| 1413 | n/a | read from an existing archive, 'a' to append data to an existing
|---|
| 1414 | n/a | file, 'w' to create a new file overwriting an existing one, or 'x'
|---|
| 1415 | n/a | to create a new file that must not exist yet. `mode' defaults to 'r'.
|---|
| 1416 | n/a | If `fileobj' is given, it is used for reading or writing data. If it
|---|
| 1417 | n/a | can be determined, `mode' is overridden by `fileobj's mode.
|---|
| 1418 | n/a | `fileobj' is not closed when TarFile is closed.
|---|
| 1419 | n/a | """ |
|---|
| 1420 | n/a | modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"} |
|---|
| 1421 | n/a | if mode not in modes: |
|---|
| 1422 | n/a | raise ValueError("mode must be 'r', 'a', 'w' or 'x'") |
|---|
| 1423 | n/a | self.mode = mode |
|---|
| 1424 | n/a | self._mode = modes[mode] |
|---|
| 1425 | n/a | |
|---|
| 1426 | n/a | if not fileobj: |
|---|
| 1427 | n/a | if self.mode == "a" and not os.path.exists(name): |
|---|
| 1428 | n/a | # Create nonexistent files in append mode. |
|---|
| 1429 | n/a | self.mode = "w" |
|---|
| 1430 | n/a | self._mode = "wb" |
|---|
| 1431 | n/a | fileobj = bltn_open(name, self._mode) |
|---|
| 1432 | n/a | self._extfileobj = False |
|---|
| 1433 | n/a | else: |
|---|
| 1434 | n/a | if (name is None and hasattr(fileobj, "name") and |
|---|
| 1435 | n/a | isinstance(fileobj.name, (str, bytes))): |
|---|
| 1436 | n/a | name = fileobj.name |
|---|
| 1437 | n/a | if hasattr(fileobj, "mode"): |
|---|
| 1438 | n/a | self._mode = fileobj.mode |
|---|
| 1439 | n/a | self._extfileobj = True |
|---|
| 1440 | n/a | self.name = os.path.abspath(name) if name else None |
|---|
| 1441 | n/a | self.fileobj = fileobj |
|---|
| 1442 | n/a | |
|---|
| 1443 | n/a | # Init attributes. |
|---|
| 1444 | n/a | if format is not None: |
|---|
| 1445 | n/a | self.format = format |
|---|
| 1446 | n/a | if tarinfo is not None: |
|---|
| 1447 | n/a | self.tarinfo = tarinfo |
|---|
| 1448 | n/a | if dereference is not None: |
|---|
| 1449 | n/a | self.dereference = dereference |
|---|
| 1450 | n/a | if ignore_zeros is not None: |
|---|
| 1451 | n/a | self.ignore_zeros = ignore_zeros |
|---|
| 1452 | n/a | if encoding is not None: |
|---|
| 1453 | n/a | self.encoding = encoding |
|---|
| 1454 | n/a | self.errors = errors |
|---|
| 1455 | n/a | |
|---|
| 1456 | n/a | if pax_headers is not None and self.format == PAX_FORMAT: |
|---|
| 1457 | n/a | self.pax_headers = pax_headers |
|---|
| 1458 | n/a | else: |
|---|
| 1459 | n/a | self.pax_headers = {} |
|---|
| 1460 | n/a | |
|---|
| 1461 | n/a | if debug is not None: |
|---|
| 1462 | n/a | self.debug = debug |
|---|
| 1463 | n/a | if errorlevel is not None: |
|---|
| 1464 | n/a | self.errorlevel = errorlevel |
|---|
| 1465 | n/a | |
|---|
| 1466 | n/a | # Init datastructures. |
|---|
| 1467 | n/a | self.copybufsize = copybufsize |
|---|
| 1468 | n/a | self.closed = False |
|---|
| 1469 | n/a | self.members = [] # list of members as TarInfo objects |
|---|
| 1470 | n/a | self._loaded = False # flag if all members have been read |
|---|
| 1471 | n/a | self.offset = self.fileobj.tell() |
|---|
| 1472 | n/a | # current position in the archive file |
|---|
| 1473 | n/a | self.inodes = {} # dictionary caching the inodes of |
|---|
| 1474 | n/a | # archive members already added |
|---|
| 1475 | n/a | |
|---|
| 1476 | n/a | try: |
|---|
| 1477 | n/a | if self.mode == "r": |
|---|
| 1478 | n/a | self.firstmember = None |
|---|
| 1479 | n/a | self.firstmember = self.next() |
|---|
| 1480 | n/a | |
|---|
| 1481 | n/a | if self.mode == "a": |
|---|
| 1482 | n/a | # Move to the end of the archive, |
|---|
| 1483 | n/a | # before the first empty block. |
|---|
| 1484 | n/a | while True: |
|---|
| 1485 | n/a | self.fileobj.seek(self.offset) |
|---|
| 1486 | n/a | try: |
|---|
| 1487 | n/a | tarinfo = self.tarinfo.fromtarfile(self) |
|---|
| 1488 | n/a | self.members.append(tarinfo) |
|---|
| 1489 | n/a | except EOFHeaderError: |
|---|
| 1490 | n/a | self.fileobj.seek(self.offset) |
|---|
| 1491 | n/a | break |
|---|
| 1492 | n/a | except HeaderError as e: |
|---|
| 1493 | n/a | raise ReadError(str(e)) |
|---|
| 1494 | n/a | |
|---|
| 1495 | n/a | if self.mode in ("a", "w", "x"): |
|---|
| 1496 | n/a | self._loaded = True |
|---|
| 1497 | n/a | |
|---|
| 1498 | n/a | if self.pax_headers: |
|---|
| 1499 | n/a | buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) |
|---|
| 1500 | n/a | self.fileobj.write(buf) |
|---|
| 1501 | n/a | self.offset += len(buf) |
|---|
| 1502 | n/a | except: |
|---|
| 1503 | n/a | if not self._extfileobj: |
|---|
| 1504 | n/a | self.fileobj.close() |
|---|
| 1505 | n/a | self.closed = True |
|---|
| 1506 | n/a | raise |
|---|
| 1507 | n/a | |
|---|
| 1508 | n/a | #-------------------------------------------------------------------------- |
|---|
| 1509 | n/a | # Below are the classmethods which act as alternate constructors to the |
|---|
| 1510 | n/a | # TarFile class. The open() method is the only one that is needed for |
|---|
| 1511 | n/a | # public use; it is the "super"-constructor and is able to select an |
|---|
| 1512 | n/a | # adequate "sub"-constructor for a particular compression using the mapping |
|---|
| 1513 | n/a | # from OPEN_METH. |
|---|
| 1514 | n/a | # |
|---|
| 1515 | n/a | # This concept allows one to subclass TarFile without losing the comfort of |
|---|
| 1516 | n/a | # the super-constructor. A sub-constructor is registered and made available |
|---|
| 1517 | n/a | # by adding it to the mapping in OPEN_METH. |
|---|
| 1518 | n/a | |
|---|
| 1519 | n/a | @classmethod |
|---|
| 1520 | n/a | def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): |
|---|
| 1521 | n/a | """Open a tar archive for reading, writing or appending. Return |
|---|
| 1522 | n/a | an appropriate TarFile object.
|---|
| 1523 | n/a | |
|---|
| 1524 | n/a | mode: |
|---|
| 1525 | n/a | 'r' or 'r:*' open for reading with transparent compression |
|---|
| 1526 | n/a | 'r:' open for reading exclusively uncompressed |
|---|
| 1527 | n/a | 'r:gz' open for reading with gzip compression |
|---|
| 1528 | n/a | 'r:bz2' open for reading with bzip2 compression |
|---|
| 1529 | n/a | 'r:xz' open for reading with lzma compression |
|---|
| 1530 | n/a | 'a' or 'a:' open for appending, creating the file if necessary |
|---|
| 1531 | n/a | 'w' or 'w:' open for writing without compression |
|---|
| 1532 | n/a | 'w:gz' open for writing with gzip compression |
|---|
| 1533 | n/a | 'w:bz2' open for writing with bzip2 compression |
|---|
| 1534 | n/a | 'w:xz' open for writing with lzma compression |
|---|
| 1535 | n/a | |
|---|
| 1536 | n/a | 'x' or 'x:' create a tarfile exclusively without compression, raise
|---|
| 1537 | n/a | an exception if the file already exists
|---|
| 1538 | n/a | 'x:gz' create a gzip compressed tarfile, raise an exception
|---|
| 1539 | n/a | if the file already exists
|---|
| 1540 | n/a | 'x:bz2' create a bzip2 compressed tarfile, raise an exception
|---|
| 1541 | n/a | if the file already exists
|---|
| 1542 | n/a | 'x:xz' create an lzma compressed tarfile, raise an exception
|---|
| 1543 | n/a | if the file already exists
|---|
| 1544 | n/a | |
|---|
| 1545 | n/a | 'r|*' open a stream of tar blocks with transparent compression |
|---|
| 1546 | n/a | 'r|' open an uncompressed stream of tar blocks for reading |
|---|
| 1547 | n/a | 'r|gz' open a gzip compressed stream of tar blocks |
|---|
| 1548 | n/a | 'r|bz2' open a bzip2 compressed stream of tar blocks |
|---|
| 1549 | n/a | 'r|xz' open an lzma compressed stream of tar blocks |
|---|
| 1550 | n/a | 'w|' open an uncompressed stream for writing |
|---|
| 1551 | n/a | 'w|gz' open a gzip compressed stream for writing |
|---|
| 1552 | n/a | 'w|bz2' open a bzip2 compressed stream for writing |
|---|
| 1553 | n/a | 'w|xz' open an lzma compressed stream for writing |
|---|
| 1554 | n/a | """ |
|---|
| 1555 | n/a | |
|---|
| 1556 | n/a | if not name and not fileobj: |
|---|
| 1557 | n/a | raise ValueError("nothing to open") |
|---|
| 1558 | n/a | |
|---|
| 1559 | n/a | if mode in ("r", "r:*"): |
|---|
| 1560 | n/a | # Find out which *open() is appropriate for opening the file. |
|---|
| 1561 | n/a | def not_compressed(comptype): |
|---|
| 1562 | n/a | return cls.OPEN_METH[comptype] == 'taropen' |
|---|
| 1563 | n/a | for comptype in sorted(cls.OPEN_METH, key=not_compressed): |
|---|
| 1564 | n/a | func = getattr(cls, cls.OPEN_METH[comptype]) |
|---|
| 1565 | n/a | if fileobj is not None: |
|---|
| 1566 | n/a | saved_pos = fileobj.tell() |
|---|
| 1567 | n/a | try: |
|---|
| 1568 | n/a | return func(name, "r", fileobj, **kwargs) |
|---|
| 1569 | n/a | except (ReadError, CompressionError): |
|---|
| 1570 | n/a | if fileobj is not None: |
|---|
| 1571 | n/a | fileobj.seek(saved_pos) |
|---|
| 1572 | n/a | continue |
|---|
| 1573 | n/a | raise ReadError("file could not be opened successfully") |
|---|
| 1574 | n/a | |
|---|
| 1575 | n/a | elif ":" in mode: |
|---|
| 1576 | n/a | filemode, comptype = mode.split(":", 1) |
|---|
| 1577 | n/a | filemode = filemode or "r" |
|---|
| 1578 | n/a | comptype = comptype or "tar" |
|---|
| 1579 | n/a | |
|---|
| 1580 | n/a | # Select the *open() function according to |
|---|
| 1581 | n/a | # given compression. |
|---|
| 1582 | n/a | if comptype in cls.OPEN_METH: |
|---|
| 1583 | n/a | func = getattr(cls, cls.OPEN_METH[comptype]) |
|---|
| 1584 | n/a | else: |
|---|
| 1585 | n/a | raise CompressionError("unknown compression type %r" % comptype) |
|---|
| 1586 | n/a | return func(name, filemode, fileobj, **kwargs) |
|---|
| 1587 | n/a | |
|---|
| 1588 | n/a | elif "|" in mode: |
|---|
| 1589 | n/a | filemode, comptype = mode.split("|", 1) |
|---|
| 1590 | n/a | filemode = filemode or "r" |
|---|
| 1591 | n/a | comptype = comptype or "tar" |
|---|
| 1592 | n/a | |
|---|
| 1593 | n/a | if filemode not in ("r", "w"): |
|---|
| 1594 | n/a | raise ValueError("mode must be 'r' or 'w'") |
|---|
| 1595 | n/a | |
|---|
| 1596 | n/a | stream = _Stream(name, filemode, comptype, fileobj, bufsize) |
|---|
| 1597 | n/a | try: |
|---|
| 1598 | n/a | t = cls(name, filemode, stream, **kwargs) |
|---|
| 1599 | n/a | except: |
|---|
| 1600 | n/a | stream.close() |
|---|
| 1601 | n/a | raise |
|---|
| 1602 | n/a | t._extfileobj = False |
|---|
| 1603 | n/a | return t |
|---|
| 1604 | n/a | |
|---|
| 1605 | n/a | elif mode in ("a", "w", "x"): |
|---|
| 1606 | n/a | return cls.taropen(name, mode, fileobj, **kwargs) |
|---|
| 1607 | n/a | |
|---|
| 1608 | n/a | raise ValueError("undiscernible mode") |
|---|
| 1609 | n/a | |
|---|
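
A runnable sketch of the mode strings above: write a gzip-compressed archive to an in-memory buffer, then read it back with transparent compression detection ('r' behaves like 'r:*'). The member name and payload are made up; tarfile.open() is the module-level alias for this classmethod.

```python
import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w:gz") as tf:
    data = b"hello world\n"
    info = tarfile.TarInfo("hello.txt")
    info.size = len(data)                 # addfile() reads exactly this many bytes
    tf.addfile(info, io.BytesIO(data))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tf:   # compression auto-detected
    print(tf.getnames())                          # ['hello.txt']
```
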
| 1610 | n/a | @classmethod |
|---|
| 1611 | n/a | def taropen(cls, name, mode="r", fileobj=None, **kwargs): |
|---|
| 1612 | n/a | """Open uncompressed tar archive name for reading or writing. |
|---|
| 1613 | n/a | """ |
|---|
| 1614 | n/a | if mode not in ("r", "a", "w", "x"): |
|---|
| 1615 | n/a | raise ValueError("mode must be 'r', 'a', 'w' or 'x'") |
|---|
| 1616 | n/a | return cls(name, mode, fileobj, **kwargs) |
|---|
| 1617 | n/a | |
|---|
| 1618 | n/a | @classmethod |
|---|
| 1619 | n/a | def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): |
|---|
| 1620 | n/a | """Open gzip compressed tar archive name for reading or writing. |
|---|
| 1621 | n/a | Appending is not allowed. |
|---|
| 1622 | n/a | """ |
|---|
| 1623 | n/a | if mode not in ("r", "w", "x"): |
|---|
| 1624 | n/a | raise ValueError("mode must be 'r', 'w' or 'x'") |
|---|
| 1625 | n/a | |
|---|
| 1626 | n/a | try: |
|---|
| 1627 | n/a | import gzip |
|---|
| 1628 | n/a | gzip.GzipFile |
|---|
| 1629 | n/a | except (ImportError, AttributeError): |
|---|
| 1630 | n/a | raise CompressionError("gzip module is not available") |
|---|
| 1631 | n/a | |
|---|
| 1632 | n/a | try: |
|---|
| 1633 | n/a | fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) |
|---|
| 1634 | n/a | except OSError: |
|---|
| 1635 | n/a | if fileobj is not None and mode == 'r': |
|---|
| 1636 | n/a | raise ReadError("not a gzip file") |
|---|
| 1637 | n/a | raise |
|---|
| 1638 | n/a | |
|---|
| 1639 | n/a | try: |
|---|
| 1640 | n/a | t = cls.taropen(name, mode, fileobj, **kwargs) |
|---|
| 1641 | n/a | except OSError: |
|---|
| 1642 | n/a | fileobj.close() |
|---|
| 1643 | n/a | if mode == 'r': |
|---|
| 1644 | n/a | raise ReadError("not a gzip file") |
|---|
| 1645 | n/a | raise |
|---|
| 1646 | n/a | except: |
|---|
| 1647 | n/a | fileobj.close() |
|---|
| 1648 | n/a | raise |
|---|
| 1649 | n/a | t._extfileobj = False |
|---|
| 1650 | n/a | return t |
|---|
| 1651 | n/a | |
|---|
| 1652 | n/a | @classmethod |
|---|
| 1653 | n/a | def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): |
|---|
| 1654 | n/a | """Open bzip2 compressed tar archive name for reading or writing. |
|---|
| 1655 | n/a | Appending is not allowed. |
|---|
| 1656 | n/a | """ |
|---|
| 1657 | n/a | if mode not in ("r", "w", "x"): |
|---|
| 1658 | n/a | raise ValueError("mode must be 'r', 'w' or 'x'") |
|---|
| 1659 | n/a | |
|---|
| 1660 | n/a | try: |
|---|
| 1661 | n/a | import bz2 |
|---|
| 1662 | n/a | except ImportError: |
|---|
| 1663 | n/a | raise CompressionError("bz2 module is not available") |
|---|
| 1664 | n/a | |
|---|
| 1665 | n/a | fileobj = bz2.BZ2File(fileobj or name, mode, |
|---|
| 1666 | n/a | compresslevel=compresslevel) |
|---|
| 1667 | n/a | |
|---|
| 1668 | n/a | try: |
|---|
| 1669 | n/a | t = cls.taropen(name, mode, fileobj, **kwargs) |
|---|
| 1670 | n/a | except (OSError, EOFError): |
|---|
| 1671 | n/a | fileobj.close() |
|---|
| 1672 | n/a | if mode == 'r': |
|---|
| 1673 | n/a | raise ReadError("not a bzip2 file") |
|---|
| 1674 | n/a | raise |
|---|
| 1675 | n/a | except: |
|---|
| 1676 | n/a | fileobj.close() |
|---|
| 1677 | n/a | raise |
|---|
| 1678 | n/a | t._extfileobj = False |
|---|
| 1679 | n/a | return t |
|---|
| 1680 | n/a | |
|---|
| 1681 | n/a | @classmethod |
|---|
| 1682 | n/a | def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs): |
|---|
| 1683 | n/a | """Open lzma compressed tar archive name for reading or writing. |
|---|
| 1684 | n/a | Appending is not allowed. |
|---|
| 1685 | n/a | """ |
|---|
| 1686 | n/a | if mode not in ("r", "w", "x"): |
|---|
| 1687 | n/a | raise ValueError("mode must be 'r', 'w' or 'x'") |
|---|
| 1688 | n/a | |
|---|
| 1689 | n/a | try: |
|---|
| 1690 | n/a | import lzma |
|---|
| 1691 | n/a | except ImportError: |
|---|
| 1692 | n/a | raise CompressionError("lzma module is not available") |
|---|
| 1693 | n/a | |
|---|
| 1694 | n/a | fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset) |
|---|
| 1695 | n/a | |
|---|
| 1696 | n/a | try: |
|---|
| 1697 | n/a | t = cls.taropen(name, mode, fileobj, **kwargs) |
|---|
| 1698 | n/a | except (lzma.LZMAError, EOFError): |
|---|
| 1699 | n/a | fileobj.close() |
|---|
| 1700 | n/a | if mode == 'r': |
|---|
| 1701 | n/a | raise ReadError("not an lzma file") |
|---|
| 1702 | n/a | raise |
|---|
| 1703 | n/a | except: |
|---|
| 1704 | n/a | fileobj.close() |
|---|
| 1705 | n/a | raise |
|---|
| 1706 | n/a | t._extfileobj = False |
|---|
| 1707 | n/a | return t |
|---|
| 1708 | n/a | |
|---|
| 1709 | n/a | # All *open() methods are registered here. |
|---|
| 1710 | n/a | OPEN_METH = { |
|---|
| 1711 | n/a | "tar": "taropen", # uncompressed tar |
|---|
| 1712 | n/a | "gz": "gzopen", # gzip compressed tar |
|---|
| 1713 | n/a | "bz2": "bz2open", # bzip2 compressed tar |
|---|
| 1714 | n/a | "xz": "xzopen" # lzma compressed tar |
|---|
| 1715 | n/a | } |
|---|
| 1716 | n/a | |
|---|
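
A sketch of the registration mechanism described above: open() resolves the keyword after ':' or '|' through cls.OPEN_METH and calls the named classmethod, so a subclass can extend the mapping. The 'plain' keyword below is an invented alias that simply reuses taropen(); a real sub-constructor would wrap its codec's file object and then call cls.taropen(), the way gzopen()/bz2open()/xzopen() do.

```python
import tarfile

class AliasTarFile(tarfile.TarFile):
    """Sketch only: register an extra keyword in the OPEN_METH mapping."""
    OPEN_METH = dict(tarfile.TarFile.OPEN_METH, plain="taropen")

# "r:plain" now resolves through the new mapping entry
# (archive name is hypothetical):
# with AliasTarFile.open("example.tar", "r:plain") as tf:
#     print(tf.getnames())
```
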
| 1717 | n/a | #-------------------------------------------------------------------------- |
|---|
| 1718 | n/a | # The public methods which TarFile provides: |
|---|
| 1719 | n/a | |
|---|
| 1720 | n/a | def close(self): |
|---|
| 1721 | n/a | """Close the TarFile. In write-mode, two finishing zero blocks are |
|---|
| 1722 | n/a | appended to the archive. |
|---|
| 1723 | n/a | """ |
|---|
| 1724 | n/a | if self.closed: |
|---|
| 1725 | n/a | return |
|---|
| 1726 | n/a | |
|---|
| 1727 | n/a | self.closed = True |
|---|
| 1728 | n/a | try: |
|---|
| 1729 | n/a | if self.mode in ("a", "w", "x"): |
|---|
| 1730 | n/a | self.fileobj.write(NUL * (BLOCKSIZE * 2)) |
|---|
| 1731 | n/a | self.offset += (BLOCKSIZE * 2) |
|---|
| 1732 | n/a | # fill up the end with zero-blocks |
|---|
| 1733 | n/a | # (like option -b20 for tar does) |
|---|
| 1734 | n/a | blocks, remainder = divmod(self.offset, RECORDSIZE) |
|---|
| 1735 | n/a | if remainder > 0: |
|---|
| 1736 | n/a | self.fileobj.write(NUL * (RECORDSIZE - remainder)) |
|---|
| 1737 | n/a | finally: |
|---|
| 1738 | n/a | if not self._extfileobj: |
|---|
| 1739 | n/a | self.fileobj.close() |
|---|
| 1740 | n/a | |
|---|
| 1741 | n/a | def getmember(self, name): |
|---|
| 1742 | n/a | """Return a TarInfo object for member `name'. If `name' cannot be
|---|
| 1743 | n/a | found in the archive, KeyError is raised. If a member occurs more |
|---|
| 1744 | n/a | than once in the archive, its last occurrence is assumed to be the |
|---|
| 1745 | n/a | most up-to-date version. |
|---|
| 1746 | n/a | """ |
|---|
| 1747 | n/a | tarinfo = self._getmember(name) |
|---|
| 1748 | n/a | if tarinfo is None: |
|---|
| 1749 | n/a | raise KeyError("filename %r not found" % name) |
|---|
| 1750 | n/a | return tarinfo |
|---|
| 1751 | n/a | |
|---|
| 1752 | n/a | def getmembers(self): |
|---|
| 1753 | n/a | """Return the members of the archive as a list of TarInfo objects. The |
|---|
| 1754 | n/a | list has the same order as the members in the archive. |
|---|
| 1755 | n/a | """ |
|---|
| 1756 | n/a | self._check() |
|---|
| 1757 | n/a | if not self._loaded: # if we want to obtain a list of |
|---|
| 1758 | n/a | self._load() # all members, we first have to |
|---|
| 1759 | n/a | # scan the whole archive. |
|---|
| 1760 | n/a | return self.members |
|---|
| 1761 | n/a | |
|---|
| 1762 | n/a | def getnames(self): |
|---|
| 1763 | n/a | """Return the members of the archive as a list of their names. It has |
|---|
| 1764 | n/a | the same order as the list returned by getmembers(). |
|---|
| 1765 | n/a | """ |
|---|
| 1766 | n/a | return [tarinfo.name for tarinfo in self.getmembers()] |
|---|
| 1767 | n/a | |
|---|
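
A runnable sketch of the last-occurrence rule mentioned in getmember(): the same name is added twice to an in-memory archive, and getmember() resolves to the newer copy.

```python
import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    for revision in (b"v1\n", b"v2\n"):           # same name added twice
        info = tarfile.TarInfo("notes.txt")
        info.size = len(revision)
        tf.addfile(info, io.BytesIO(revision))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tf:
    print(tf.getnames())                          # ['notes.txt', 'notes.txt']
    member = tf.getmember("notes.txt")            # the *last* occurrence
    print(tf.extractfile(member).read())          # b'v2\n'
```
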
| 1768 | n/a | def gettarinfo(self, name=None, arcname=None, fileobj=None): |
|---|
| 1769 | n/a | """Create a TarInfo object from the result of os.stat or equivalent |
|---|
| 1770 | n/a | on an existing file. The file is either named by `name', or |
|---|
| 1771 | n/a | specified as a file object `fileobj' with a file descriptor. If |
|---|
| 1772 | n/a | given, `arcname' specifies an alternative name for the file in the |
|---|
| 1773 | n/a | archive; otherwise, the name is taken from the 'name' attribute of
|---|
| 1774 | n/a | 'fileobj', or the 'name' argument. The name should be a text |
|---|
| 1775 | n/a | string. |
|---|
| 1776 | n/a | """ |
|---|
| 1777 | n/a | self._check("awx") |
|---|
| 1778 | n/a | |
|---|
| 1779 | n/a | # When fileobj is given, replace name by |
|---|
| 1780 | n/a | # fileobj's real name. |
|---|
| 1781 | n/a | if fileobj is not None: |
|---|
| 1782 | n/a | name = fileobj.name |
|---|
| 1783 | n/a | |
|---|
| 1784 | n/a | # Building the name of the member in the archive. |
|---|
| 1785 | n/a | # Backward slashes are converted to forward slashes, and
|---|
| 1786 | n/a | # absolute paths are turned into relative paths.
|---|
| 1787 | n/a | if arcname is None: |
|---|
| 1788 | n/a | arcname = name |
|---|
| 1789 | n/a | drv, arcname = os.path.splitdrive(arcname) |
|---|
| 1790 | n/a | arcname = arcname.replace(os.sep, "/") |
|---|
| 1791 | n/a | arcname = arcname.lstrip("/") |
|---|
| 1792 | n/a | |
|---|
| 1793 | n/a | # Now, fill the TarInfo object with |
|---|
| 1794 | n/a | # information specific for the file. |
|---|
| 1795 | n/a | tarinfo = self.tarinfo() |
|---|
| 1796 | n/a | tarinfo.tarfile = self # Not needed |
|---|
| 1797 | n/a | |
|---|
| 1798 | n/a | # Use os.stat or os.lstat, depending on platform |
|---|
| 1799 | n/a | # and if symlinks shall be resolved. |
|---|
| 1800 | n/a | if fileobj is None: |
|---|
| 1801 | n/a | if hasattr(os, "lstat") and not self.dereference: |
|---|
| 1802 | n/a | statres = os.lstat(name) |
|---|
| 1803 | n/a | else: |
|---|
| 1804 | n/a | statres = os.stat(name) |
|---|
| 1805 | n/a | else: |
|---|
| 1806 | n/a | statres = os.fstat(fileobj.fileno()) |
|---|
| 1807 | n/a | linkname = "" |
|---|
| 1808 | n/a | |
|---|
| 1809 | n/a | stmd = statres.st_mode |
|---|
| 1810 | n/a | if stat.S_ISREG(stmd): |
|---|
| 1811 | n/a | inode = (statres.st_ino, statres.st_dev) |
|---|
| 1812 | n/a | if not self.dereference and statres.st_nlink > 1 and \ |
|---|
| 1813 | n/a | inode in self.inodes and arcname != self.inodes[inode]: |
|---|
| 1814 | n/a | # Is it a hardlink to an already |
|---|
| 1815 | n/a | # archived file? |
|---|
| 1816 | n/a | type = LNKTYPE |
|---|
| 1817 | n/a | linkname = self.inodes[inode] |
|---|
| 1818 | n/a | else: |
|---|
| 1819 | n/a | # The inode is added only if it's valid.
|---|
| 1820 | n/a | # For win32 it is always 0. |
|---|
| 1821 | n/a | type = REGTYPE |
|---|
| 1822 | n/a | if inode[0]: |
|---|
| 1823 | n/a | self.inodes[inode] = arcname |
|---|
| 1824 | n/a | elif stat.S_ISDIR(stmd): |
|---|
| 1825 | n/a | type = DIRTYPE |
|---|
| 1826 | n/a | elif stat.S_ISFIFO(stmd): |
|---|
| 1827 | n/a | type = FIFOTYPE |
|---|
| 1828 | n/a | elif stat.S_ISLNK(stmd): |
|---|
| 1829 | n/a | type = SYMTYPE |
|---|
| 1830 | n/a | linkname = os.readlink(name) |
|---|
| 1831 | n/a | elif stat.S_ISCHR(stmd): |
|---|
| 1832 | n/a | type = CHRTYPE |
|---|
| 1833 | n/a | elif stat.S_ISBLK(stmd): |
|---|
| 1834 | n/a | type = BLKTYPE |
|---|
| 1835 | n/a | else: |
|---|
| 1836 | n/a | return None |
|---|
| 1837 | n/a | |
|---|
| 1838 | n/a | # Fill the TarInfo object with all |
|---|
| 1839 | n/a | # information we can get. |
|---|
| 1840 | n/a | tarinfo.name = arcname |
|---|
| 1841 | n/a | tarinfo.mode = stmd |
|---|
| 1842 | n/a | tarinfo.uid = statres.st_uid |
|---|
| 1843 | n/a | tarinfo.gid = statres.st_gid |
|---|
| 1844 | n/a | if type == REGTYPE: |
|---|
| 1845 | n/a | tarinfo.size = statres.st_size |
|---|
| 1846 | n/a | else: |
|---|
| 1847 | n/a | tarinfo.size = 0 |
|---|
| 1848 | n/a | tarinfo.mtime = statres.st_mtime |
|---|
| 1849 | n/a | tarinfo.type = type |
|---|
| 1850 | n/a | tarinfo.linkname = linkname |
|---|
| 1851 | n/a | if pwd: |
|---|
| 1852 | n/a | try: |
|---|
| 1853 | n/a | tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] |
|---|
| 1854 | n/a | except KeyError: |
|---|
| 1855 | n/a | pass |
|---|
| 1856 | n/a | if grp: |
|---|
| 1857 | n/a | try: |
|---|
| 1858 | n/a | tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] |
|---|
| 1859 | n/a | except KeyError: |
|---|
| 1860 | n/a | pass |
|---|
| 1861 | n/a | |
|---|
| 1862 | n/a | if type in (CHRTYPE, BLKTYPE): |
|---|
| 1863 | n/a | if hasattr(os, "major") and hasattr(os, "minor"): |
|---|
| 1864 | n/a | tarinfo.devmajor = os.major(statres.st_rdev) |
|---|
| 1865 | n/a | tarinfo.devminor = os.minor(statres.st_rdev) |
|---|
| 1866 | n/a | return tarinfo |
|---|
| 1867 | n/a | |
|---|
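
A sketch of the usual gettarinfo()/addfile() pairing: stat an existing file, adjust the metadata fields on the returned TarInfo, then append header and data. The temporary file and archive below exist only for the example.

```python
import os
import tarfile
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "data.bin")
    with open(path, "wb") as f:
        f.write(b"\x00" * 100)

    with tarfile.open(os.path.join(tmp, "out.tar"), "w") as tf:
        info = tf.gettarinfo(path, arcname="data/data.bin")
        info.uname = info.gname = "builder"   # normalise ownership metadata
        info.mtime = 0                        # reproducible timestamp
        with open(path, "rb") as f:
            tf.addfile(info, f)               # header + 100 data bytes
```
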
| 1868 | n/a | def list(self, verbose=True, *, members=None): |
|---|
| 1869 | n/a | """Print a table of contents to sys.stdout. If `verbose' is False, only |
|---|
| 1870 | n/a | the names of the members are printed. If it is True, an `ls -l'-like |
|---|
| 1871 | n/a | output is produced. `members' is optional and must be a subset of the |
|---|
| 1872 | n/a | list returned by getmembers(). |
|---|
| 1873 | n/a | """ |
|---|
| 1874 | n/a | self._check() |
|---|
| 1875 | n/a | |
|---|
| 1876 | n/a | if members is None: |
|---|
| 1877 | n/a | members = self |
|---|
| 1878 | n/a | for tarinfo in members: |
|---|
| 1879 | n/a | if verbose: |
|---|
| 1880 | n/a | _safe_print(stat.filemode(tarinfo.mode)) |
|---|
| 1881 | n/a | _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, |
|---|
| 1882 | n/a | tarinfo.gname or tarinfo.gid)) |
|---|
| 1883 | n/a | if tarinfo.ischr() or tarinfo.isblk(): |
|---|
| 1884 | n/a | _safe_print("%10s" % |
|---|
| 1885 | n/a | ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) |
|---|
| 1886 | n/a | else: |
|---|
| 1887 | n/a | _safe_print("%10d" % tarinfo.size) |
|---|
| 1888 | n/a | _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ |
|---|
| 1889 | n/a | % time.localtime(tarinfo.mtime)[:6]) |
|---|
| 1890 | n/a | |
|---|
| 1891 | n/a | _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) |
|---|
| 1892 | n/a | |
|---|
| 1893 | n/a | if verbose: |
|---|
| 1894 | n/a | if tarinfo.issym(): |
|---|
| 1895 | n/a | _safe_print("-> " + tarinfo.linkname) |
|---|
| 1896 | n/a | if tarinfo.islnk(): |
|---|
| 1897 | n/a | _safe_print("link to " + tarinfo.linkname) |
|---|
| 1898 | n/a | print() |
|---|
| 1899 | n/a | |
|---|
| 1900 | n/a | def add(self, name, arcname=None, recursive=True, *, filter=None): |
|---|
| 1901 | n/a | """Add the file `name' to the archive. `name' may be any type of file |
|---|
| 1902 | n/a | (directory, fifo, symbolic link, etc.). If given, `arcname' |
|---|
| 1903 | n/a | specifies an alternative name for the file in the archive. |
|---|
| 1904 | n/a | Directories are added recursively by default. This can be avoided by |
|---|
| 1905 | n/a | setting `recursive' to False. `filter' is a function |
|---|
| 1906 | n/a | that expects a TarInfo object argument and returns the changed |
|---|
| 1907 | n/a | TarInfo object; if it returns None, the TarInfo object will be
|---|
| 1908 | n/a | excluded from the archive. |
|---|
| 1909 | n/a | """ |
|---|
| 1910 | n/a | self._check("awx") |
|---|
| 1911 | n/a | |
|---|
| 1912 | n/a | if arcname is None: |
|---|
| 1913 | n/a | arcname = name |
|---|
| 1914 | n/a | |
|---|
| 1915 | n/a | # Skip if somebody tries to archive the archive... |
|---|
| 1916 | n/a | if self.name is not None and os.path.abspath(name) == self.name: |
|---|
| 1917 | n/a | self._dbg(2, "tarfile: Skipped %r" % name) |
|---|
| 1918 | n/a | return |
|---|
| 1919 | n/a | |
|---|
| 1920 | n/a | self._dbg(1, name) |
|---|
| 1921 | n/a | |
|---|
| 1922 | n/a | # Create a TarInfo object from the file. |
|---|
| 1923 | n/a | tarinfo = self.gettarinfo(name, arcname) |
|---|
| 1924 | n/a | |
|---|
| 1925 | n/a | if tarinfo is None: |
|---|
| 1926 | n/a | self._dbg(1, "tarfile: Unsupported type %r" % name) |
|---|
| 1927 | n/a | return |
|---|
| 1928 | n/a | |
|---|
| 1929 | n/a | # Change or exclude the TarInfo object. |
|---|
| 1930 | n/a | if filter is not None: |
|---|
| 1931 | n/a | tarinfo = filter(tarinfo) |
|---|
| 1932 | n/a | if tarinfo is None: |
|---|
| 1933 | n/a | self._dbg(2, "tarfile: Excluded %r" % name) |
|---|
| 1934 | n/a | return |
|---|
| 1935 | n/a | |
|---|
| 1936 | n/a | # Append the tar header and data to the archive. |
|---|
| 1937 | n/a | if tarinfo.isreg(): |
|---|
| 1938 | n/a | with bltn_open(name, "rb") as f: |
|---|
| 1939 | n/a | self.addfile(tarinfo, f) |
|---|
| 1940 | n/a | |
|---|
| 1941 | n/a | elif tarinfo.isdir(): |
|---|
| 1942 | n/a | self.addfile(tarinfo) |
|---|
| 1943 | n/a | if recursive: |
|---|
| 1944 | n/a | for f in os.listdir(name): |
|---|
| 1945 | n/a | self.add(os.path.join(name, f), os.path.join(arcname, f), |
|---|
| 1946 | n/a | recursive, filter=filter) |
|---|
| 1947 | n/a | |
|---|
| 1948 | n/a | else: |
|---|
| 1949 | n/a | self.addfile(tarinfo) |
|---|
| 1950 | n/a | |
|---|
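
A small sketch of the filter protocol described above; skip_caches() is a hypothetical callback, and the paths in the commented call are made up.

```python
import tarfile

def skip_caches(tarinfo):
    """Exclude __pycache__ entries; return the member unchanged otherwise."""
    if "__pycache__" in tarinfo.name:
        return None              # returning None drops the member
    return tarinfo

# Adds src/ recursively, minus __pycache__ directories:
# with tarfile.open("project.tar", "w") as tf:
#     tf.add("src", arcname="project/src", filter=skip_caches)
```
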
| 1951 | n/a | def addfile(self, tarinfo, fileobj=None): |
|---|
| 1952 | n/a | """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is |
|---|
| 1953 | n/a | given, it should be a binary file, and tarinfo.size bytes are read |
|---|
| 1954 | n/a | from it and added to the archive. You can create TarInfo objects |
|---|
| 1955 | n/a | directly, or by using gettarinfo(). |
|---|
| 1956 | n/a | """ |
|---|
| 1957 | n/a | self._check("awx") |
|---|
| 1958 | n/a | |
|---|
| 1959 | n/a | tarinfo = copy.copy(tarinfo) |
|---|
| 1960 | n/a | |
|---|
| 1961 | n/a | buf = tarinfo.tobuf(self.format, self.encoding, self.errors) |
|---|
| 1962 | n/a | self.fileobj.write(buf) |
|---|
| 1963 | n/a | self.offset += len(buf) |
|---|
| 1964 | n/a | bufsize = self.copybufsize
|---|
| 1965 | n/a | # If there's data to follow, append it. |
|---|
| 1966 | n/a | if fileobj is not None: |
|---|
| 1967 | n/a | copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize) |
|---|
| 1968 | n/a | blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) |
|---|
| 1969 | n/a | if remainder > 0: |
|---|
| 1970 | n/a | self.fileobj.write(NUL * (BLOCKSIZE - remainder)) |
|---|
| 1971 | n/a | blocks += 1 |
|---|
| 1972 | n/a | self.offset += blocks * BLOCKSIZE |
|---|
| 1973 | n/a | |
|---|
| 1974 | n/a | self.members.append(tarinfo) |
|---|
| 1975 | n/a | |
|---|
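
A sketch of building a TarInfo by hand for generated content; addfile() reads exactly tarinfo.size bytes from the file object, so the size field must be set before the call. The archive name in the commented call is hypothetical.

```python
import io
import tarfile
import time

payload = b'{"version": 1}\n'

info = tarfile.TarInfo("config/settings.json")
info.size = len(payload)          # addfile() reads exactly this many bytes
info.mtime = int(time.time())
info.mode = 0o644

# with tarfile.open("bundle.tar", "w") as tf:
#     tf.addfile(info, io.BytesIO(payload))
```
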
| 1976 | n/a | def extractall(self, path=".", members=None, *, numeric_owner=False): |
|---|
| 1977 | n/a | """Extract all members from the archive to the current working |
|---|
| 1978 | n/a | directory and set owner, modification time and permissions on |
|---|
| 1979 | n/a | directories afterwards. `path' specifies a different directory |
|---|
| 1980 | n/a | to extract to. `members' is optional and must be a subset of the |
|---|
| 1981 | n/a | list returned by getmembers(). If `numeric_owner` is True, only |
|---|
| 1982 | n/a | the numbers for user/group names are used and not the names. |
|---|
| 1983 | n/a | """ |
|---|
| 1984 | n/a | directories = [] |
|---|
| 1985 | n/a | |
|---|
| 1986 | n/a | if members is None: |
|---|
| 1987 | n/a | members = self |
|---|
| 1988 | n/a | |
|---|
| 1989 | n/a | for tarinfo in members: |
|---|
| 1990 | n/a | if tarinfo.isdir(): |
|---|
| 1991 | n/a | # Extract directories with a safe mode. |
|---|
| 1992 | n/a | directories.append(tarinfo) |
|---|
| 1993 | n/a | tarinfo = copy.copy(tarinfo) |
|---|
| 1994 | n/a | tarinfo.mode = 0o700 |
|---|
| 1995 | n/a | # Do not set_attrs directories, as we will do that further down |
|---|
| 1996 | n/a | self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(), |
|---|
| 1997 | n/a | numeric_owner=numeric_owner) |
|---|
| 1998 | n/a | |
|---|
| 1999 | n/a | # Reverse sort directories. |
|---|
| 2000 | n/a | directories.sort(key=lambda a: a.name) |
|---|
| 2001 | n/a | directories.reverse() |
|---|
| 2002 | n/a | |
|---|
| 2003 | n/a | # Set correct owner, mtime and filemode on directories. |
|---|
| 2004 | n/a | for tarinfo in directories: |
|---|
| 2005 | n/a | dirpath = os.path.join(path, tarinfo.name) |
|---|
| 2006 | n/a | try: |
|---|
| 2007 | n/a | self.chown(tarinfo, dirpath, numeric_owner=numeric_owner) |
|---|
| 2008 | n/a | self.utime(tarinfo, dirpath) |
|---|
| 2009 | n/a | self.chmod(tarinfo, dirpath) |
|---|
| 2010 | n/a | except ExtractError as e: |
|---|
| 2011 | n/a | if self.errorlevel > 1: |
|---|
| 2012 | n/a | raise |
|---|
| 2013 | n/a | else: |
|---|
| 2014 | n/a | self._dbg(1, "tarfile: %s" % e) |
|---|
| 2015 | n/a | |
|---|
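
A runnable sketch of extracting only a subset of members: a small archive is built in memory, and only the entries under docs/ are extracted into a temporary directory.

```python
import io
import os
import tarfile
import tempfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    for name in ("docs/index.txt", "src/main.py"):
        data = name.encode() + b"\n"
        info = tarfile.TarInfo(name)
        info.size = len(data)
        tf.addfile(info, io.BytesIO(data))

buf.seek(0)
with tempfile.TemporaryDirectory() as dest, \
        tarfile.open(fileobj=buf, mode="r") as tf:
    wanted = [m for m in tf.getmembers() if m.name.startswith("docs/")]
    tf.extractall(path=dest, members=wanted)
    print(sorted(os.listdir(dest)))        # ['docs']
```
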
| 2016 | n/a | def extract(self, member, path="", set_attrs=True, *, numeric_owner=False): |
|---|
| 2017 | n/a | """Extract a member from the archive to the current working directory, |
|---|
| 2018 | n/a | using its full name. Its file information is extracted as accurately |
|---|
| 2019 | n/a | as possible. `member' may be a filename or a TarInfo object. You can |
|---|
| 2020 | n/a | specify a different directory using `path'. File attributes (owner, |
|---|
| 2021 | n/a | mtime, mode) are set unless `set_attrs' is False. If `numeric_owner` |
|---|
| 2022 | n/a | is True, only the numbers for user/group names are used and not |
|---|
| 2023 | n/a | the names. |
|---|
| 2024 | n/a | """ |
|---|
| 2025 | n/a | self._check("r") |
|---|
| 2026 | n/a | |
|---|
| 2027 | n/a | if isinstance(member, str): |
|---|
| 2028 | n/a | tarinfo = self.getmember(member) |
|---|
| 2029 | n/a | else: |
|---|
| 2030 | n/a | tarinfo = member |
|---|
| 2031 | n/a | |
|---|
| 2032 | n/a | # Prepare the link target for makelink(). |
|---|
| 2033 | n/a | if tarinfo.islnk(): |
|---|
| 2034 | n/a | tarinfo._link_target = os.path.join(path, tarinfo.linkname) |
|---|
| 2035 | n/a | |
|---|
| 2036 | n/a | try: |
|---|
| 2037 | n/a | self._extract_member(tarinfo, os.path.join(path, tarinfo.name), |
|---|
| 2038 | n/a | set_attrs=set_attrs, |
|---|
| 2039 | n/a | numeric_owner=numeric_owner) |
|---|
| 2040 | n/a | except OSError as e: |
|---|
| 2041 | n/a | if self.errorlevel > 0: |
|---|
| 2042 | n/a | raise |
|---|
| 2043 | n/a | else: |
|---|
| 2044 | n/a | if e.filename is None: |
|---|
| 2045 | n/a | self._dbg(1, "tarfile: %s" % e.strerror) |
|---|
| 2046 | n/a | else: |
|---|
| 2047 | n/a | self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) |
|---|
| 2048 | n/a | except ExtractError as e: |
|---|
| 2049 | n/a | if self.errorlevel > 1: |
|---|
| 2050 | n/a | raise |
|---|
| 2051 | n/a | else: |
|---|
| 2052 | n/a | self._dbg(1, "tarfile: %s" % e) |
|---|
| 2053 | n/a | |
|---|
| 2054 | n/a | def extractfile(self, member): |
|---|
| 2055 | n/a | """Extract a member from the archive as a file object. `member' may be |
|---|
| 2056 | n/a | a filename or a TarInfo object. If `member' is a regular file or a |
|---|
| 2057 | n/a | link, an io.BufferedReader object is returned. Otherwise, None is |
|---|
| 2058 | n/a | returned. |
|---|
| 2059 | n/a | """ |
|---|
| 2060 | n/a | self._check("r") |
|---|
| 2061 | n/a | |
|---|
| 2062 | n/a | if isinstance(member, str): |
|---|
| 2063 | n/a | tarinfo = self.getmember(member) |
|---|
| 2064 | n/a | else: |
|---|
| 2065 | n/a | tarinfo = member |
|---|
| 2066 | n/a | |
|---|
| 2067 | n/a | if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: |
|---|
| 2068 | n/a | # Members with unknown types are treated as regular files. |
|---|
| 2069 | n/a | return self.fileobject(self, tarinfo) |
|---|
| 2070 | n/a | |
|---|
| 2071 | n/a | elif tarinfo.islnk() or tarinfo.issym(): |
|---|
| 2072 | n/a | if isinstance(self.fileobj, _Stream): |
|---|
| 2073 | n/a | # A small but ugly workaround for the case that someone tries |
|---|
| 2074 | n/a | # to extract a (sym)link as a file-object from a non-seekable |
|---|
| 2075 | n/a | # stream of tar blocks. |
|---|
| 2076 | n/a | raise StreamError("cannot extract (sym)link as file object") |
|---|
| 2077 | n/a | else: |
|---|
| 2078 | n/a | # A (sym)link's file object is its target's file object. |
|---|
| 2079 | n/a | return self.extractfile(self._find_link_target(tarinfo)) |
|---|
| 2080 | n/a | else: |
|---|
| 2081 | n/a | # If there's no data associated with the member (directory, chrdev, |
|---|
| 2082 | n/a | # blkdev, etc.), return None instead of a file object. |
|---|
| 2083 | n/a | return None |
|---|
| 2084 | n/a | |
|---|
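
A runnable sketch of the return values described above: a regular member yields a buffered reader, while a member with no data (here a directory) yields None.

```python
import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    d = tarfile.TarInfo("data")
    d.type = tarfile.DIRTYPE                # directory member, no data
    tf.addfile(d)
    payload = b"\x01\x02\x03"
    f = tarfile.TarInfo("data/blob.bin")
    f.size = len(payload)
    tf.addfile(f, io.BytesIO(payload))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tf:
    print(tf.extractfile("data"))               # None
    reader = tf.extractfile("data/blob.bin")    # buffered reader, no file on disk
    print(reader.read())                        # b'\x01\x02\x03'
```
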
| 2085 | n/a | def _extract_member(self, tarinfo, targetpath, set_attrs=True, |
|---|
| 2086 | n/a | numeric_owner=False): |
|---|
| 2087 | n/a | """Extract the TarInfo object tarinfo to a physical |
|---|
| 2088 | n/a | file called targetpath. |
|---|
| 2089 | n/a | """ |
|---|
| 2090 | n/a | # Fetch the TarInfo object for the given name |
|---|
| 2091 | n/a | # and build the destination pathname, replacing |
|---|
| 2092 | n/a | # forward slashes to platform specific separators. |
|---|
| 2093 | n/a | targetpath = targetpath.rstrip("/") |
|---|
| 2093 | n/a | # forward slashes with platform-specific separators.
|---|
| 2095 | n/a | |
|---|
| 2096 | n/a | # Create all upper directories. |
|---|
| 2097 | n/a | upperdirs = os.path.dirname(targetpath) |
|---|
| 2098 | n/a | if upperdirs and not os.path.exists(upperdirs): |
|---|
| 2099 | n/a | # Create directories that are not part of the archive with |
|---|
| 2100 | n/a | # default permissions. |
|---|
| 2101 | n/a | os.makedirs(upperdirs) |
|---|
| 2102 | n/a | |
|---|
| 2103 | n/a | if tarinfo.islnk() or tarinfo.issym(): |
|---|
| 2104 | n/a | self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) |
|---|
| 2105 | n/a | else: |
|---|
| 2106 | n/a | self._dbg(1, tarinfo.name) |
|---|
| 2107 | n/a | |
|---|
| 2108 | n/a | if tarinfo.isreg(): |
|---|
| 2109 | n/a | self.makefile(tarinfo, targetpath) |
|---|
| 2110 | n/a | elif tarinfo.isdir(): |
|---|
| 2111 | n/a | self.makedir(tarinfo, targetpath) |
|---|
| 2112 | n/a | elif tarinfo.isfifo(): |
|---|
| 2113 | n/a | self.makefifo(tarinfo, targetpath) |
|---|
| 2114 | n/a | elif tarinfo.ischr() or tarinfo.isblk(): |
|---|
| 2115 | n/a | self.makedev(tarinfo, targetpath) |
|---|
| 2116 | n/a | elif tarinfo.islnk() or tarinfo.issym(): |
|---|
| 2117 | n/a | self.makelink(tarinfo, targetpath) |
|---|
| 2118 | n/a | elif tarinfo.type not in SUPPORTED_TYPES: |
|---|
| 2119 | n/a | self.makeunknown(tarinfo, targetpath) |
|---|
| 2120 | n/a | else: |
|---|
| 2121 | n/a | self.makefile(tarinfo, targetpath) |
|---|
| 2122 | n/a | |
|---|
| 2123 | n/a | if set_attrs: |
|---|
| 2124 | n/a | self.chown(tarinfo, targetpath, numeric_owner) |
|---|
| 2125 | n/a | if not tarinfo.issym(): |
|---|
| 2126 | n/a | self.chmod(tarinfo, targetpath) |
|---|
| 2127 | n/a | self.utime(tarinfo, targetpath) |
|---|
| 2128 | n/a | |
|---|
| 2129 | n/a | #-------------------------------------------------------------------------- |
|---|
| 2130 | n/a | # Below are the different file methods. They are called via |
|---|
| 2131 | n/a | # _extract_member() when extract() is called. They can be replaced in a |
|---|
| 2132 | n/a | # subclass to implement other functionality. |
|---|
| 2133 | n/a | |
|---|
| 2134 | n/a | def makedir(self, tarinfo, targetpath): |
|---|
| 2135 | n/a | """Make a directory called targetpath. |
|---|
| 2136 | n/a | """ |
|---|
| 2137 | n/a | try: |
|---|
| 2138 | n/a | # Use a safe mode for the directory, the real mode is set |
|---|
| 2139 | n/a | # later in _extract_member(). |
|---|
| 2140 | n/a | os.mkdir(targetpath, 0o700) |
|---|
| 2141 | n/a | except FileExistsError: |
|---|
| 2142 | n/a | pass |
|---|
| 2143 | n/a | |
|---|
| 2144 | n/a | def makefile(self, tarinfo, targetpath): |
|---|
| 2145 | n/a | """Make a file called targetpath. |
|---|
| 2146 | n/a | """ |
|---|
| 2147 | n/a | source = self.fileobj |
|---|
| 2148 | n/a | source.seek(tarinfo.offset_data) |
|---|
| 2149 | n/a | bufsize = self.copybufsize |
|---|
| 2150 | n/a | with bltn_open(targetpath, "wb") as target: |
|---|
| 2151 | n/a | if tarinfo.sparse is not None: |
|---|
| 2152 | n/a | for offset, size in tarinfo.sparse: |
|---|
| 2153 | n/a | target.seek(offset) |
|---|
| 2154 | n/a | copyfileobj(source, target, size, ReadError, bufsize) |
|---|
| 2155 | n/a | target.seek(tarinfo.size) |
|---|
| 2156 | n/a | target.truncate() |
|---|
| 2157 | n/a | else: |
|---|
| 2158 | n/a | copyfileobj(source, target, tarinfo.size, ReadError, bufsize) |
|---|
| 2159 | n/a | |
|---|
| 2160 | n/a | def makeunknown(self, tarinfo, targetpath): |
|---|
| 2161 | n/a | """Make a file from a TarInfo object with an unknown type |
|---|
| 2162 | n/a | at targetpath. |
|---|
| 2163 | n/a | """ |
|---|
| 2164 | n/a | self.makefile(tarinfo, targetpath) |
|---|
| 2165 | n/a | self._dbg(1, "tarfile: Unknown file type %r, " \ |
|---|
| 2166 | n/a | "extracted as regular file." % tarinfo.type) |
|---|
| 2167 | n/a | |
|---|
| 2168 | n/a | def makefifo(self, tarinfo, targetpath): |
|---|
| 2169 | n/a | """Make a fifo called targetpath. |
|---|
| 2170 | n/a | """ |
|---|
| 2171 | n/a | if hasattr(os, "mkfifo"): |
|---|
| 2172 | n/a | os.mkfifo(targetpath) |
|---|
| 2173 | n/a | else: |
|---|
| 2174 | n/a | raise ExtractError("fifo not supported by system") |
|---|
| 2175 | n/a | |
|---|
| 2176 | n/a | def makedev(self, tarinfo, targetpath): |
|---|
| 2177 | n/a | """Make a character or block device called targetpath. |
|---|
| 2178 | n/a | """ |
|---|
| 2179 | n/a | if not hasattr(os, "mknod") or not hasattr(os, "makedev"): |
|---|
| 2180 | n/a | raise ExtractError("special devices not supported by system") |
|---|
| 2181 | n/a | |
|---|
| 2182 | n/a | mode = tarinfo.mode |
|---|
| 2183 | n/a | if tarinfo.isblk(): |
|---|
| 2184 | n/a | mode |= stat.S_IFBLK |
|---|
| 2185 | n/a | else: |
|---|
| 2186 | n/a | mode |= stat.S_IFCHR |
|---|
| 2187 | n/a | |
|---|
| 2188 | n/a | os.mknod(targetpath, mode, |
|---|
| 2189 | n/a | os.makedev(tarinfo.devmajor, tarinfo.devminor)) |
|---|
| 2190 | n/a | |
|---|
| 2191 | n/a | def makelink(self, tarinfo, targetpath): |
|---|
| 2192 | n/a | """Make a (symbolic) link called targetpath. If it cannot be created |
|---|
| 2193 | n/a | (platform limitation), we try to make a copy of the referenced file |
|---|
| 2194 | n/a | instead of a link. |
|---|
| 2195 | n/a | """ |
|---|
| 2196 | n/a | try: |
|---|
| 2197 | n/a | # For systems that support symbolic and hard links. |
|---|
| 2198 | n/a | if tarinfo.issym(): |
|---|
| 2199 | n/a | os.symlink(tarinfo.linkname, targetpath) |
|---|
| 2200 | n/a | else: |
|---|
| 2201 | n/a | # See extract(). |
|---|
| 2202 | n/a | if os.path.exists(tarinfo._link_target): |
|---|
| 2203 | n/a | os.link(tarinfo._link_target, targetpath) |
|---|
| 2204 | n/a | else: |
|---|
| 2205 | n/a | self._extract_member(self._find_link_target(tarinfo), |
|---|
| 2206 | n/a | targetpath) |
|---|
| 2207 | n/a | except symlink_exception: |
|---|
| 2208 | n/a | try: |
|---|
| 2209 | n/a | self._extract_member(self._find_link_target(tarinfo), |
|---|
| 2210 | n/a | targetpath) |
|---|
| 2211 | n/a | except KeyError: |
|---|
| 2212 | n/a | raise ExtractError("unable to resolve link inside archive") |
|---|
| 2213 | n/a | |
|---|
| 2214 | n/a | def chown(self, tarinfo, targetpath, numeric_owner): |
|---|
| 2215 | n/a | """Set owner of targetpath according to tarinfo. If numeric_owner |
|---|
| 2216 | n/a | is True, use .gid/.uid instead of .gname/.uname. If numeric_owner |
|---|
| 2217 | n/a | is False, fall back to .gid/.uid when the search based on name |
|---|
| 2218 | n/a | fails. |
|---|
| 2219 | n/a | """ |
|---|
| 2220 | n/a | if hasattr(os, "geteuid") and os.geteuid() == 0: |
|---|
| 2221 | n/a | # We have to be root to do so. |
|---|
| 2222 | n/a | g = tarinfo.gid |
|---|
| 2223 | n/a | u = tarinfo.uid |
|---|
| 2224 | n/a | if not numeric_owner: |
|---|
| 2225 | n/a | try: |
|---|
| 2226 | n/a | if grp: |
|---|
| 2227 | n/a | g = grp.getgrnam(tarinfo.gname)[2] |
|---|
| 2228 | n/a | except KeyError: |
|---|
| 2229 | n/a | pass |
|---|
| 2230 | n/a | try: |
|---|
| 2231 | n/a | if pwd: |
|---|
| 2232 | n/a | u = pwd.getpwnam(tarinfo.uname)[2] |
|---|
| 2233 | n/a | except KeyError: |
|---|
| 2234 | n/a | pass |
|---|
| 2235 | n/a | try: |
|---|
| 2236 | n/a | if tarinfo.issym() and hasattr(os, "lchown"): |
|---|
| 2237 | n/a | os.lchown(targetpath, u, g) |
|---|
| 2238 | n/a | else: |
|---|
| 2239 | n/a | os.chown(targetpath, u, g) |
|---|
| 2240 | n/a | except OSError: |
|---|
| 2241 | n/a | raise ExtractError("could not change owner") |
|---|
| 2242 | n/a | |
|---|
| 2243 | n/a | def chmod(self, tarinfo, targetpath): |
|---|
| 2244 | n/a | """Set file permissions of targetpath according to tarinfo. |
|---|
| 2245 | n/a | """ |
|---|
| 2246 | n/a | if hasattr(os, 'chmod'): |
|---|
| 2247 | n/a | try: |
|---|
| 2248 | n/a | os.chmod(targetpath, tarinfo.mode) |
|---|
| 2249 | n/a | except OSError: |
|---|
| 2250 | n/a | raise ExtractError("could not change mode") |
|---|
| 2251 | n/a | |
|---|
| 2252 | n/a | def utime(self, tarinfo, targetpath): |
|---|
| 2253 | n/a | """Set modification time of targetpath according to tarinfo. |
|---|
| 2254 | n/a | """ |
|---|
| 2255 | n/a | if not hasattr(os, 'utime'): |
|---|
| 2256 | n/a | return |
|---|
| 2257 | n/a | try: |
|---|
| 2258 | n/a | os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) |
|---|
| 2259 | n/a | except OSError: |
|---|
| 2260 | n/a | raise ExtractError("could not change modification time") |
|---|
| 2261 | n/a | |
|---|
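chmod() and utime() reapply the archived permission bits and modification time once a member has been written out, and both quietly skip platforms that lack the corresponding os functions. A minimal sketch of the same two calls on an already-extracted file; path, mode and mtime are placeholders:

```python
import os
import stat

def restore_metadata(path, mode, mtime):
    """Reapply permissions and timestamps the way chmod()/utime() above do."""
    if hasattr(os, "chmod"):
        os.chmod(path, stat.S_IMODE(mode))    # only the permission bits matter here
    if hasattr(os, "utime"):
        os.utime(path, (mtime, mtime))        # atime and mtime both set to the archived mtime
```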
| 2262 | n/a | #-------------------------------------------------------------------------- |
|---|
| 2263 | n/a | def next(self): |
|---|
| 2264 | n/a | """Return the next member of the archive as a TarInfo object, when |
|---|
| 2265 | n/a | TarFile is opened for reading. Return None if there are no more |
|---|
| 2266 | n/a | members available. |
|---|
| 2267 | n/a | """ |
|---|
| 2268 | n/a | self._check("ra") |
|---|
| 2269 | n/a | if self.firstmember is not None: |
|---|
| 2270 | n/a | m = self.firstmember |
|---|
| 2271 | n/a | self.firstmember = None |
|---|
| 2272 | n/a | return m |
|---|
| 2273 | n/a | |
|---|
| 2274 | n/a | # Advance the file pointer to self.offset; reading the byte just before it verifies that the data is really there. |
|---|
| 2275 | n/a | if self.offset != self.fileobj.tell(): |
|---|
| 2276 | n/a | self.fileobj.seek(self.offset - 1) |
|---|
| 2277 | n/a | if not self.fileobj.read(1): |
|---|
| 2278 | n/a | raise ReadError("unexpected end of data") |
|---|
| 2279 | n/a | |
|---|
| 2280 | n/a | # Read the next block. |
|---|
| 2281 | n/a | tarinfo = None |
|---|
| 2282 | n/a | while True: |
|---|
| 2283 | n/a | try: |
|---|
| 2284 | n/a | tarinfo = self.tarinfo.fromtarfile(self) |
|---|
| 2285 | n/a | except EOFHeaderError as e: |
|---|
| 2286 | n/a | if self.ignore_zeros: |
|---|
| 2287 | n/a | self._dbg(2, "0x%X: %s" % (self.offset, e)) |
|---|
| 2288 | n/a | self.offset += BLOCKSIZE |
|---|
| 2289 | n/a | continue |
|---|
| 2290 | n/a | except InvalidHeaderError as e: |
|---|
| 2291 | n/a | if self.ignore_zeros: |
|---|
| 2292 | n/a | self._dbg(2, "0x%X: %s" % (self.offset, e)) |
|---|
| 2293 | n/a | self.offset += BLOCKSIZE |
|---|
| 2294 | n/a | continue |
|---|
| 2295 | n/a | elif self.offset == 0: |
|---|
| 2296 | n/a | raise ReadError(str(e)) |
|---|
| 2297 | n/a | except EmptyHeaderError: |
|---|
| 2298 | n/a | if self.offset == 0: |
|---|
| 2299 | n/a | raise ReadError("empty file") |
|---|
| 2300 | n/a | except TruncatedHeaderError as e: |
|---|
| 2301 | n/a | if self.offset == 0: |
|---|
| 2302 | n/a | raise ReadError(str(e)) |
|---|
| 2303 | n/a | except SubsequentHeaderError as e: |
|---|
| 2304 | n/a | raise ReadError(str(e)) |
|---|
| 2305 | n/a | break |
|---|
| 2306 | n/a | |
|---|
| 2307 | n/a | if tarinfo is not None: |
|---|
| 2308 | n/a | self.members.append(tarinfo) |
|---|
| 2309 | n/a | else: |
|---|
| 2310 | n/a | self._loaded = True |
|---|
| 2311 | n/a | |
|---|
| 2312 | n/a | return tarinfo |
|---|
| 2313 | n/a | |
|---|
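next() is the low-level cursor over the archive: it returns one TarInfo per call, appends it to self.members, and returns None once the end-of-archive marker is reached. A usage sketch that walks an archive header by header ("example.tar" is a placeholder name):

```python
import tarfile

with tarfile.open("example.tar") as tf:      # placeholder archive
    member = tf.next()
    while member is not None:
        print(member.name, member.size)
        member = tf.next()                   # None once the archive is exhausted
```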
| 2314 | n/a | #-------------------------------------------------------------------------- |
|---|
| 2315 | n/a | # Little helper methods: |
|---|
| 2316 | n/a | |
|---|
| 2317 | n/a | def _getmember(self, name, tarinfo=None, normalize=False): |
|---|
| 2318 | n/a | """Find an archive member by name from bottom to top. |
|---|
| 2319 | n/a | If tarinfo is given, it is used as the starting point. |
|---|
| 2320 | n/a | """ |
|---|
| 2321 | n/a | # Ensure that all members have been loaded. |
|---|
| 2322 | n/a | members = self.getmembers() |
|---|
| 2323 | n/a | |
|---|
| 2324 | n/a | # Limit the member search list up to tarinfo. |
|---|
| 2325 | n/a | if tarinfo is not None: |
|---|
| 2326 | n/a | members = members[:members.index(tarinfo)] |
|---|
| 2327 | n/a | |
|---|
| 2328 | n/a | if normalize: |
|---|
| 2329 | n/a | name = os.path.normpath(name) |
|---|
| 2330 | n/a | |
|---|
| 2331 | n/a | for member in reversed(members): |
|---|
| 2332 | n/a | if normalize: |
|---|
| 2333 | n/a | member_name = os.path.normpath(member.name) |
|---|
| 2334 | n/a | else: |
|---|
| 2335 | n/a | member_name = member.name |
|---|
| 2336 | n/a | |
|---|
| 2337 | n/a | if name == member_name: |
|---|
| 2338 | n/a | return member |
|---|
| 2339 | n/a | |
|---|
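Because _getmember() scans the member list from the end, an archive that contains several entries with the same name resolves to the most recently added one. The public getmember() wrapper behaves the same way; a short usage sketch with placeholder names:

```python
import tarfile

with tarfile.open("example.tar") as tf:          # placeholder archive
    # The *last* occurrence of the name wins, mirroring _getmember()'s reversed search.
    info = tf.getmember("dir/config.txt")        # placeholder member name; raises KeyError if absent
    print(info.size, info.mtime)
```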
| 2340 | n/a | def _load(self): |
|---|
| 2341 | n/a | """Read through the entire archive file and look for readable |
|---|
| 2342 | n/a | members. |
|---|
| 2343 | n/a | """ |
|---|
| 2344 | n/a | while True: |
|---|
| 2345 | n/a | tarinfo = self.next() |
|---|
| 2346 | n/a | if tarinfo is None: |
|---|
| 2347 | n/a | break |
|---|
| 2348 | n/a | self._loaded = True |
|---|
| 2349 | n/a | |
|---|
| 2350 | n/a | def _check(self, mode=None): |
|---|
| 2351 | n/a | """Check if TarFile is still open, and if the operation's mode |
|---|
| 2352 | n/a | corresponds to TarFile's mode. |
|---|
| 2353 | n/a | """ |
|---|
| 2354 | n/a | if self.closed: |
|---|
| 2355 | n/a | raise OSError("%s is closed" % self.__class__.__name__) |
|---|
| 2356 | n/a | if mode is not None and self.mode not in mode: |
|---|
| 2357 | n/a | raise OSError("bad operation for mode %r" % self.mode) |
|---|
| 2358 | n/a | |
|---|
| 2359 | n/a | def _find_link_target(self, tarinfo): |
|---|
| 2360 | n/a | """Find the target member of a symlink or hardlink member in the |
|---|
| 2361 | n/a | archive. |
|---|
| 2362 | n/a | """ |
|---|
| 2363 | n/a | if tarinfo.issym(): |
|---|
| 2364 | n/a | # Always search the entire archive. |
|---|
| 2365 | n/a | linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname))) |
|---|
| 2366 | n/a | limit = None |
|---|
| 2367 | n/a | else: |
|---|
| 2368 | n/a | # Search the archive before the link, because a hard link is |
|---|
| 2369 | n/a | # just a reference to an already archived file. |
|---|
| 2370 | n/a | linkname = tarinfo.linkname |
|---|
| 2371 | n/a | limit = tarinfo |
|---|
| 2372 | n/a | |
|---|
| 2373 | n/a | member = self._getmember(linkname, tarinfo=limit, normalize=True) |
|---|
| 2374 | n/a | if member is None: |
|---|
| 2375 | n/a | raise KeyError("linkname %r not found" % linkname) |
|---|
| 2376 | n/a | return member |
|---|
| 2377 | n/a | |
|---|
| 2378 | n/a | def __iter__(self): |
|---|
| 2379 | n/a | """Provide an iterator over the archive's members. |
|---|
| 2380 | n/a | """ |
|---|
| 2381 | n/a | if self._loaded: |
|---|
| 2382 | n/a | yield from self.members |
|---|
| 2383 | n/a | return |
|---|
| 2384 | n/a | |
|---|
| 2385 | n/a | # Yield items using TarFile's next() method. |
|---|
| 2386 | n/a | # When all members have been read, set TarFile as _loaded. |
|---|
| 2387 | n/a | index = 0 |
|---|
| 2388 | n/a | # Fix for SF #1100429: Under rare circumstances it can |
|---|
| 2389 | n/a | # happen that getmembers() is called during iteration, |
|---|
| 2390 | n/a | # which will have already exhausted the next() method. |
|---|
| 2391 | n/a | if self.firstmember is not None: |
|---|
| 2392 | n/a | tarinfo = self.next() |
|---|
| 2393 | n/a | index += 1 |
|---|
| 2394 | n/a | yield tarinfo |
|---|
| 2395 | n/a | |
|---|
| 2396 | n/a | while True: |
|---|
| 2397 | n/a | if index < len(self.members): |
|---|
| 2398 | n/a | tarinfo = self.members[index] |
|---|
| 2399 | n/a | elif not self._loaded: |
|---|
| 2400 | n/a | tarinfo = self.next() |
|---|
| 2401 | n/a | if not tarinfo: |
|---|
| 2402 | n/a | self._loaded = True |
|---|
| 2403 | n/a | return |
|---|
| 2404 | n/a | else: |
|---|
| 2405 | n/a | return |
|---|
| 2406 | n/a | index += 1 |
|---|
| 2407 | n/a | yield tarinfo |
|---|
| 2408 | n/a | |
|---|
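Since __iter__() falls back to next() while the archive is only partially read, iteration also works for non-seekable streams opened in "r|*" mode, where members are consumed strictly in order. A usage sketch over a gzip-compressed stream (the file name is a placeholder):

```python
import tarfile

# Stream mode: members are yielded one by one as the compressed data is read.
with tarfile.open("example.tar.gz", mode="r|gz") as tf:
    for member in tf:
        if member.isfile():
            print(member.name)
```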
| 2409 | n/a | def _dbg(self, level, msg): |
|---|
| 2410 | n/a | """Write debugging output to sys.stderr. |
|---|
| 2411 | n/a | """ |
|---|
| 2412 | n/a | if level <= self.debug: |
|---|
| 2413 | n/a | print(msg, file=sys.stderr) |
|---|
| 2414 | n/a | |
|---|
| 2415 | n/a | def __enter__(self): |
|---|
| 2416 | n/a | self._check() |
|---|
| 2417 | n/a | return self |
|---|
| 2418 | n/a | |
|---|
| 2419 | n/a | def __exit__(self, exc_type, exc_value, exc_tb): |
|---|
| 2420 | n/a | if exc_type is None: |
|---|
| 2421 | n/a | self.close() |
|---|
| 2422 | n/a | else: |
|---|
| 2423 | n/a | # An exception occurred. We must not call close() because |
|---|
| 2424 | n/a | # it would try to write end-of-archive blocks and padding. |
|---|
| 2425 | n/a | if not self._extfileobj: |
|---|
| 2426 | n/a | self.fileobj.close() |
|---|
| 2427 | n/a | self.closed = True |
|---|
| 2428 | n/a | |
|---|
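__enter__()/__exit__() make TarFile a context manager: a normal exit calls close(), which in write mode also appends the end-of-archive blocks, while an exception only closes the underlying file object so a broken archive is not finalized. Typical usage, with placeholder paths:

```python
import tarfile

# Writing: the end-of-archive blocks are written when the with-block exits cleanly.
with tarfile.open("backup.tar.gz", "w:gz") as tf:    # placeholder output name
    tf.add("some_directory")                         # placeholder input path

# Reading it back:
with tarfile.open("backup.tar.gz", "r:gz") as tf:
    tf.extractall(path="restore_dir")                # only extract archives you trust
```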
| 2429 | n/a | #-------------------- |
|---|
| 2430 | n/a | # exported functions |
|---|
| 2431 | n/a | #-------------------- |
|---|
| 2432 | n/a | def is_tarfile(name): |
|---|
| 2433 | n/a | """Return True if name points to a tar archive that we |
|---|
| 2434 | n/a | are able to handle, else return False. |
|---|
| 2435 | n/a | """ |
|---|
| 2436 | n/a | try: |
|---|
| 2437 | n/a | t = open(name) |
|---|
| 2438 | n/a | t.close() |
|---|
| 2439 | n/a | return True |
|---|
| 2440 | n/a | except TarError: |
|---|
| 2441 | n/a | return False |
|---|
| 2442 | n/a | |
|---|
| 2443 | n/a | open = TarFile.open |
|---|
| 2444 | n/a | |
|---|
| 2445 | n/a | |
|---|
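is_tarfile() simply tries to open the path with the module-level open() (an alias for TarFile.open, bound just above) and reports whether that succeeds. Note that only TarError is caught, so a missing or unreadable path still raises OSError. A short usage sketch with placeholder file names:

```python
import tarfile

for candidate in ("archive.tar.gz", "notes.txt"):    # placeholder paths that must exist
    if tarfile.is_tarfile(candidate):
        print(candidate, "looks like a tar archive")
    else:
        print(candidate, "is not a tar archive")
```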
| 2446 | n/a | def main(): |
|---|
| 2447 | n/a | import argparse |
|---|
| 2448 | n/a | |
|---|
| 2449 | n/a | description = 'A simple command-line interface for the tarfile module.' |
|---|
| 2450 | n/a | parser = argparse.ArgumentParser(description=description) |
|---|
| 2451 | n/a | parser.add_argument('-v', '--verbose', action='store_true', default=False, |
|---|
| 2452 | n/a | help='Verbose output') |
|---|
| 2453 | n/a | group = parser.add_mutually_exclusive_group() |
|---|
| 2454 | n/a | group.add_argument('-l', '--list', metavar='<tarfile>', |
|---|
| 2455 | n/a | help='Show listing of a tarfile') |
|---|
| 2456 | n/a | group.add_argument('-e', '--extract', nargs='+', |
|---|
| 2457 | n/a | metavar=('<tarfile>', '<output_dir>'), |
|---|
| 2458 | n/a | help='Extract tarfile into target dir') |
|---|
| 2459 | n/a | group.add_argument('-c', '--create', nargs='+', |
|---|
| 2460 | n/a | metavar=('<name>', '<file>'), |
|---|
| 2461 | n/a | help='Create tarfile from sources') |
|---|
| 2462 | n/a | group.add_argument('-t', '--test', metavar='<tarfile>', |
|---|
| 2463 | n/a | help='Test if a tarfile is valid') |
|---|
| 2464 | n/a | args = parser.parse_args() |
|---|
| 2465 | n/a | |
|---|
| 2466 | n/a | if args.test: |
|---|
| 2467 | n/a | src = args.test |
|---|
| 2468 | n/a | if is_tarfile(src): |
|---|
| 2469 | n/a | with open(src, 'r') as tar: |
|---|
| 2470 | n/a | # getmembers() reads through the whole archive, so printing it doubles as a validity check. |
|---|
| 2471 | n/a | print(tar.getmembers(), file=sys.stderr) |
|---|
| 2472 | n/a | if args.verbose: |
|---|
| 2473 | n/a | print('{!r} is a tar archive.'.format(src)) |
|---|
| 2474 | n/a | else: |
|---|
| 2475 | n/a | parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) |
|---|
| 2476 | n/a | |
|---|
| 2477 | n/a | elif args.list: |
|---|
| 2478 | n/a | src = args.list |
|---|
| 2479 | n/a | if is_tarfile(src): |
|---|
| 2480 | n/a | with TarFile.open(src, 'r:*') as tf: |
|---|
| 2481 | n/a | tf.list(verbose=args.verbose) |
|---|
| 2482 | n/a | else: |
|---|
| 2483 | n/a | parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) |
|---|
| 2484 | n/a | |
|---|
| 2485 | n/a | elif args.extract: |
|---|
| 2486 | n/a | if len(args.extract) == 1: |
|---|
| 2487 | n/a | src = args.extract[0] |
|---|
| 2488 | n/a | curdir = os.curdir |
|---|
| 2489 | n/a | elif len(args.extract) == 2: |
|---|
| 2490 | n/a | src, curdir = args.extract |
|---|
| 2491 | n/a | else: |
|---|
| 2492 | n/a | parser.exit(1, parser.format_help()) |
|---|
| 2493 | n/a | |
|---|
| 2494 | n/a | if is_tarfile(src): |
|---|
| 2495 | n/a | with TarFile.open(src, 'r:*') as tf: |
|---|
| 2496 | n/a | tf.extractall(path=curdir) |
|---|
| 2497 | n/a | if args.verbose: |
|---|
| 2498 | n/a | if curdir == '.': |
|---|
| 2499 | n/a | msg = '{!r} was extracted.'.format(src) |
|---|
| 2500 | n/a | else: |
|---|
| 2501 | n/a | msg = ('{!r} was extracted ' |
|---|
| 2502 | n/a | 'into the {!r} directory.').format(src, curdir) |
|---|
| 2503 | n/a | print(msg) |
|---|
| 2504 | n/a | else: |
|---|
| 2505 | n/a | parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) |
|---|
| 2506 | n/a | |
|---|
| 2507 | n/a | elif args.create: |
|---|
| 2508 | n/a | tar_name = args.create.pop(0) |
|---|
| 2509 | n/a | _, ext = os.path.splitext(tar_name) |
|---|
| 2510 | n/a | compressions = { |
|---|
| 2511 | n/a | # gz |
|---|
| 2512 | n/a | '.gz': 'gz', |
|---|
| 2513 | n/a | '.tgz': 'gz', |
|---|
| 2514 | n/a | # xz |
|---|
| 2515 | n/a | '.xz': 'xz', |
|---|
| 2516 | n/a | '.txz': 'xz', |
|---|
| 2517 | n/a | # bz2 |
|---|
| 2518 | n/a | '.bz2': 'bz2', |
|---|
| 2519 | n/a | '.tbz': 'bz2', |
|---|
| 2520 | n/a | '.tbz2': 'bz2', |
|---|
| 2521 | n/a | '.tb2': 'bz2', |
|---|
| 2522 | n/a | } |
|---|
| 2523 | n/a | tar_mode = ('w:' + compressions[ext]) if ext in compressions else 'w' |
|---|
| 2524 | n/a | tar_files = args.create |
|---|
| 2525 | n/a | |
|---|
| 2526 | n/a | with TarFile.open(tar_name, tar_mode) as tf: |
|---|
| 2527 | n/a | for file_name in tar_files: |
|---|
| 2528 | n/a | tf.add(file_name) |
|---|
| 2529 | n/a | |
|---|
| 2530 | n/a | if args.verbose: |
|---|
| 2531 | n/a | print('{!r} was created.'.format(tar_name)) |
|---|
| 2532 | n/a | |
|---|
| 2533 | n/a | else: |
|---|
| 2534 | n/a | parser.exit(1, parser.format_help()) |
|---|
| 2535 | n/a | |
|---|
| 2536 | n/a | if __name__ == '__main__': |
|---|
| 2537 | n/a | main() |
|---|
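main() backs the `python -m tarfile` command line (via the `__main__` guard above), offering --list, --extract, --create and --test. The same operations can also be driven from Python by setting sys.argv before calling the module-level main(); this is only an illustration with placeholder names, since the module API (tarfile.open) is the usual route:

```python
import sys
import tarfile

# Equivalent of: python -m tarfile -v -c archive.tar.gz some_directory
sys.argv = ["tarfile", "-v", "-c", "archive.tar.gz", "some_directory"]   # placeholder names
tarfile.main()

# Equivalent of: python -m tarfile -l archive.tar.gz
sys.argv = ["tarfile", "-l", "archive.tar.gz"]
tarfile.main()
```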