logo

oasis-root

Compiled tree of Oasis Linux, built from our own branch at <https://hacktivis.me/git/oasis/>. Clone with: git clone https://anongit.hacktivis.me/git/oasis-root.git

tarfile.py (94844B)


  1. #!/usr/bin/env python3
  2. #-------------------------------------------------------------------
  3. # tarfile.py
  4. #-------------------------------------------------------------------
  5. # Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
  6. # All rights reserved.
  7. #
  8. # Permission is hereby granted, free of charge, to any person
  9. # obtaining a copy of this software and associated documentation
  10. # files (the "Software"), to deal in the Software without
  11. # restriction, including without limitation the rights to use,
  12. # copy, modify, merge, publish, distribute, sublicense, and/or sell
  13. # copies of the Software, and to permit persons to whom the
  14. # Software is furnished to do so, subject to the following
  15. # conditions:
  16. #
  17. # The above copyright notice and this permission notice shall be
  18. # included in all copies or substantial portions of the Software.
  19. #
  20. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  21. # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
  22. # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  23. # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
  24. # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  25. # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  26. # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  27. # OTHER DEALINGS IN THE SOFTWARE.
  28. #
  29. """Read from and write to tar format archives.
  30. """
  31. version = "0.9.0"
  32. __author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
  33. __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
  34. #---------
  35. # Imports
  36. #---------
  37. from builtins import open as bltn_open
  38. import sys
  39. import os
  40. import io
  41. import shutil
  42. import stat
  43. import time
  44. import struct
  45. import copy
  46. import re
# pwd and grp are POSIX-only; on platforms without them (e.g. Windows)
# they fall back to None and owner/group name lookups are skipped.
try:
    import pwd
except ImportError:
    pwd = None
try:
    import grp
except ImportError:
    grp = None

# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
    # OSError (winerror=1314) will be raised if the caller does not hold the
    # SeCreateSymbolicLinkPrivilege privilege
    symlink_exception += (OSError,)
except NameError:
    pass

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
           "CompressionError", "StreamError", "ExtractError", "HeaderError",
           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
           "DEFAULT_FORMAT", "open"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

# Member type flags as stored in the header's typeflag byte.
REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = PAX_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# On Windows the filesystem encoding cannot represent all characters,
# so UTF-8 is always used for header text there.
if os.name == "nt":
    ENCODING = "utf-8"
else:
    ENCODING = sys.getfilesystemencoding()
  138. def stn(s, length, encoding, errors):
  139. """Convert a string to a null-terminated bytes object.
  140. """
  141. s = s.encode(encoding, errors)
  142. return s[:length] + (length - len(s)) * NUL
  143. def nts(s, encoding, errors):
  144. """Convert a null-terminated bytes object to a string.
  145. """
  146. p = s.find(b"\0")
  147. if p != -1:
  148. s = s[:p]
  149. return s.decode(encoding, errors)
  150. def nti(s):
  151. """Convert a number field to a python number.
  152. """
  153. # There are two possible encodings for a number field, see
  154. # itn() below.
  155. if s[0] in (0o200, 0o377):
  156. n = 0
  157. for i in range(len(s) - 1):
  158. n <<= 8
  159. n += s[i + 1]
  160. if s[0] == 0o377:
  161. n = -(256 ** (len(s) - 1) - n)
  162. else:
  163. try:
  164. s = nts(s, "ascii", "strict")
  165. n = int(s.strip() or "0", 8)
  166. except ValueError:
  167. raise InvalidHeaderError("invalid header")
  168. return n
  169. def itn(n, digits=8, format=DEFAULT_FORMAT):
  170. """Convert a python number to a number field.
  171. """
  172. # POSIX 1003.1-1988 requires numbers to be encoded as a string of
  173. # octal digits followed by a null-byte, this allows values up to
  174. # (8**(digits-1))-1. GNU tar allows storing numbers greater than
  175. # that if necessary. A leading 0o200 or 0o377 byte indicate this
  176. # particular encoding, the following digits-1 bytes are a big-endian
  177. # base-256 representation. This allows values up to (256**(digits-1))-1.
  178. # A 0o200 byte indicates a positive number, a 0o377 byte a negative
  179. # number.
  180. original_n = n
  181. n = int(n)
  182. if 0 <= n < 8 ** (digits - 1):
  183. s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
  184. elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
  185. if n >= 0:
  186. s = bytearray([0o200])
  187. else:
  188. s = bytearray([0o377])
  189. n = 256 ** digits + n
  190. for i in range(digits - 1):
  191. s.insert(1, n & 0o377)
  192. n >>= 8
  193. else:
  194. raise ValueError("overflow in number field")
  195. return s
  196. def calc_chksums(buf):
  197. """Calculate the checksum for a member's header by summing up all
  198. characters except for the chksum field which is treated as if
  199. it was filled with spaces. According to the GNU tar sources,
  200. some tars (Sun and NeXT) calculate chksum with signed char,
  201. which will be different if there are chars in the buffer with
  202. the high bit set. So we calculate two checksums, unsigned and
  203. signed.
  204. """
  205. unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
  206. signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
  207. return unsigned_chksum, signed_chksum
  208. def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
  209. """Copy length bytes from fileobj src to fileobj dst.
  210. If length is None, copy the entire content.
  211. """
  212. bufsize = bufsize or 16 * 1024
  213. if length == 0:
  214. return
  215. if length is None:
  216. shutil.copyfileobj(src, dst, bufsize)
  217. return
  218. blocks, remainder = divmod(length, bufsize)
  219. for b in range(blocks):
  220. buf = src.read(bufsize)
  221. if len(buf) < bufsize:
  222. raise exception("unexpected end of data")
  223. dst.write(buf)
  224. if remainder != 0:
  225. buf = src.read(remainder)
  226. if len(buf) < remainder:
  227. raise exception("unexpected end of data")
  228. dst.write(buf)
  229. return
  230. def _safe_print(s):
  231. encoding = getattr(sys.stdout, 'encoding', None)
  232. if encoding is not None:
  233. s = s.encode(encoding, 'backslashreplace').decode(encoding)
  234. print(s, end=' ')
# Exception hierarchy:
#
#   TarError
#    +-- ExtractError
#    +-- ReadError
#    +-- CompressionError
#    +-- StreamError
#    +-- HeaderError
#         +-- EmptyHeaderError
#         +-- TruncatedHeaderError
#         +-- EOFHeaderError
#         +-- InvalidHeaderError
#         +-- SubsequentHeaderError
class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass
  268. #---------------------------
  269. # internal stream interface
  270. #---------------------------
  271. class _LowLevelFile:
  272. """Low-level file object. Supports reading and writing.
  273. It is used instead of a regular file object for streaming
  274. access.
  275. """
  276. def __init__(self, name, mode):
  277. mode = {
  278. "r": os.O_RDONLY,
  279. "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
  280. }[mode]
  281. if hasattr(os, "O_BINARY"):
  282. mode |= os.O_BINARY
  283. self.fd = os.open(name, mode, 0o666)
  284. def close(self):
  285. os.close(self.fd)
  286. def read(self, size):
  287. return os.read(self.fd, size)
  288. def write(self, s):
  289. os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.

    _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.

        name     -- archive name (also used for the gzip FNAME field)
        mode     -- "r" for reading, "w" for writing
        comptype -- "tar", "gz", "bz2", "xz", or "*" for auto-detection
        fileobj  -- existing stream-like object, or None to open *name*
        bufsize  -- block size for raw reads/writes on the stream
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            # We opened the file ourselves, so close() must close it too.
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = b""      # buffered raw (compressed) bytes
        self.pos = 0        # logical position in the uncompressed stream
        self.closed = False

        try:
            if comptype == "gz":
                try:
                    import zlib
                except ImportError:
                    raise CompressionError("zlib module is not available") from None
                self.zlib = zlib
                self.crc = zlib.crc32(b"")
                if mode == "r":
                    self._init_read_gz()
                    self.exception = zlib.error
                else:
                    self._init_write_gz()

            elif comptype == "bz2":
                try:
                    import bz2
                except ImportError:
                    raise CompressionError("bz2 module is not available") from None
                if mode == "r":
                    self.dbuf = b""     # buffered decompressed bytes
                    self.cmp = bz2.BZ2Decompressor()
                    self.exception = OSError
                else:
                    self.cmp = bz2.BZ2Compressor()

            elif comptype == "xz":
                try:
                    import lzma
                except ImportError:
                    raise CompressionError("lzma module is not available") from None
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = lzma.LZMADecompressor()
                    self.exception = lzma.LZMAError
                else:
                    self.cmp = lzma.LZMACompressor()

            elif comptype != "tar":
                raise CompressionError("unknown compression type %r" % comptype)

        except:
            # Initialization failed: release the fileobj if we own it and
            # mark the stream closed so __del__ does not try again.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    def __del__(self):
        # hasattr() guards against partially constructed instances.
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        # Raw deflate stream (negative wbits); the gzip header is written
        # by hand below and the trailer in close().
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", int(time.time()))
        # Gzip magic, deflate method, FNAME flag, mtime, XFL, OS=unknown.
        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # Honor "directory components removed" from RFC1952.
        self.name = os.path.basename(self.name)
        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        # self.pos tracks the uncompressed byte count.
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return
        self.closed = True
        try:
            if self.mode == "w" and self.comptype != "tar":
                # Flush whatever the compressor still holds back.
                self.buf += self.cmp.flush()

            if self.mode == "w" and self.buf:
                self.fileobj.write(self.buf)
                self.buf = b""
                if self.comptype == "gz":
                    # Gzip trailer: CRC32 and uncompressed size modulo
                    # 2**32, both little-endian (RFC1952).
                    self.fileobj.write(struct.pack("<L", self.crc))
                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
        finally:
            if not self._extfileobj:
                self.fileobj.close()

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = b""     # buffered decompressed bytes

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != b"\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != b"\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)      # skip mtime (4 bytes), XFL and OS byte

        if flag & 4:
            # FEXTRA: skip the length-prefixed extra field.
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: skip the zero-terminated original file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: skip the zero-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: skip the 16-bit header CRC.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            # Compressed data can only be skipped by reading through it.
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size):
        """Return the next size number of bytes from the stream."""
        assert size is not None
        buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            # Skip underlying buffer to avoid unaligned double buffering.
            if self.buf:
                buf = self.buf
                self.buf = b""
            else:
                buf = self.fileobj.read(self.bufsize)
                if not buf:
                    break
            try:
                buf = self.cmp.decompress(buf)
            except self.exception as e:
                raise ReadError("invalid compressed data") from e
            t.append(buf)
            c += len(buf)
        t = b"".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = b"".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
  505. class _StreamProxy(object):
  506. """Small proxy class that enables transparent compression
  507. detection for the Stream interface (mode 'r|*').
  508. """
  509. def __init__(self, fileobj):
  510. self.fileobj = fileobj
  511. self.buf = self.fileobj.read(BLOCKSIZE)
  512. def read(self, size):
  513. self.read = self.fileobj.read
  514. return self.buf
  515. def getcomptype(self):
  516. if self.buf.startswith(b"\x1f\x8b\x08"):
  517. return "gz"
  518. elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
  519. return "bz2"
  520. elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
  521. return "xz"
  522. else:
  523. return "tar"
  524. def close(self):
  525. self.fileobj.close()
  526. # class StreamProxy
  527. #------------------------
  528. # Extraction file object
  529. #------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset    # start of this member's data in fileobj
        self.size = size        # logical size of the member
        self.position = 0       # current logical read position
        self.name = getattr(fileobj, "name", None)
        self.closed = False

        if blockinfo is None:
            # Non-sparse member: one contiguous data block.
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks. Each entry is
        # (is_data, logical_start, logical_stop, real_offset); real_offset
        # is None for holes, which read back as NUL bytes.
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                # Gap before this data block becomes a zero block.
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            # Trailing hole up to the logical end of the member.
            self.map.append((False, lastpos, self.size, None))

    def flush(self):
        pass

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position, whence=io.SEEK_SET):
        """Seek to a position in the file.
        The resulting position is always clamped to [0, self.size].
        """
        if whence == io.SEEK_SET:
            self.position = min(max(position, 0), self.size)
        elif whence == io.SEEK_CUR:
            if position < 0:
                self.position = max(self.position + position, 0)
            else:
                self.position = min(self.position + position, self.size)
        elif whence == io.SEEK_END:
            self.position = max(min(self.size + position, self.size), 0)
        else:
            raise ValueError("Invalid argument")
        return self.position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            # Find the map entry covering the current position; the index
            # wraps around because seek() may have moved backwards.
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                self.fileobj.seek(offset + (self.position - start))
                b = self.fileobj.read(length)
                if len(b) != length:
                    raise ReadError("unexpected end of data")
                buf += b
            else:
                # Hole in a sparse member: reads back as zero bytes.
                buf += NUL * length
            size -= length
            self.position += length
        return buf

    def readinto(self, b):
        buf = self.read(len(b))
        b[:len(buf)] = buf
        return len(buf)

    def close(self):
        self.closed = True
#class _FileInFile
  620. class ExFileObject(io.BufferedReader):
  621. def __init__(self, tarfile, tarinfo):
  622. fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
  623. tarinfo.size, tarinfo.sparse)
  624. super().__init__(fileobj)
  625. #class ExFileObject
  626. #------------------
  627. # Exported Classes
  628. #------------------
class TarInfo(object):
    """Informational class which holds the details about an
    archive member given by a tar header block.
    TarInfo objects are returned by TarFile.getmember(),
    TarFile.getmembers() and TarFile.gettarinfo() and are
    usually created internally.
    """

    # Using a dict as __slots__ both keeps instances small and attaches
    # a docstring to each slot attribute.
    __slots__ = dict(
        name = 'Name of the archive member.',
        mode = 'Permission bits.',
        uid = 'User ID of the user who originally stored this member.',
        gid = 'Group ID of the user who originally stored this member.',
        size = 'Size in bytes.',
        mtime = 'Time of last modification.',
        chksum = 'Header checksum.',
        type = ('File type. type is usually one of these constants: '
                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
        linkname = ('Name of the target file name, which is only present '
                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
        uname = 'User name.',
        gname = 'Group name.',
        devmajor = 'Device major number.',
        devminor = 'Device minor number.',
        offset = 'The tar header starts here.',
        offset_data = "The file's data starts here.",
        pax_headers = ('A dictionary containing key-value pairs of an '
                       'associated pax extended header.'),
        sparse = 'Sparse member information.',
        tarfile = None,
        _sparse_structs = None,
        _link_target = None,
        )
  662. def __init__(self, name=""):
  663. """Construct a TarInfo object. name is the optional name
  664. of the member.
  665. """
  666. self.name = name # member name
  667. self.mode = 0o644 # file permissions
  668. self.uid = 0 # user id
  669. self.gid = 0 # group id
  670. self.size = 0 # file size
  671. self.mtime = 0 # modification time
  672. self.chksum = 0 # header checksum
  673. self.type = REGTYPE # member type
  674. self.linkname = "" # link name
  675. self.uname = "" # user name
  676. self.gname = "" # group name
  677. self.devmajor = 0 # device major number
  678. self.devminor = 0 # device minor number
  679. self.offset = 0 # the tar header starts here
  680. self.offset_data = 0 # the file's data starts here
  681. self.sparse = None # sparse member information
  682. self.pax_headers = {} # pax header information
    # Aliases used when converting to and from pax extended headers,
    # where "name"/"linkname" are spelled "path"/"linkpath".
    @property
    def path(self):
        'In pax headers, "name" is called "path".'
        return self.name

    @path.setter
    def path(self, name):
        self.name = name

    @property
    def linkpath(self):
        'In pax headers, "linkname" is called "linkpath".'
        return self.linkname

    @linkpath.setter
    def linkpath(self, linkname):
        self.linkname = linkname
  697. def __repr__(self):
  698. return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
  699. def get_info(self):
  700. """Return the TarInfo's attributes as a dictionary.
  701. """
  702. info = {
  703. "name": self.name,
  704. "mode": self.mode & 0o7777,
  705. "uid": self.uid,
  706. "gid": self.gid,
  707. "size": self.size,
  708. "mtime": self.mtime,
  709. "chksum": self.chksum,
  710. "type": self.type,
  711. "linkname": self.linkname,
  712. "uname": self.uname,
  713. "gname": self.gname,
  714. "devmajor": self.devmajor,
  715. "devminor": self.devminor
  716. }
  717. if info["type"] == DIRTYPE and not info["name"].endswith("/"):
  718. info["name"] += "/"
  719. return info
  720. def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
  721. """Return a tar header as a string of 512 byte blocks.
  722. """
  723. info = self.get_info()
  724. if format == USTAR_FORMAT:
  725. return self.create_ustar_header(info, encoding, errors)
  726. elif format == GNU_FORMAT:
  727. return self.create_gnu_header(info, encoding, errors)
  728. elif format == PAX_FORMAT:
  729. return self.create_pax_header(info, encoding)
  730. else:
  731. raise ValueError("invalid format")
  732. def create_ustar_header(self, info, encoding, errors):
  733. """Return the object as a ustar header block.
  734. """
  735. info["magic"] = POSIX_MAGIC
  736. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  737. raise ValueError("linkname is too long")
  738. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  739. info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
  740. return self._create_header(info, USTAR_FORMAT, encoding, errors)
  741. def create_gnu_header(self, info, encoding, errors):
  742. """Return the object as a GNU header block sequence.
  743. """
  744. info["magic"] = GNU_MAGIC
  745. buf = b""
  746. if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
  747. buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
  748. if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
  749. buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
  750. return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
    def create_pax_header(self, info, encoding):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                # Non-ASCII value: move it into the pax header.
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                # Too long for the ustar field: move into the pax header.
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
        else:
            buf = b""

        # The ustar header itself is written as plain ASCII; anything it
        # cannot represent was moved into the pax header above.
        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        # Global headers (type XGLTYPE) use UTF-8 for all keyword/value
        # pairs; per-member overrides use XHDTYPE instead.
        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
    def _posix_split_name(self, name, encoding, errors):
        """Split a name longer than 100 chars into a prefix
        and a name part.
        """
        components = name.split("/")
        # Move leading path components into the prefix one at a time
        # until both encoded halves fit their ustar fields.
        for i in range(1, len(components)):
            prefix = "/".join(components[:i])
            name = "/".join(components[i:])
            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
                break
        else:
            # for-else: no split point made both halves fit.
            raise ValueError("name is too long")

        return prefix, name
    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.
        """
        # Device numbers are only written for character and block devices;
        # for all other member types the fields are left empty.
        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
        if has_device_fields:
            devmajor = itn(info.get("devmajor", 0), 8, format)
            devminor = itn(info.get("devminor", 0), 8, format)
        else:
            devmajor = stn("", 8, encoding, errors)
            devminor = stn("", 8, encoding, errors)

        # ustar field layout; the checksum field is filled with spaces
        # for the initial calculation and patched below.
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            devmajor,
            devminor,
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        # Pad the joined fields to a full BLOCKSIZE block.
        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        # Patch the checksum in: the chksum field starts at offset 148
        # (= 512 - 364) and gets six octal digits plus a NUL; the final
        # byte at offset 155 (= 512 - 357) keeps its space.
        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
        return buf
  843. @staticmethod
  844. def _create_payload(payload):
  845. """Return the string payload filled with zero bytes
  846. up to the next 512 byte border.
  847. """
  848. blocks, remainder = divmod(len(payload), BLOCKSIZE)
  849. if remainder > 0:
  850. payload += (BLOCKSIZE - remainder) * NUL
  851. return payload
  852. @classmethod
  853. def _create_gnu_long_header(cls, name, type, encoding, errors):
  854. """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
  855. for name.
  856. """
  857. name = name.encode(encoding, errors) + NUL
  858. info = {}
  859. info["name"] = "././@LongLink"
  860. info["type"] = type
  861. info["size"] = len(name)
  862. info["magic"] = GNU_MAGIC
  863. # create extended header + name blocks.
  864. return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
  865. cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be strings.
        """
        # Check if one of the fields contains surrogate characters and thereby
        # forces hdrcharset=BINARY, see _proc_pax() for more information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf-8", "strict")
            except UnicodeEncodeError:
                binary = True
                break
        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"
        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf-8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Needless to say, that the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf-8")
            # Each record is "%d %s=%s\n" where the leading decimal length
            # counts the whole record *including its own digits*, so the
            # length must be found iteratively: keep recomputing until the
            # total stops changing.
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC
        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)
    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.

        Raises EmptyHeaderError, TruncatedHeaderError, EOFHeaderError or
        InvalidHeaderError for the respective malformed-header cases.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")
        # The checksum field lives at offset 148 (8 bytes).
        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")
        obj = cls()
        # Unpack the fixed ustar field layout (bytes 257:265 hold the
        # magic/version and are not stored on the object).
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)
        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE
        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            # Byte 482 flags extension blocks; 483:495 holds the real size.
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)
        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")
        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
  967. @classmethod
  968. def fromtarfile(cls, tarfile):
  969. """Return the next TarInfo object from TarFile object
  970. tarfile.
  971. """
  972. buf = tarfile.fileobj.read(BLOCKSIZE)
  973. obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
  974. obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
  975. return obj._proc_member(tarfile)
  976. #--------------------------------------------------------------------------
  977. # The following are methods that are called depending on the type of a
  978. # member. The entry point is _proc_member() which can be overridden in a
  979. # subclass to add custom _proc_*() methods. A _proc_*() method MUST
  980. # implement the following
  981. # operations:
  982. # 1. Set self.offset_data to the position where the data blocks begin,
  983. # if there is data that follows.
  984. # 2. Set tarfile.offset to the position where the next member's header will
  985. # begin.
  986. # 3. Return self or another valid TarInfo object.
  987. def _proc_member(self, tarfile):
  988. """Choose the right processing method depending on
  989. the type and call it.
  990. """
  991. if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
  992. return self._proc_gnulong(tarfile)
  993. elif self.type == GNUTYPE_SPARSE:
  994. return self._proc_sparse(tarfile)
  995. elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
  996. return self._proc_pax(tarfile)
  997. else:
  998. return self._proc_builtin(tarfile)
  999. def _proc_builtin(self, tarfile):
  1000. """Process a builtin type or an unknown type which
  1001. will be treated as a regular file.
  1002. """
  1003. self.offset_data = tarfile.fileobj.tell()
  1004. offset = self.offset_data
  1005. if self.isreg() or self.type not in SUPPORTED_TYPES:
  1006. # Skip the following data blocks.
  1007. offset += self._block(self.size)
  1008. tarfile.offset = offset
  1009. # Patch the TarInfo object with saved global
  1010. # header information.
  1011. self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
  1012. return self
    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
        or longlink member.
        """
        # The real (overlong) name is stored as this member's data payload.
        buf = tarfile.fileobj.read(self._block(self.size))
        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError as e:
            raise SubsequentHeaderError(str(e)) from None
        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
        return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs
        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # Each extension block holds up to 21 (offset, numbytes)
            # pairs of 12 octal digits each (21 * 24 = 504 bytes).
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            # Byte 504 flags whether another extension block follows.
            isextended = bool(buf[504])
        self.sparse = structs
        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        # self.size held the on-disk (condensed) size; report the
        # original file size instead.
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2008.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))
        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()
        # Check if the pax header contains a hdrcharset field. This tells us
        # the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded but since POSIX.1-2008 tar
        # implementations are allowed to store them as raw binary strings if
        # the translation to UTF-8 fails.
        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
        if match is not None:
            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
        # For the time being, we don't care about anything other than "BINARY".
        # The only other value that is currently allowed by the standard is
        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
        hdrcharset = pax_headers.get("hdrcharset")
        if hdrcharset == "BINARY":
            encoding = tarfile.encoding
        else:
            encoding = "utf-8"
        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            if length == 0:
                # A zero length would loop forever; reject it outright.
                raise InvalidHeaderError("invalid header")
            value = buf[match.end(2) + 1:match.start(1) + length - 1]
            # Normally, we could just use "utf-8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
            # translate to UTF-8 as raw strings (unfortunately without a
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
                    tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
                value = self._decode_pax_field(value, encoding, tarfile.encoding,
                        tarfile.errors)
            else:
                value = self._decode_pax_field(value, "utf-8", "utf-8",
                        tarfile.errors)
            pax_headers[keyword] = value
            pos += length
        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError as e:
            raise SubsequentHeaderError(str(e)) from None
        # Process GNU sparse information.
        if "GNU.sparse.map" in pax_headers:
            # GNU extended sparse format version 0.1.
            self._proc_gnusparse_01(next, pax_headers)
        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
            self._proc_gnusparse_00(next, pax_headers, buf)
        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
            self._proc_gnusparse_10(next, pax_headers, tarfile)
        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset
            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset
        return next
  1145. def _proc_gnusparse_00(self, next, pax_headers, buf):
  1146. """Process a GNU tar extended sparse header, version 0.0.
  1147. """
  1148. offsets = []
  1149. for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
  1150. offsets.append(int(match.group(1)))
  1151. numbytes = []
  1152. for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
  1153. numbytes.append(int(match.group(1)))
  1154. next.sparse = list(zip(offsets, numbytes))
  1155. def _proc_gnusparse_01(self, next, pax_headers):
  1156. """Process a GNU tar extended sparse header, version 0.1.
  1157. """
  1158. sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
  1159. next.sparse = list(zip(sparse[::2], sparse[1::2]))
    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
        """Process a GNU tar extended sparse header, version 1.0.
        """
        # In the 1.0 format the sparse map is stored in the member's data
        # area as newline-separated decimal numbers: first the entry count,
        # then alternating offset and size values.
        fields = None
        sparse = []
        buf = tarfile.fileobj.read(BLOCKSIZE)
        fields, buf = buf.split(b"\n", 1)
        fields = int(fields)
        while len(sparse) < fields * 2:
            if b"\n" not in buf:
                # The current number is cut off; read another block.
                buf += tarfile.fileobj.read(BLOCKSIZE)
            number, buf = buf.split(b"\n", 1)
            sparse.append(int(number))
        # The map is block-aligned, so the file data starts right here.
        next.offset_data = tarfile.fileobj.tell()
        next.sparse = list(zip(sparse[::2], sparse[1::2]))
  1175. def _apply_pax_info(self, pax_headers, encoding, errors):
  1176. """Replace fields with supplemental information from a previous
  1177. pax extended or global header.
  1178. """
  1179. for keyword, value in pax_headers.items():
  1180. if keyword == "GNU.sparse.name":
  1181. setattr(self, "path", value)
  1182. elif keyword == "GNU.sparse.size":
  1183. setattr(self, "size", int(value))
  1184. elif keyword == "GNU.sparse.realsize":
  1185. setattr(self, "size", int(value))
  1186. elif keyword in PAX_FIELDS:
  1187. if keyword in PAX_NUMBER_FIELDS:
  1188. try:
  1189. value = PAX_NUMBER_FIELDS[keyword](value)
  1190. except ValueError:
  1191. value = 0
  1192. if keyword == "path":
  1193. value = value.rstrip("/")
  1194. setattr(self, keyword, value)
  1195. self.pax_headers = pax_headers.copy()
  1196. def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
  1197. """Decode a single field from a pax record.
  1198. """
  1199. try:
  1200. return value.decode(encoding, "strict")
  1201. except UnicodeDecodeError:
  1202. return value.decode(fallback_encoding, fallback_errors)
  1203. def _block(self, count):
  1204. """Round up a byte count by BLOCKSIZE and return it,
  1205. e.g. _block(834) => 1024.
  1206. """
  1207. blocks, remainder = divmod(count, BLOCKSIZE)
  1208. if remainder:
  1209. blocks += 1
  1210. return blocks * BLOCKSIZE
    def isreg(self):
        """Return True if the Tarinfo object is a regular file."""
        return self.type in REGULAR_TYPES
    def isfile(self):
        """Return True if the Tarinfo object is a regular file."""
        # Alias for isreg(); kept for API symmetry.
        return self.isreg()
    def isdir(self):
        """Return True if it is a directory."""
        return self.type == DIRTYPE
    def issym(self):
        """Return True if it is a symbolic link."""
        return self.type == SYMTYPE
    def islnk(self):
        """Return True if it is a hard link."""
        return self.type == LNKTYPE
    def ischr(self):
        """Return True if it is a character device."""
        return self.type == CHRTYPE
    def isblk(self):
        """Return True if it is a block device."""
        return self.type == BLKTYPE
    def isfifo(self):
        """Return True if it is a FIFO."""
        return self.type == FIFOTYPE
    def issparse(self):
        """Return True if the member has a sparse map (set by the
        sparse-header processing methods)."""
        return self.sparse is not None
  1237. def isdev(self):
  1238. 'Return True if it is one of character device, block device or FIFO.'
  1239. return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
  1240. # class TarInfo
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """
    # Class-level defaults; instances may override them via __init__.
    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.
    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.
    errorlevel = 1              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.
    format = DEFAULT_FORMAT     # The format to use when creating an archive.
    encoding = ENCODING         # Encoding for 8-bit character strings.
    errors = None               # Error handler for unicode conversion.
    tarinfo = TarInfo           # The default TarInfo class to use.
    fileobject = ExFileObject   # The file-object for extractfile().
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors="surrogateescape", pax_headers=None, debug=None,
            errorlevel=None, copybufsize=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        """
        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
        if mode not in modes:
            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
        self.mode = mode
        self._mode = modes[mode]
        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            # We own this file object, so close() must close it.
            self._extfileobj = False
        else:
            if (name is None and hasattr(fileobj, "name") and
                isinstance(fileobj.name, (str, bytes))):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            # Caller-supplied file object stays open after close().
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj
        # Init attributes: explicit arguments override class-level defaults.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        self.errors = errors
        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}
        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel
        # Init datastructures.
        self.copybufsize = copybufsize
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added
        try:
            if self.mode == "r":
                # Pre-read the first member so next() can hand it out.
                self.firstmember = None
                self.firstmember = self.next()
            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError as e:
                        raise ReadError(str(e)) from None
            if self.mode in ("a", "w", "x"):
                self._loaded = True
                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            # Any failure during setup: release our own file object and
            # mark the archive closed before propagating.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
  1347. #--------------------------------------------------------------------------
  1348. # Below are the classmethods which act as alternate constructors to the
  1349. # TarFile class. The open() method is the only one that is needed for
  1350. # public use; it is the "super"-constructor and is able to select an
  1351. # adequate "sub"-constructor for a particular compression using the mapping
  1352. # from OPEN_METH.
  1353. #
  1354. # This concept allows one to subclass TarFile without losing the comfort of
  1355. # the super-constructor. A sub-constructor is registered and made available
  1356. # by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
        an appropriate TarFile class.

        mode:
        'r' or 'r:*' open for reading with transparent compression
        'r:'         open for reading exclusively uncompressed
        'r:gz'       open for reading with gzip compression
        'r:bz2'      open for reading with bzip2 compression
        'r:xz'       open for reading with lzma compression
        'a' or 'a:'  open for appending, creating the file if necessary
        'w' or 'w:'  open for writing without compression
        'w:gz'       open for writing with gzip compression
        'w:bz2'      open for writing with bzip2 compression
        'w:xz'       open for writing with lzma compression

        'x' or 'x:'  create a tarfile exclusively without compression, raise
                     an exception if the file is already created
        'x:gz'       create a gzip compressed tarfile, raise an exception
                     if the file is already created
        'x:bz2'      create a bzip2 compressed tarfile, raise an exception
                     if the file is already created
        'x:xz'       create an lzma compressed tarfile, raise an exception
                     if the file is already created

        'r|*'        open a stream of tar blocks with transparent compression
        'r|'         open an uncompressed stream of tar blocks for reading
        'r|gz'       open a gzip compressed stream of tar blocks
        'r|bz2'      open a bzip2 compressed stream of tar blocks
        'r|xz'       open an lzma compressed stream of tar blocks
        'w|'         open an uncompressed stream for writing
        'w|gz'       open a gzip compressed stream for writing
        'w|bz2'      open a bzip2 compressed stream for writing
        'w|xz'       open an lzma compressed stream for writing
        """
        if not name and not fileobj:
            raise ValueError("nothing to open")
        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            def not_compressed(comptype):
                return cls.OPEN_METH[comptype] == 'taropen'
            error_msgs = []
            # Try the compressed openers first, plain taropen last.
            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError) as e:
                    error_msgs.append(f'- method {comptype}: {e!r}')
                    if fileobj is not None:
                        # Rewind so the next opener sees the same bytes.
                        fileobj.seek(saved_pos)
                    continue
            error_msgs_summary = '\n'.join(error_msgs)
            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)
        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            if filemode not in ("r", "w"):
                raise ValueError("mode must be 'r' or 'w'")
            stream = _Stream(name, filemode, comptype, fileobj, bufsize)
            try:
                t = cls(name, filemode, stream, **kwargs)
            except:
                stream.close()
                raise
            # The TarFile owns the stream and must close it.
            t._extfileobj = False
            return t
        elif mode in ("a", "w", "x"):
            return cls.taropen(name, mode, fileobj, **kwargs)
        raise ValueError("undiscernible mode")
  1438. @classmethod
  1439. def taropen(cls, name, mode="r", fileobj=None, **kwargs):
  1440. """Open uncompressed tar archive name for reading or writing.
  1441. """
  1442. if mode not in ("r", "a", "w", "x"):
  1443. raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
  1444. return cls(name, mode, fileobj, **kwargs)
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")
        try:
            from gzip import GzipFile
        except ImportError:
            raise CompressionError("gzip module is not available") from None
        try:
            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
        except OSError as e:
            # GzipFile failed on a caller-supplied file object opened for
            # reading: report it as "not a gzip file".
            if fileobj is not None and mode == 'r':
                raise ReadError("not a gzip file") from e
            raise
        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except OSError as e:
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a gzip file") from e
            raise
        except:
            # Close the GzipFile we created before propagating any
            # other error.
            fileobj.close()
            raise
        # The TarFile owns the GzipFile wrapper and must close it.
        t._extfileobj = False
        return t
    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")
        try:
            from bz2 import BZ2File
        except ImportError:
            raise CompressionError("bz2 module is not available") from None
        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (OSError, EOFError) as e:
            # Decompression failed while reading the first header.
            fileobj.close()
            if mode == 'r':
                raise ReadError("not a bzip2 file") from e
            raise
        except:
            # Close the BZ2File we created before propagating.
            fileobj.close()
            raise
        # The TarFile owns the BZ2File wrapper and must close it.
        t._extfileobj = False
        return t
    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
        """Open lzma compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if mode not in ("r", "w", "x"):
            raise ValueError("mode must be 'r', 'w' or 'x'")
        try:
            from lzma import LZMAFile, LZMAError
        except ImportError:
            raise CompressionError("lzma module is not available") from None
        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (LZMAError, EOFError) as e:
            # Decompression failed while reading the first header.
            fileobj.close()
            if mode == 'r':
                raise ReadError("not an lzma file") from e
            raise
        except:
            # Close the LZMAFile we created before propagating.
            fileobj.close()
            raise
        # The TarFile owns the LZMAFile wrapper and must close it.
        t._extfileobj = False
        return t
    # All *open() methods are registered here.
    # Maps the compression suffix used in mode strings ("r:gz", "w|bz2", ...)
    # to the name of the classmethod that handles it.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open",   # bzip2 compressed tar
        "xz":  "xzopen"     # lzma compressed tar
    }
  1529. #--------------------------------------------------------------------------
  1530. # The public methods which TarFile provides:
  1531. def close(self):
  1532. """Close the TarFile. In write-mode, two finishing zero blocks are
  1533. appended to the archive.
  1534. """
  1535. if self.closed:
  1536. return
  1537. self.closed = True
  1538. try:
  1539. if self.mode in ("a", "w", "x"):
  1540. self.fileobj.write(NUL * (BLOCKSIZE * 2))
  1541. self.offset += (BLOCKSIZE * 2)
  1542. # fill up the end with zero-blocks
  1543. # (like option -b20 for tar does)
  1544. blocks, remainder = divmod(self.offset, RECORDSIZE)
  1545. if remainder > 0:
  1546. self.fileobj.write(NUL * (RECORDSIZE - remainder))
  1547. finally:
  1548. if not self._extfileobj:
  1549. self.fileobj.close()
  1550. def getmember(self, name):
  1551. """Return a TarInfo object for member `name'. If `name' can not be
  1552. found in the archive, KeyError is raised. If a member occurs more
  1553. than once in the archive, its last occurrence is assumed to be the
  1554. most up-to-date version.
  1555. """
  1556. tarinfo = self._getmember(name)
  1557. if tarinfo is None:
  1558. raise KeyError("filename %r not found" % name)
  1559. return tarinfo
  1560. def getmembers(self):
  1561. """Return the members of the archive as a list of TarInfo objects. The
  1562. list has the same order as the members in the archive.
  1563. """
  1564. self._check()
  1565. if not self._loaded: # if we want to obtain a list of
  1566. self._load() # all members, we first have to
  1567. # scan the whole archive.
  1568. return self.members
  1569. def getnames(self):
  1570. """Return the members of the archive as a list of their names. It has
  1571. the same order as the list returned by getmembers().
  1572. """
  1573. return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object from the result of os.stat or equivalent
        on an existing file. The file is either named by `name', or
        specified as a file object `fileobj' with a file descriptor. If
        given, `arcname' specifies an alternative name for the file in the
        archive, otherwise, the name is taken from the 'name' attribute of
        'fileobj', or the 'name' argument. The name should be a text
        string.
        """
        self._check("awx")
        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name
        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")
        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self  # Not needed
        # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
        if fileobj is None:
            if not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""
        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Sockets and other unsupported file types are skipped.
            return None
        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            # Only regular files store a payload size.
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # Resolve numeric ids to names where the platform provides
        # pwd/grp; unknown ids are silently left unnamed.
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass
        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
def list(self, verbose=True, *, members=None):
    """Print a table of contents to sys.stdout. If `verbose' is False, only
    the names of the members are printed. If it is True, an `ls -l'-like
    output is produced. `members' is optional and must be a subset of the
    list returned by getmembers().
    """
    self._check()

    if members is None:
        members = self
    for tarinfo in members:
        if verbose:
            # ls -l style prefix: mode string, owner/group, size (or
            # major,minor for devices), and mtime.
            _safe_print(stat.filemode(tarinfo.mode))
            _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                   tarinfo.gname or tarinfo.gid))
            if tarinfo.ischr() or tarinfo.isblk():
                _safe_print("%10s" %
                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
            else:
                _safe_print("%10d" % tarinfo.size)
            _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
                        % time.localtime(tarinfo.mtime)[:6])

        # Directories get a trailing slash, as ls does.
        _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))

        if verbose:
            if tarinfo.issym():
                _safe_print("-> " + tarinfo.linkname)
            if tarinfo.islnk():
                _safe_print("link to " + tarinfo.linkname)
        print()
def add(self, name, arcname=None, recursive=True, *, filter=None):
    """Add the file `name' to the archive. `name' may be any type of file
    (directory, fifo, symbolic link, etc.). If given, `arcname'
    specifies an alternative name for the file in the archive.
    Directories are added recursively by default. This can be avoided by
    setting `recursive' to False. `filter' is a function
    that expects a TarInfo object argument and returns the changed
    TarInfo object, if it returns None the TarInfo object will be
    excluded from the archive.
    """
    self._check("awx")

    if arcname is None:
        arcname = name

    # Skip if somebody tries to archive the archive...
    if self.name is not None and os.path.abspath(name) == self.name:
        self._dbg(2, "tarfile: Skipped %r" % name)
        return

    self._dbg(1, name)

    # Create a TarInfo object from the file.
    tarinfo = self.gettarinfo(name, arcname)

    if tarinfo is None:
        # gettarinfo() returns None for types tar cannot represent.
        self._dbg(1, "tarfile: Unsupported type %r" % name)
        return

    # Change or exclude the TarInfo object.
    if filter is not None:
        tarinfo = filter(tarinfo)
        if tarinfo is None:
            self._dbg(2, "tarfile: Excluded %r" % name)
            return

    # Append the tar header and data to the archive.
    if tarinfo.isreg():
        with bltn_open(name, "rb") as f:
            self.addfile(tarinfo, f)

    elif tarinfo.isdir():
        self.addfile(tarinfo)
        if recursive:
            # Sorted for deterministic, reproducible archives.
            for f in sorted(os.listdir(name)):
                self.add(os.path.join(name, f), os.path.join(arcname, f),
                         recursive, filter=filter)

    else:
        # Header-only members (symlinks, fifos, devices, ...).
        self.addfile(tarinfo)
def addfile(self, tarinfo, fileobj=None):
    """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
    given, it should be a binary file, and tarinfo.size bytes are read
    from it and added to the archive. You can create TarInfo objects
    directly, or by using gettarinfo().
    """
    self._check("awx")

    # Copy so that the caller's TarInfo object is not mutated.
    tarinfo = copy.copy(tarinfo)

    # Write the member header and advance the archive offset.
    buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
    self.fileobj.write(buf)
    self.offset += len(buf)
    bufsize = self.copybufsize
    # If there's data to follow, append it.
    if fileobj is not None:
        copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
        # Pad the payload with NULs up to a full 512-byte block.
        blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
        if remainder > 0:
            self.fileobj.write(NUL * (BLOCKSIZE - remainder))
            blocks += 1
        self.offset += blocks * BLOCKSIZE

    self.members.append(tarinfo)
def extractall(self, path=".", members=None, *, numeric_owner=False):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers(). If `numeric_owner` is True, only
    the numbers for user/group names are used and not the names.

    NOTE(review): member names are joined onto `path' without
    sanitization, so archives from untrusted sources may write outside
    `path' (cf. CVE-2007-4559). Only extract trusted archives.
    """
    directories = []

    if members is None:
        members = self

    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 0o700
        # Do not set_attrs directories, as we will do that further down
        self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
                     numeric_owner=numeric_owner)

    # Reverse sort directories: attributes must be set depth-first so a
    # restrictive parent mode does not block work inside it.
    directories.sort(key=lambda a: a.name)
    directories.reverse()

    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError as e:
            # Non-fatal attribute problems only propagate at errorlevel > 1.
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
    """Extract a member from the archive to the current working directory,
    using its full name. Its file information is extracted as accurately
    as possible. `member' may be a filename or a TarInfo object. You can
    specify a different directory using `path'. File attributes (owner,
    mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
    is True, only the numbers for user/group names are used and not
    the names.

    NOTE(review): the member name is joined onto `path' without
    sanitization (cf. CVE-2007-4559); only extract trusted archives.
    """
    self._check("r")

    # Accept either a member name or a TarInfo object.
    if isinstance(member, str):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member

    # Prepare the link target for makelink().
    if tarinfo.islnk():
        tarinfo._link_target = os.path.join(path, tarinfo.linkname)

    try:
        self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
                             set_attrs=set_attrs,
                             numeric_owner=numeric_owner)
    except OSError as e:
        # errorlevel 0: log only; errorlevel >= 1: propagate OS errors.
        if self.errorlevel > 0:
            raise
        else:
            if e.filename is None:
                self._dbg(1, "tarfile: %s" % e.strerror)
            else:
                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
    except ExtractError as e:
        # Non-fatal extraction problems only propagate at errorlevel > 1.
        if self.errorlevel > 1:
            raise
        else:
            self._dbg(1, "tarfile: %s" % e)
def extractfile(self, member):
    """Extract a member from the archive as a file object. `member' may be
    a filename or a TarInfo object. If `member' is a regular file or
    a link, an io.BufferedReader object is returned. For all other
    existing members, None is returned. If `member' does not appear
    in the archive, KeyError is raised.
    """
    self._check("r")

    # Accept either a member name or a TarInfo object.
    if isinstance(member, str):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member

    if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
        # Members with unknown types are treated as regular files.
        return self.fileobject(self, tarinfo)

    elif tarinfo.islnk() or tarinfo.issym():
        if isinstance(self.fileobj, _Stream):
            # A small but ugly workaround for the case that someone tries
            # to extract a (sym)link as a file-object from a non-seekable
            # stream of tar blocks.
            raise StreamError("cannot extract (sym)link as file object")
        else:
            # A (sym)link's file object is its target's file object.
            return self.extractfile(self._find_link_target(tarinfo))
    else:
        # If there's no data associated with the member (directory, chrdev,
        # blkdev, etc.), return None instead of a file object.
        return None
def _extract_member(self, tarinfo, targetpath, set_attrs=True,
                    numeric_owner=False):
    """Extract the TarInfo object tarinfo to a physical
    file called targetpath.
    """
    # Fetch the TarInfo object for the given name
    # and build the destination pathname, replacing
    # forward slashes to platform specific separators.
    targetpath = targetpath.rstrip("/")
    targetpath = targetpath.replace("/", os.sep)

    # Create all upper directories.
    upperdirs = os.path.dirname(targetpath)
    if upperdirs and not os.path.exists(upperdirs):
        # Create directories that are not part of the archive with
        # default permissions.
        os.makedirs(upperdirs)

    if tarinfo.islnk() or tarinfo.issym():
        self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
    else:
        self._dbg(1, tarinfo.name)

    # Dispatch on member type; each make*() method may be overridden in
    # a subclass to customize extraction.
    if tarinfo.isreg():
        self.makefile(tarinfo, targetpath)
    elif tarinfo.isdir():
        self.makedir(tarinfo, targetpath)
    elif tarinfo.isfifo():
        self.makefifo(tarinfo, targetpath)
    elif tarinfo.ischr() or tarinfo.isblk():
        self.makedev(tarinfo, targetpath)
    elif tarinfo.islnk() or tarinfo.issym():
        self.makelink(tarinfo, targetpath)
    elif tarinfo.type not in SUPPORTED_TYPES:
        self.makeunknown(tarinfo, targetpath)
    else:
        self.makefile(tarinfo, targetpath)

    if set_attrs:
        self.chown(tarinfo, targetpath, numeric_owner)
        if not tarinfo.issym():
            # chmod/utime on the symlink itself is skipped.
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
  1891. #--------------------------------------------------------------------------
  1892. # Below are the different file methods. They are called via
  1893. # _extract_member() when extract() is called. They can be replaced in a
  1894. # subclass to implement other functionality.
  1895. def makedir(self, tarinfo, targetpath):
  1896. """Make a directory called targetpath.
  1897. """
  1898. try:
  1899. # Use a safe mode for the directory, the real mode is set
  1900. # later in _extract_member().
  1901. os.mkdir(targetpath, 0o700)
  1902. except FileExistsError:
  1903. pass
  1904. def makefile(self, tarinfo, targetpath):
  1905. """Make a file called targetpath.
  1906. """
  1907. source = self.fileobj
  1908. source.seek(tarinfo.offset_data)
  1909. bufsize = self.copybufsize
  1910. with bltn_open(targetpath, "wb") as target:
  1911. if tarinfo.sparse is not None:
  1912. for offset, size in tarinfo.sparse:
  1913. target.seek(offset)
  1914. copyfileobj(source, target, size, ReadError, bufsize)
  1915. target.seek(tarinfo.size)
  1916. target.truncate()
  1917. else:
  1918. copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
  1919. def makeunknown(self, tarinfo, targetpath):
  1920. """Make a file from a TarInfo object with an unknown type
  1921. at targetpath.
  1922. """
  1923. self.makefile(tarinfo, targetpath)
  1924. self._dbg(1, "tarfile: Unknown file type %r, " \
  1925. "extracted as regular file." % tarinfo.type)
  1926. def makefifo(self, tarinfo, targetpath):
  1927. """Make a fifo called targetpath.
  1928. """
  1929. if hasattr(os, "mkfifo"):
  1930. os.mkfifo(targetpath)
  1931. else:
  1932. raise ExtractError("fifo not supported by system")
  1933. def makedev(self, tarinfo, targetpath):
  1934. """Make a character or block device called targetpath.
  1935. """
  1936. if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
  1937. raise ExtractError("special devices not supported by system")
  1938. mode = tarinfo.mode
  1939. if tarinfo.isblk():
  1940. mode |= stat.S_IFBLK
  1941. else:
  1942. mode |= stat.S_IFCHR
  1943. os.mknod(targetpath, mode,
  1944. os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
    """Make a (symbolic) link called targetpath. If it cannot be created
    (platform limitation), we try to make a copy of the referenced file
    instead of a link.
    """
    try:
        # For systems that support symbolic and hard links.
        if tarinfo.issym():
            if os.path.lexists(targetpath):
                # Avoid FileExistsError on following os.symlink.
                os.unlink(targetpath)
            os.symlink(tarinfo.linkname, targetpath)
        else:
            # See extract().
            if os.path.exists(tarinfo._link_target):
                os.link(tarinfo._link_target, targetpath)
            else:
                # Hard-link target missing on disk: extract the archived
                # target member in its place instead.
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
    except symlink_exception:
        # Platform cannot create links at all: fall back to extracting a
        # copy of the link's target.
        try:
            self._extract_member(self._find_link_target(tarinfo),
                                 targetpath)
        except KeyError:
            raise ExtractError("unable to resolve link inside archive") from None
def chown(self, tarinfo, targetpath, numeric_owner):
    """Set owner of targetpath according to tarinfo. If numeric_owner
    is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
    is False, fall back to .gid/.uid when the search based on name
    fails.
    """
    if hasattr(os, "geteuid") and os.geteuid() == 0:
        # We have to be root to do so.
        g = tarinfo.gid
        u = tarinfo.uid
        if not numeric_owner:
            # Prefer the symbolic names; silently keep the numeric ids
            # when a name is unknown on this system.
            try:
                if grp:
                    g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                pass
            try:
                if pwd:
                    u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                pass
        try:
            if tarinfo.issym() and hasattr(os, "lchown"):
                # Change the link itself, not its target.
                os.lchown(targetpath, u, g)
            else:
                os.chown(targetpath, u, g)
        except OSError as e:
            raise ExtractError("could not change owner") from e
  1998. def chmod(self, tarinfo, targetpath):
  1999. """Set file permissions of targetpath according to tarinfo.
  2000. """
  2001. try:
  2002. os.chmod(targetpath, tarinfo.mode)
  2003. except OSError as e:
  2004. raise ExtractError("could not change mode") from e
  2005. def utime(self, tarinfo, targetpath):
  2006. """Set modification time of targetpath according to tarinfo.
  2007. """
  2008. if not hasattr(os, 'utime'):
  2009. return
  2010. try:
  2011. os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
  2012. except OSError as e:
  2013. raise ExtractError("could not change modification time") from e
  2014. #--------------------------------------------------------------------------
def next(self):
    """Return the next member of the archive as a TarInfo object, when
    TarFile is opened for reading. Return None if there is no more
    available.
    """
    self._check("ra")
    # A member may already have been read ahead; hand it out first.
    if self.firstmember is not None:
        m = self.firstmember
        self.firstmember = None
        return m

    # Advance the file pointer.
    if self.offset != self.fileobj.tell():
        # Seek to one byte before the expected header and read that byte
        # so a truncated archive is detected even on seekable files.
        self.fileobj.seek(self.offset - 1)
        if not self.fileobj.read(1):
            raise ReadError("unexpected end of data")

    # Read the next block.
    tarinfo = None
    while True:
        try:
            tarinfo = self.tarinfo.fromtarfile(self)
        except EOFHeaderError as e:
            # All-zero block: either end-of-archive, or skippable noise
            # when ignore_zeros is set.
            if self.ignore_zeros:
                self._dbg(2, "0x%X: %s" % (self.offset, e))
                self.offset += BLOCKSIZE
                continue
        except InvalidHeaderError as e:
            if self.ignore_zeros:
                self._dbg(2, "0x%X: %s" % (self.offset, e))
                self.offset += BLOCKSIZE
                continue
            elif self.offset == 0:
                # A bad header right at the start: not a tar file at all.
                raise ReadError(str(e)) from None
        except EmptyHeaderError:
            if self.offset == 0:
                raise ReadError("empty file") from None
        except TruncatedHeaderError as e:
            if self.offset == 0:
                raise ReadError(str(e)) from None
        except SubsequentHeaderError as e:
            # A corrupt header after valid members is always fatal.
            raise ReadError(str(e)) from None
        break

    if tarinfo is not None:
        self.members.append(tarinfo)
    else:
        # No further members: mark the member list as complete.
        self._loaded = True

    return tarinfo
  2061. #--------------------------------------------------------------------------
  2062. # Little helper methods:
  2063. def _getmember(self, name, tarinfo=None, normalize=False):
  2064. """Find an archive member by name from bottom to top.
  2065. If tarinfo is given, it is used as the starting point.
  2066. """
  2067. # Ensure that all members have been loaded.
  2068. members = self.getmembers()
  2069. # Limit the member search list up to tarinfo.
  2070. if tarinfo is not None:
  2071. members = members[:members.index(tarinfo)]
  2072. if normalize:
  2073. name = os.path.normpath(name)
  2074. for member in reversed(members):
  2075. if normalize:
  2076. member_name = os.path.normpath(member.name)
  2077. else:
  2078. member_name = member.name
  2079. if name == member_name:
  2080. return member
  2081. def _load(self):
  2082. """Read through the entire archive file and look for readable
  2083. members.
  2084. """
  2085. while True:
  2086. tarinfo = self.next()
  2087. if tarinfo is None:
  2088. break
  2089. self._loaded = True
  2090. def _check(self, mode=None):
  2091. """Check if TarFile is still open, and if the operation's mode
  2092. corresponds to TarFile's mode.
  2093. """
  2094. if self.closed:
  2095. raise OSError("%s is closed" % self.__class__.__name__)
  2096. if mode is not None and self.mode not in mode:
  2097. raise OSError("bad operation for mode %r" % self.mode)
  2098. def _find_link_target(self, tarinfo):
  2099. """Find the target member of a symlink or hardlink member in the
  2100. archive.
  2101. """
  2102. if tarinfo.issym():
  2103. # Always search the entire archive.
  2104. linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
  2105. limit = None
  2106. else:
  2107. # Search the archive before the link, because a hard link is
  2108. # just a reference to an already archived file.
  2109. linkname = tarinfo.linkname
  2110. limit = tarinfo
  2111. member = self._getmember(linkname, tarinfo=limit, normalize=True)
  2112. if member is None:
  2113. raise KeyError("linkname %r not found" % linkname)
  2114. return member
def __iter__(self):
    """Provide an iterator object."""
    # Fast path: the whole archive has already been scanned.
    if self._loaded:
        yield from self.members
        return

    # Yield items using TarFile's next() method.
    # When all members have been read, set TarFile as _loaded.
    index = 0
    # Fix for SF #1100429: Under rare circumstances it can
    # happen that getmembers() is called during iteration,
    # which will have already exhausted the next() method.
    if self.firstmember is not None:
        tarinfo = self.next()
        index += 1
        yield tarinfo

    while True:
        if index < len(self.members):
            # Members cached by a concurrent getmembers() call.
            tarinfo = self.members[index]
        elif not self._loaded:
            tarinfo = self.next()
            if not tarinfo:
                self._loaded = True
                return
        else:
            return
        index += 1
        yield tarinfo
  2143. def _dbg(self, level, msg):
  2144. """Write debugging output to sys.stderr.
  2145. """
  2146. if level <= self.debug:
  2147. print(msg, file=sys.stderr)
def __enter__(self):
    # Context-manager entry: refuse to enter an already-closed TarFile.
    self._check()
    return self
def __exit__(self, type, value, traceback):
    # Context-manager exit: finalize normally only on success.
    if type is None:
        self.close()
    else:
        # An exception occurred. We must not call close() because
        # it would try to write end-of-archive blocks and padding.
        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True
  2160. #--------------------
  2161. # exported functions
  2162. #--------------------
  2163. def is_tarfile(name):
  2164. """Return True if name points to a tar archive that we
  2165. are able to handle, else return False.
  2166. 'name' should be a string, file, or file-like object.
  2167. """
  2168. try:
  2169. if hasattr(name, "read"):
  2170. t = open(fileobj=name)
  2171. else:
  2172. t = open(name)
  2173. t.close()
  2174. return True
  2175. except TarError:
  2176. return False
# Module-level alias so callers can simply use tarfile.open(...).
open = TarFile.open
def main():
    """Command-line entry point: list, extract, create or test archives."""
    import argparse

    description = 'A simple command-line interface for tarfile module.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Verbose output')
    # Exactly one of the four actions must be selected.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-l', '--list', metavar='<tarfile>',
                       help='Show listing of a tarfile')
    group.add_argument('-e', '--extract', nargs='+',
                       metavar=('<tarfile>', '<output_dir>'),
                       help='Extract tarfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create tarfile from sources')
    group.add_argument('-t', '--test', metavar='<tarfile>',
                       help='Test if a tarfile is valid')
    args = parser.parse_args()

    if args.test is not None:
        # Validate the archive by reading all members.
        src = args.test
        if is_tarfile(src):
            with open(src, 'r') as tar:
                tar.getmembers()
                # Second call returns the now-cached list; printed to stderr.
                print(tar.getmembers(), file=sys.stderr)
            if args.verbose:
                print('{!r} is a tar archive.'.format(src))
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.list is not None:
        # Print a table of contents.
        src = args.list
        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.list(verbose=args.verbose)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.extract is not None:
        # One argument: extract to cwd; two: extract into the given dir.
        if len(args.extract) == 1:
            src = args.extract[0]
            curdir = os.curdir
        elif len(args.extract) == 2:
            src, curdir = args.extract
        else:
            parser.exit(1, parser.format_help())

        if is_tarfile(src):
            with TarFile.open(src, 'r:*') as tf:
                tf.extractall(path=curdir)
            if args.verbose:
                if curdir == '.':
                    msg = '{!r} file is extracted.'.format(src)
                else:
                    msg = ('{!r} file is extracted '
                           'into {!r} directory.').format(src, curdir)
                print(msg)
        else:
            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))

    elif args.create is not None:
        tar_name = args.create.pop(0)
        _, ext = os.path.splitext(tar_name)
        # Choose the compression mode from the archive's file extension.
        compressions = {
            # gz
            '.gz': 'gz',
            '.tgz': 'gz',
            # xz
            '.xz': 'xz',
            '.txz': 'xz',
            # bz2
            '.bz2': 'bz2',
            '.tbz': 'bz2',
            '.tbz2': 'bz2',
            '.tb2': 'bz2',
        }
        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
        tar_files = args.create

        with TarFile.open(tar_name, tar_mode) as tf:
            for file_name in tar_files:
                tf.add(file_name)

        if args.verbose:
            print('{!r} file created.'.format(tar_name))
# Standard script entry-point guard.
if __name__ == '__main__':
    main()