Skip to content

Commit 1ee07aa

Browse files
committed
Small update
1 parent 8b2008c commit 1ee07aa

1 file changed

Lines changed: 277 additions & 0 deletions

File tree

pyfoxfile/pyfoxfile.py

Lines changed: 277 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
import hashlib
3838
import inspect
3939
import tempfile
40+
import libarchive
4041
import configparser
4142
from io import open, StringIO, BytesIO
4243
from decimal import Decimal, ROUND_HALF_UP
@@ -6542,6 +6543,282 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
65426543
pass
65436544
return fp
65446545

6546+
def _is_pathlike(x):
6547+
return isinstance(x, (str, bytes, os.PathLike))
6548+
6549+
6550+
def open_archive_reader(infile):
    """
    Return a context manager that yields a libarchive reader for *infile*.

    Supported inputs:
      - path-like (str/bytes/os.PathLike): libarchive.file_reader(path)
      - bytes / bytearray / memoryview / io.BytesIO: libarchive.memory_reader(data)
        (falls back to a temp file when memory_reader isn't available)
      - file object with a usable fileno(): libarchive.fd_reader(fd)
      - any other readable object returning bytes: drained into memory and
        handled like bytes input

    Raises:
        TypeError: when *infile* matches none of the above.
    """
    # 1) Filesystem path
    if _is_pathlike(infile):
        return libarchive.file_reader(infile)

    # 2) In-memory data (BytesIO or any bytes-like object)
    data = None
    if isinstance(infile, io.BytesIO):
        data = infile.getvalue()
    elif isinstance(infile, (bytes, bytearray, memoryview)):
        data = bytes(infile)

    if data is not None:
        # Preferred: in-memory reader, when this libarchive build exposes one.
        mem_reader = getattr(libarchive, "memory_reader", None)
        if callable(mem_reader):
            return mem_reader(data)

        # Fallback: spill to a temp file. libarchive.file_reader expects a
        # filename, so wrap temp-file creation/cleanup in a small context
        # manager; the caller never manages the temp file itself.
        class _TempFileReaderCM:
            def __init__(self, blob):
                self._blob = blob
                self._tmp = None
                self._cm = None
                self._archive = None

            def __enter__(self):
                # delete=False: the file must survive close() so libarchive
                # can reopen it by name; we unlink it ourselves in __exit__.
                self._tmp = tempfile.NamedTemporaryFile(delete=False)
                try:
                    self._tmp.write(self._blob)
                    self._tmp.flush()
                    self._tmp.close()
                    self._cm = libarchive.file_reader(self._tmp.name)
                    self._archive = self._cm.__enter__()
                    return self._archive
                except Exception:
                    # Undo any partial setup, then propagate.
                    self.__exit__(None, None, None)
                    raise

            def __exit__(self, exc_type, exc, tb):
                try:
                    if self._cm is not None:
                        self._cm.__exit__(exc_type, exc, tb)
                finally:
                    if self._tmp is not None:
                        try:
                            os.unlink(self._tmp.name)
                        except OSError:
                            pass

        return _TempFileReaderCM(data)

    # 3) File object, ideally backed by a real file descriptor
    if hasattr(infile, "read"):
        if hasattr(infile, "fileno"):
            try:
                return libarchive.fd_reader(infile.fileno())
            except (io.UnsupportedOperation, OSError, ValueError):
                # BUGFIX: fileno() can exist yet raise (e.g. wrappers over
                # in-memory streams); previously this crashed the caller.
                pass
        # Generalization: readable objects without a usable descriptor are
        # drained into memory and dispatched through the bytes path above.
        payload = infile.read()
        if isinstance(payload, (bytes, bytearray)):
            return open_archive_reader(bytes(payload))

    raise TypeError(f"Unsupported infile type: {type(infile)!r}")
6618+
6619+
6620+
def AppendFilesWithContentFromBSDTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
    """
    Read an archive through libarchive and return a list of per-member
    dicts ({'fheaders': [...], 'fextradata': ..., 'fjsoncontent': ...,
    'fcontents': ..., 'f*checksumtype': ...}) ready for
    AppendFileHeaderWithContent().

    infile may be a path, "-" (read stdin), a download URL (when pywwwget
    is available), or a readable file object.  Returns False when the
    input cannot be used.  Mutable defaults are kept for signature parity
    with the sibling Append*ToList functions; they are never mutated here.
    """
    # Inode bookkeeping kept for parity with the sibling tar/zip versions
    # (not currently consulted in this code path).
    curinode = 0
    curfid = 0
    inodelist = []
    inodetofile = {}
    filetoinode = {}
    inodetoforminode = {}
    if(isinstance(infile, (list, tuple, ))):
        infile = infile[0]
    if(infile == "-"):
        # Spool stdin to a temp file so libarchive can seek.
        infile = MkTempFile()
        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
        infile.seek(0, 0)
        if(not infile):
            return False
        infile.seek(0, 0)
    elif(isinstance(infile, str) and re.findall(__download_proto_support__, infile) and pywwwget):
        # BUGFIX: isinstance guard added -- re.findall() raises TypeError
        # when a file object reaches this branch.
        infile = download_file_from_internet_file(infile)
        infile.seek(0, 0)
        if(not infile):
            return False
        infile.seek(0, 0)
    elif(hasattr(infile, "read") or hasattr(infile, "write")):
        try:
            if(not tarfile.is_tarfile(infile)):
                return False
        except AttributeError:
            if(not TarFileCheck(infile)):
                return False
    elif(not os.path.exists(infile) or not os.path.isfile(infile)):
        return False
    tmpoutlist = []
    with open_archive_reader(infile) as archive:
        for member in archive:
            fencoding = "UTF-8"
            fname = member.pathname
            if(verbose):
                VerbosePrintOut(fname)
            fpremode = member.mode
            ffullmode = member.mode
            flinkcount = 0
            fblksize = format(int(0), 'x').lower()
            fblocks = format(int(0), 'x').lower()
            fflags = format(int(0), 'x').lower()
            ftype = 0
            # Map libarchive entry kinds onto the internal numeric types.
            if(member.isreg or member.isfile):
                ffullmode = member.mode | stat.S_IFREG
                ftype = 0
            elif(member.islnk):
                # Hardlink: recorded as a regular-file mode with type 1.
                ffullmode = member.mode | stat.S_IFREG
                ftype = 1
            elif(member.issym):
                ffullmode = member.mode | stat.S_IFLNK
                ftype = 2
            elif(member.ischr):
                ffullmode = member.mode | stat.S_IFCHR
                ftype = 3
            elif(member.isblk):
                ffullmode = member.mode | stat.S_IFBLK
                ftype = 4
            elif(member.isdir):
                ffullmode = member.mode | stat.S_IFDIR
                ftype = 5
            elif(member.isfifo):
                ffullmode = member.mode | stat.S_IFIFO
                ftype = 6
            elif(hasattr(member, "issparse") and member.issparse):
                ffullmode = member.mode | stat.S_IFREG
                ftype = 12
            elif(member.isdev):
                # BUGFIX: isdev is a property on libarchive entries (used
                # without parens like isreg/isdir above); calling it raised
                # TypeError: 'bool' object is not callable.
                ffullmode = member.mode
                ftype = 14
            else:
                ffullmode = member.mode | stat.S_IFREG
                ftype = 0
            flinkname = ""
            fcurfid = format(int(curfid), 'x').lower()
            fcurinode = format(int(curfid), 'x').lower()
            curfid = curfid + 1
            if(ftype == 1 or ftype == 2):
                # BUGFIX: hardlinks (ftype 1) also carry a target name;
                # previously only symlinks (ftype 2) recorded it.
                flinkname = member.linkname
            fdev = format(int("0"), 'x').lower()
            frdev = format(int(member.rdev), 'x').lower()
            # Types that should be considered zero-length in the archive context:
            zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
            # Types that have actual data to read:
            data_types = {0, 7}
            sparse_types = {12}
            if ftype in zero_length_types:
                fsize = format(int("0"), 'x').lower()
            elif ftype in data_types:
                fsize = format(int(member.size), 'x').lower()
            else:
                fsize = format(int(member.size), 'x').lower()
            # Timestamps (nanoseconds, hex); fall back to mtime when the
            # source archive lacks a field.
            if(hasattr(member, "atime") and member.atime is not None):
                fatime = format(int(to_ns(member.atime)), 'x').lower()
            else:
                fatime = format(int(to_ns(member.mtime)), 'x').lower()
            fmtime = format(int(to_ns(member.mtime)), 'x').lower()
            if(hasattr(member, "ctime") and member.ctime is not None):
                fctime = format(int(to_ns(member.ctime)), 'x').lower()
            else:
                fctime = format(int(to_ns(member.mtime)), 'x').lower()
            if(hasattr(member, "birthtime") and member.birthtime is not None):
                fbtime = format(int(to_ns(member.birthtime)), 'x').lower()
            else:
                fbtime = format(int(to_ns(member.mtime)), 'x').lower()
            fmode = format(int(ffullmode), 'x').lower()
            fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
            ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
            fuid = format(int(member.uid), 'x').lower()
            fgid = format(int(member.gid), 'x').lower()
            funame = member.uname
            fgname = member.gname
            flinkcount = format(int(flinkcount), 'x').lower()
            fwinattributes = format(int(0), 'x').lower()
            fcompression = ""
            fcsize = format(int(0), 'x').lower()
            fcontents = MkTempFile()
            fcencoding = "UTF-8"
            curcompression = "none"
            if ftype in data_types:
                for block in member.get_blocks():
                    fcontents.write(block)
                typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                fcontents.seek(0, 0)
                if(typechecktest is not False):
                    typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
                    fcontents.seek(0, 0)
                fcencoding = GetFileEncoding(fcontents, 0, False)[0]
                # Only per-file compress data that is not already a known
                # compressed/binary type and when whole-file compression is off.
                if(typechecktest is False and not compresswholefile):
                    fcontents.seek(0, 2)
                    ucfsize = fcontents.tell()
                    fcontents.seek(0, 0)
                    if(compression == "auto"):
                        # Try every candidate codec and keep the smallest result.
                        ilsize = len(compressionuselist)
                        ilmin = 0
                        ilcsize = []
                        while(ilmin < ilsize):
                            cfcontents = MkTempFile()
                            fcontents.seek(0, 0)
                            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                            fcontents.seek(0, 0)
                            cfcontents.seek(0, 0)
                            cfcontents = CompressOpenFileAlt(
                                cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs)
                            if(cfcontents):
                                cfcontents.seek(0, 2)
                                ilcsize.append(cfcontents.tell())
                                cfcontents.close()
                            else:
                                # Codec failed: never pick it.
                                ilcsize.append(float("inf"))
                            ilmin = ilmin + 1
                        ilcmin = ilcsize.index(min(ilcsize))
                        curcompression = compressionuselist[ilcmin]
                    fcontents.seek(0, 0)
                    cfcontents = MkTempFile()
                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                    cfcontents.seek(0, 0)
                    cfcontents = CompressOpenFileAlt(
                        cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
                    cfcontents.seek(0, 2)
                    cfsize = cfcontents.tell()
                    # Keep the compressed copy only when it is actually smaller.
                    if(ucfsize > cfsize):
                        fcsize = format(int(cfsize), 'x').lower()
                        fcompression = curcompression
                        fcontents.close()
                        fcontents = cfcontents
            if(fcompression == "none"):
                fcompression = ""
            fcontents.seek(0, 0)
            if(not contentasfile):
                fcontents = fcontents.read()
            ftypehex = format(ftype, 'x').lower()
            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                               fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
    return tmpoutlist
6797+
6798+
def AppendFilesWithContentFromBSDTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
    """
    Read the archive *infile* through libarchive and append its members to
    the already-open writable stream *fp* in the container format.

    Returns *fp* on success, or False when *fp* is not writable or the
    source archive cannot be read.  Mutable defaults are kept for
    signature parity with the sibling Append* functions.
    """
    if(not hasattr(fp, "write")):
        return False
    GetDirList = AppendFilesWithContentFromBSDTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
    # BUGFIX: the list builder returns False on failure; len(False) below
    # would raise TypeError.  An empty list is still valid (0-file header).
    if(GetDirList is False):
        return False
    numfiles = int(len(GetDirList))
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
    try:
        # Best-effort flush/sync; not every fp exposes a real descriptor.
        fp.flush()
        if(hasattr(os, "sync")):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    for curfname in GetDirList:
        tmpoutlist = curfname['fheaders']
        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
        try:
            fp.flush()
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    return fp
6821+
65456822
def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
65466823
curinode = 0
65476824
curfid = 0

0 commit comments

Comments
 (0)