Skip to content

Commit 2f72807

Browse files
committed
Small update
1 parent 5b7e5c4 commit 2f72807

1 file changed

Lines changed: 277 additions & 0 deletions

File tree

pyneofile/pyneofile.py

Lines changed: 277 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
import hashlib
3838
import inspect
3939
import tempfile
40+
import libarchive
4041
import configparser
4142
from io import open, StringIO, BytesIO
4243
from decimal import Decimal, ROUND_HALF_UP
@@ -6535,6 +6536,282 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
65356536
pass
65366537
return fp
65376538

6539+
def _is_pathlike(x):
6540+
return isinstance(x, (str, bytes, os.PathLike))
6541+
6542+
6543+
def open_archive_reader(infile):
    """
    Return a context manager that yields a libarchive reader for *infile*.

    Supports:
      - path-like objects: handed to libarchive.file_reader()
      - file objects with a usable fileno(): handed to libarchive.fd_reader()
      - bytes / bytearray / memoryview / io.BytesIO, plus any readable
        object whose fileno() is missing or unusable: the data is read into
        memory and handed to libarchive.memory_reader(), falling back to a
        temporary file when memory_reader is not available in this binding.

    Raises:
        TypeError: if *infile* is none of the above.
    """
    # 1) Path-like input maps directly onto a file reader.
    if _is_pathlike(infile):
        return libarchive.file_reader(infile)

    # 2) Collect in-memory data from bytes-like objects.
    data = None
    if isinstance(infile, io.BytesIO):
        data = infile.getvalue()
    elif isinstance(infile, (bytes, bytearray, memoryview)):
        data = bytes(infile)

    # 3) File object: prefer the descriptor-based reader, but fall back to
    # slurping the stream into memory when fileno() is absent or unusable.
    # (Some wrapped in-memory streams advertise a fileno attribute yet
    # raise io.UnsupportedOperation when it is actually called, which the
    # previous hasattr()-only check did not protect against.)
    if data is None and hasattr(infile, "read"):
        if hasattr(infile, "fileno"):
            try:
                return libarchive.fd_reader(infile.fileno())
            except (io.UnsupportedOperation, OSError):
                data = infile.read()
        else:
            data = infile.read()

    if data is not None:
        # Preferred: in-memory reader (if this libarchive binding has one).
        mem_reader = getattr(libarchive, "memory_reader", None)
        if callable(mem_reader):
            return mem_reader(data)

        # Fallback: spill to a temp file (works everywhere).
        # libarchive.file_reader expects a filename, so wrap the temp
        # file's lifetime in a tiny context manager of our own.
        class _TempFileReaderCM:
            def __init__(self, blob):
                self._blob = blob
                self._tmp = None
                self._cm = None
                self._archive = None

            def __enter__(self):
                # delete=False so the file can be reopened by name even on
                # platforms (e.g. Windows) that forbid a second open handle
                # on a delete-on-close temporary file.
                self._tmp = tempfile.NamedTemporaryFile(delete=False)
                try:
                    self._tmp.write(self._blob)
                    self._tmp.flush()
                    self._tmp.close()
                    self._cm = libarchive.file_reader(self._tmp.name)
                    self._archive = self._cm.__enter__()
                    return self._archive
                except Exception:
                    # Clean up the partially-constructed state, then
                    # propagate the original error.
                    self.__exit__(None, None, None)
                    raise

            def __exit__(self, exc_type, exc, tb):
                try:
                    if self._cm is not None:
                        self._cm.__exit__(exc_type, exc, tb)
                finally:
                    if self._tmp is not None:
                        try:
                            os.unlink(self._tmp.name)
                        except OSError:
                            pass

        return _TempFileReaderCM(data)

    raise TypeError(f"Unsupported infile type: {type(infile)!r}")
6611+
6612+
6613+
def AppendFilesWithContentFromBSDTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
    """
    Read an archive via libarchive and build a list of per-member dicts
    (header fields, extra data, JSON content, file contents, checksum
    types) suitable for AppendFileHeaderWithContent().

    infile may be a path, "-" (stdin), a URL (when pywwwget is available),
    a readable file object, or a one-element list/tuple of any of those.
    When contentasfile is True each entry's 'fcontents' is a file object,
    otherwise it is a bytes blob. Returns the list of dicts, or False when
    the input cannot be used.

    Numeric header fields are encoded as lowercase hex strings, matching
    the sibling AppendFilesWithContentFrom*ToList() functions.
    """
    # Inode-tracking locals kept for parity with the sibling tar/zip
    # list-builder functions (not populated by the libarchive path).
    curinode = 0
    curfid = 0
    inodelist = []
    inodetofile = {}
    filetoinode = {}
    inodetoforminode = {}
    if(isinstance(infile, (list, tuple, ))):
        infile = infile[0]
    if(infile == "-"):
        # Spool stdin into a temp file so it becomes seekable.
        infile = MkTempFile()
        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
        infile.seek(0, 0)
        if(not infile):
            return False
        infile.seek(0, 0)
    elif(re.findall(__download_proto_support__, infile) and pywwwget):
        infile = download_file_from_internet_file(infile)
        infile.seek(0, 0)
        if(not infile):
            return False
        infile.seek(0, 0)
    elif(hasattr(infile, "read") or hasattr(infile, "write")):
        try:
            if(not tarfile.is_tarfile(infile)):
                return False
        except AttributeError:
            # Older Pythons cannot probe file objects; fall back to the
            # project's own check.
            if(not TarFileCheck(infile)):
                return False
    elif(not os.path.exists(infile) or not os.path.isfile(infile)):
        return False
    tmpoutlist = []
    with open_archive_reader(infile) as archive:
        for member in archive:
            fencoding = "UTF-8"
            fname = member.pathname
            if(verbose):
                VerbosePrintOut(fname)
            fpremode = member.mode
            ffullmode = member.mode
            flinkcount = 0
            fblksize = format(int(0), 'x').lower()
            fblocks = format(int(0), 'x').lower()
            fflags = format(int(0), 'x').lower()
            ftype = 0
            # Map the libarchive entry type onto the format's numeric type
            # codes, OR-ing the matching stat file-type bits into the mode.
            # NOTE(review): `isfile` may not exist on every libarchive-c
            # version's ArchiveEntry — confirm against the pinned binding.
            if(member.isreg or member.isfile):
                ffullmode = member.mode | stat.S_IFREG
                ftype = 0
            elif(member.islnk):
                # Hardlink: stored as a regular-file mode with type code 1.
                ffullmode = member.mode | stat.S_IFREG
                ftype = 1
            elif(member.issym):
                ffullmode = member.mode | stat.S_IFLNK
                ftype = 2
            elif(member.ischr):
                ffullmode = member.mode | stat.S_IFCHR
                ftype = 3
            elif(member.isblk):
                ffullmode = member.mode | stat.S_IFBLK
                ftype = 4
            elif(member.isdir):
                ffullmode = member.mode | stat.S_IFDIR
                ftype = 5
            elif(member.isfifo):
                ffullmode = member.mode | stat.S_IFIFO
                ftype = 6
            elif(hasattr(member, "issparse") and member.issparse):
                ffullmode = member.mode | stat.S_IFREG
                ftype = 12
            elif(member.isdev):
                # Bug fix: `isdev` is a property on libarchive-c entries,
                # not a method — calling it (`member.isdev()`) raised
                # "'bool' object is not callable" for e.g. socket entries.
                ffullmode = member.mode
                ftype = 14
            else:
                ffullmode = member.mode | stat.S_IFREG
                ftype = 0
            flinkname = ""
            fcurfid = format(int(curfid), 'x').lower()
            fcurinode = format(int(curfid), 'x').lower()
            curfid = curfid + 1
            if(ftype == 2):
                flinkname = member.linkname
            fdev = format(int("0"), 'x').lower()
            # NOTE(review): assumes member.rdev is always an int here —
            # confirm it is defined for non-device entries in the binding.
            frdev = format(int(member.rdev), 'x').lower()
            # Types that should be considered zero-length in the archive context:
            zero_length_types = {1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 13}
            # Types that have actual data to read:
            data_types = {0, 7}
            sparse_types = {12}
            if ftype in zero_length_types:
                fsize = format(int("0"), 'x').lower()
            elif ftype in data_types:
                fsize = format(int(member.size), 'x').lower()
            else:
                # Sparse and any other types report their declared size.
                fsize = format(int(member.size), 'x').lower()
            # Timestamps: fall back to mtime when atime/ctime/birthtime are
            # missing or unset on this entry.
            if(hasattr(member, "atime") and member.atime is not None):
                fatime = format(int(to_ns(member.atime)), 'x').lower()
            else:
                fatime = format(int(to_ns(member.mtime)), 'x').lower()
            fmtime = format(int(to_ns(member.mtime)), 'x').lower()
            if(hasattr(member, "ctime") and member.ctime is not None):
                fctime = format(int(to_ns(member.ctime)), 'x').lower()
            else:
                fctime = format(int(to_ns(member.mtime)), 'x').lower()
            if(hasattr(member, "birthtime") and member.birthtime is not None):
                fbtime = format(int(to_ns(member.birthtime)), 'x').lower()
            else:
                fbtime = format(int(to_ns(member.mtime)), 'x').lower()
            fmode = format(int(ffullmode), 'x').lower()
            fchmode = format(int(stat.S_IMODE(ffullmode)), 'x').lower()
            ftypemod = format(int(stat.S_IFMT(ffullmode)), 'x').lower()
            fuid = format(int(member.uid), 'x').lower()
            fgid = format(int(member.gid), 'x').lower()
            funame = member.uname
            fgname = member.gname
            flinkcount = format(int(flinkcount), 'x').lower()
            fwinattributes = format(int(0), 'x').lower()
            fcompression = ""
            fcsize = format(int(0), 'x').lower()
            fcontents = MkTempFile()
            fcencoding = "UTF-8"
            curcompression = "none"
            if ftype in data_types:
                # Copy the member's data blocks into a temp file.
                for block in member.get_blocks():
                    fcontents.write(block)
                typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                fcontents.seek(0, 0)
                if(typechecktest is not False):
                    typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
                    fcontents.seek(0, 0)
                fcencoding = GetFileEncoding(fcontents, 0, False)[0]
                # Per-file compression: only when the content is not already
                # compressed and whole-file compression is disabled.
                if(typechecktest is False and not compresswholefile):
                    fcontents.seek(0, 2)
                    ucfsize = fcontents.tell()
                    fcontents.seek(0, 0)
                    if(compression == "auto"):
                        # Try each candidate codec and pick the smallest
                        # result (failed codecs rank as infinitely large).
                        ilsize = len(compressionuselist)
                        ilmin = 0
                        ilcsize = []
                        while(ilmin < ilsize):
                            cfcontents = MkTempFile()
                            fcontents.seek(0, 0)
                            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                            fcontents.seek(0, 0)
                            cfcontents.seek(0, 0)
                            cfcontents = CompressOpenFileAlt(
                                cfcontents, compressionuselist[ilmin], compressionlevel, compressionuselist, formatspecs)
                            if(cfcontents):
                                cfcontents.seek(0, 2)
                                ilcsize.append(cfcontents.tell())
                                cfcontents.close()
                            else:
                                ilcsize.append(float("inf"))
                            ilmin = ilmin + 1
                        ilcmin = ilcsize.index(min(ilcsize))
                        curcompression = compressionuselist[ilcmin]
                    fcontents.seek(0, 0)
                    cfcontents = MkTempFile()
                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                    cfcontents.seek(0, 0)
                    cfcontents = CompressOpenFileAlt(
                        cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
                    cfcontents.seek(0, 2)
                    cfsize = cfcontents.tell()
                    # Keep the compressed copy only if it is actually smaller.
                    if(ucfsize > cfsize):
                        fcsize = format(int(cfsize), 'x').lower()
                        fcompression = curcompression
                        fcontents.close()
                        fcontents = cfcontents
            if(fcompression == "none"):
                fcompression = ""
            fcontents.seek(0, 0)
            if(not contentasfile):
                fcontents = fcontents.read()
            ftypehex = format(ftype, 'x').lower()
            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                              fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
    return tmpoutlist
6790+
6791+
def AppendFilesWithContentFromBSDTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
    """
    Read an archive from *infile* via libarchive and append its members to
    the writable file object *fp* in the project's container format.

    Writes a file-count header first, then one header+content record per
    member, flushing (best-effort fsync) after each write. Returns fp on
    success, or False when fp is not writable or the archive could not be
    listed.
    """
    if(not hasattr(fp, "write")):
        return False
    GetDirList = AppendFilesWithContentFromBSDTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
    # Bug fix: the list builder returns False on failure; the previous code
    # went straight to len(GetDirList) and crashed with a TypeError.
    # (An empty list is still valid and produces a zero-file header.)
    if(GetDirList is False):
        return False
    numfiles = int(len(GetDirList))
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
    try:
        fp.flush()
        # Fixed capability check: this guards os.fsync, not os.sync.
        if(hasattr(os, "fsync")):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        # Best-effort durability only; in-memory streams have no fileno.
        pass
    for curfname in GetDirList:
        tmpoutlist = curfname['fheaders']
        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
        try:
            fp.flush()
            if(hasattr(os, "fsync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    return fp
6814+
65386815
def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
65396816
curinode = 0
65406817
curfid = 0

0 commit comments

Comments
 (0)