@@ -8035,6 +8035,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
80358035 fp.close()
80368036 return True
80378037
def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, uncompress=True, skipchecksum=False, seektoend=False):
    """Read an archive file into an entry list for appending.

    Thin wrapper around ReadInFileWithContentToList() that fixes the
    format type to "auto" and reads the whole archive (no seek window,
    not list-only).  The compression/checksum parameters are accepted for
    signature parity with the other Append* helpers but are not used by
    the underlying reader.

    BUGFIX: ``uncompress``, ``skipchecksum`` and ``seektoend`` were
    previously referenced without being defined anywhere (NameError on
    every call); they are now keyword parameters with defaults, which is
    backward-compatible for existing positional callers.
    """
    return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8040+
def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read a multi-archive input into an entry list for appending.

    Thin wrapper around ReadInMultipleFileWithContentToList() reading the
    whole input (no seek window, not list-only).  The compression/checksum
    parameters are accepted for signature parity with the other Append*
    helpers but are not used by the underlying reader.

    BUGFIX: ``fmttype``, ``uncompress``, ``skipchecksum`` and
    ``seektoend`` were previously referenced without being defined
    anywhere (NameError on every call); they are now keyword parameters
    with defaults, which is backward-compatible for existing positional
    callers.
    """
    return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8043+
def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read several archive inputs into an entry list for appending.

    Thin wrapper around ReadInMultipleFilesWithContentToList() reading
    each input in full (no seek window, not list-only).  The
    compression/checksum parameters are accepted for signature parity
    with the other Append* helpers but are not used by the underlying
    reader.

    BUGFIX: ``fmttype``, ``uncompress``, ``skipchecksum`` and
    ``seektoend`` were previously referenced without being defined
    anywhere (NameError on every call); they are now keyword parameters
    with defaults, which is backward-compatible for existing positional
    callers.
    """
    return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8046+
def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
    """Re-serialize the archive ``infile`` onto the writable stream ``fp``.

    Parses ``infile`` into an entry list (decoded with ``insaltkey``),
    writes a fresh archive header for the entry count, then re-emits each
    entry's headers/content/extradata/json onto ``fp`` (encoded with
    ``outsaltkey``), flushing and fsync-ing after each write when the
    stream supports it.

    Returns ``fp`` on success, or False when ``fp`` is not writable.
    """
    if(not hasattr(fp, "write")):
        return False
    # NOTE(review): checksumtype[3] is passed twice below ([2], [3], [3]);
    # [2], [3], [4] may have been intended — confirm against the 5-element
    # checksumtype convention before changing.
    GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
    numfiles = int(len(GetDirList))
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
    try:
        fp.flush()
        # Guard on the function actually called; the previous
        # hasattr(os, "sync") check skipped fsync on platforms (Windows)
        # that lack os.sync but provide os.fsync.
        if(hasattr(os, "fsync")):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    for curfname in GetDirList:
        tmpoutlist = curfname['fheaders']
        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
        try:
            fp.flush()
            if(hasattr(os, "fsync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    return fp
8070+
def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Re-serialize the archive ``infiles`` into ``outfile``.

    ``outfile`` may be a filesystem path, "-" (write to stdout), None
    (return the archive bytes), an open file-like object, or an upload
    URL matched by __upload_proto_support__.  When ``fmttype`` is "auto"
    and ``outfile`` is a path, the output format is inferred from its
    extension(s), falling back to __file_format_default__.

    Returns the open file object when ``returnfp`` is true, the archive
    bytes when ``outfile`` is None, False on a permission error creating
    the output file, and True otherwise.
    """
    if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
        get_in_ext = os.path.splitext(outfile)
        tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None and get_in_ext[1]!=""):
            # Double extension (e.g. ".cat.gz"): strip the outer extension
            # and retry with the inner one.
            get_in_ext = os.path.splitext(get_in_ext[0])
            # BUGFIX: look up the extension (index 1), not the basename
            # (index 0), matching the first lookup above.
            tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None):
            fmttype = __file_format_default__
            formatspecs = formatspecs[fmttype]
        else:
            fmttype = tmpfmt
            formatspecs = formatspecs[tmpfmt]
    elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
        formatspecs = formatspecs[fmttype]
    elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
        fmttype = __file_format_default__
        formatspecs = formatspecs[fmttype]
    if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
        outfile = RemoveWindowsPath(outfile)
        if(os.path.exists(outfile)):
            try:
                os.unlink(outfile)
            except OSError:
                pass
    if(outfile == "-" or outfile is None):
        verbose = False
        fp = MkTempFile()
    elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = outfile
    elif(re.findall(__upload_proto_support__, outfile)):
        fp = MkTempFile()
    else:
        fbasename = os.path.splitext(outfile)[0]
        fextname = os.path.splitext(outfile)[1]
        if(not compresswholefile and fextname in outextlistwd):
            compresswholefile = True
        try:
            fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
        except PermissionError:
            return False
    AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
    if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        try:
            fp.flush()
            # Guard on the function actually called; the previous
            # hasattr(os, "sync") check skipped fsync on platforms
            # (Windows) that lack os.sync but provide os.fsync.
            if(hasattr(os, "fsync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    if(outfile == "-"):
        fp.seek(0, 0)
        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
        fp.close()
        return outvar
    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        fp.seek(0, 0)
        upload_file_to_internet_file(fp, outfile)
    if(returnfp):
        fp.seek(0, 0)
        return fp
    else:
        fp.close()
        return True
8142+
def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Stack several input archives into a single output, one after another.

    Each element of ``infiles`` (a single input is wrapped in a list) is
    forwarded to AppendReadInFileWithContentToOutFile(); the file object
    returned by one pass becomes the output target of the next pass, so
    every archive lands in the same destination.

    Returns the final open file object when ``returnfp`` is true, True on
    success otherwise, or the falsy result of the failing pass on error.
    """
    if not isinstance(infiles, list):
        infiles = [infiles]
    result = False
    for current_source in infiles:
        result = AppendReadInFileWithContentToOutFile(current_source, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
        if not result:
            # A pass failed; propagate its falsy result unchanged.
            return result
        # Chain the returned file object into the next pass.
        outfile = result
    if returnfp or not result:
        return result
    result.close()
    return True
8157+
80388158def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
80398159 if(IsNestedDict(formatspecs) and fmttype=="auto" and
80408160 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
0 commit comments