You've already forked PythonLib
mirror of
https://github.com/lifebottle/PythonLib.git
synced 2026-02-13 15:25:50 -08:00
Remove single packing functions
This commit is contained in:
@@ -460,104 +460,9 @@ class ToolsTOR(ToolsTales):
|
||||
theirsce.write( struct.pack("<H", new_value))
|
||||
|
||||
return theirsce
|
||||
|
||||
#Repack SCPK files for Story
def pack_story_file(self, scpk_file_name) -> bytes:
    """Repack a story SCPK archive with regenerated THEIRSCE script data.

    Reads the original SCPK from the extracted DAT archive, walks its
    size table, decompresses each contained sub-file, regenerates any
    THEIRSCE script from the story XML via ``get_new_theirsce``, then
    recompresses with the original compression type and rebuilds the
    SCPK container from scratch.

    :param scpk_file_name: file name of the SCPK inside the ``SCPK/``
        folder of the extracted DAT archive.
    :return: the rebuilt SCPK as ``bytes`` (also written to
        ``<story_XML_patch>/New/<scpk_file_name>``).
    """
    #Open the original SCPK (read-only: the original is never modified)
    with open(self.dat_archive_extract + "SCPK/" + scpk_file_name, "rb") as scpk:
        #Get nb_files and files_size from the 16-byte header
        scpk.read(4)  # magic "SCPK" — assumed, not validated here
        scpk.read(4)
        nb_files = struct.unpack("<L", scpk.read(4))[0]
        scpk.read(4)

        # Size table: one little-endian uint32 per contained file.
        file_size_dict = dict()
        for _ in range(nb_files):
            pointer_offset = scpk.tell()
            file_size = struct.unpack("<L", scpk.read(4))[0]
            file_size_dict[pointer_offset] = file_size

        #Extract each file and append to the final dataFinal
        dataFinal = bytearray()
        sizes = []
        o = io.BytesIO()

        for pointer_offset, fsize in file_size_dict.items():
            data_compressed = scpk.read(fsize)

            if comptolib.is_compressed(data_compressed):
                # First byte of a compressed blob encodes its compression type.
                c_type = struct.unpack("<b", data_compressed[:1])[0]
                data_uncompressed = comptolib.decompress_data(data_compressed)

                if data_uncompressed[:8] == b"THEIRSCE":
                    #Update THEIRSCE uncompressed file from the story XML
                    theirsce = self.get_new_theirsce(io.BytesIO(data_uncompressed), scpk_file_name, self.story_XML_new)
                    theirsce.seek(0)
                    data_new_uncompressed = theirsce.read()
                    data_compressed = comptolib.compress_data(data_new_uncompressed, version=c_type)
                else:
                    # Not a script: round-trip through the same compression
                    # type so the container stays internally consistent.
                    data_compressed = comptolib.compress_data(data_uncompressed, version=c_type)

            #Updating the header of the SCPK file to adjust the size
            dataFinal += data_compressed
            sizes.append(len(data_compressed))

        #Write down the new SCPK from scratch
        o.write(b"\x53\x43\x50\x4B\x01\x00\x0F\x00")  # "SCPK" magic + flags
        o.write(struct.pack("<L", len(sizes)))
        o.write(b"\x00" * 4)

        for size in sizes:
            o.write(struct.pack("<L", size))

        o.write(dataFinal)

    # Make sure the output folder exists (mirrors pack_Skit_File).
    self.mkdir(self.story_XML_patch + "New")
    with open(self.story_XML_patch + "New/" + scpk_file_name, "wb") as f:
        f.write(o.getvalue())

    return o.getvalue()
def pack_Skit_File(self, pak2_file):
    """Repack a skit PAK2 file with a regenerated THEIRSCE script.

    Copies the original PAK2 into the ``New/`` output folder, parses it
    with ``pak2lib``, regenerates its THEIRSCE chunk from the skit XML
    via ``get_new_theirsce``, and writes the rebuilt PAK2 over the copy.

    :param pak2_file: file name of the PAK2 inside the ``PAK2/`` folder
        of the extracted DAT archive.
    """
    # Create the destination folder BEFORE copying into it; the original
    # code only called mkdir after shutil.copy, which fails on a fresh run.
    self.mkdir(self.skit_XML_patch + "New")

    # Copy the original PAK2 file to the folder used for the new version
    shutil.copy(self.dat_archive_extract + "PAK2/" + pak2_file, self.skit_XML_patch + "New/" + pak2_file)

    pak2_file_path = os.path.join(self.dat_archive_extract, "PAK2", pak2_file)
    with open(pak2_file_path, "rb") as f_pak2:
        pak2_data = f_pak2.read()

    #Create the pak2 object
    pak2_obj = pak2lib.get_data(pak2_data)

    #Generate the new Theirsce based on the XML and replace the original one
    theirsce_io = self.get_new_theirsce(io.BytesIO(pak2_obj.chunks.theirsce), os.path.basename(pak2_file_path).split(".")[0], self.skit_XML_new)
    theirsce_io.seek(0)
    pak2_obj.chunks.theirsce = theirsce_io.read()

    # Overwrite the copied file with the rebuilt PAK2.
    with open(self.skit_XML_patch + "New/" + pak2_file, "wb") as f2:
        f2.write(pak2lib.create_pak2(pak2_obj))

    return
def pack_all_skits(self):
|
||||
print("Recreating Skit files...")
|
||||
|
||||
Reference in New Issue
Block a user