diff --git a/ToolsTOR.py b/ToolsTOR.py
index e7a0951..e79ec3c 100644
--- a/ToolsTOR.py
+++ b/ToolsTOR.py
@@ -74,12 +74,25 @@ class ToolsTOR(ToolsTales):
             self.extract_TheirSce_XML(scpk_file)
             self.id = 1
 
-    def get_theirsce_from_scpk(self, scpk, scpk_file_name, debug=False)->bytes:
-        header = scpk.read(4)
-
-        if header != b"SCPK":
-            # sys.exit(f"{file} is not a .scpk file!")
-            raise ValueError("File is not a .scpk file!")
+    def get_theirsce_from_scpk(self, scpk_file_name, debug=False)->bytes:
+
+        with open(scpk_file_name,"rb") as scpk:
+            header = scpk.read(4)
+
+            if header != b"SCPK":
+                # sys.exit(f"{file} is not a .scpk file!")
+                raise ValueError("File is not a .scpk file!")
+
+            scpk.read(4)
+            nbFiles = struct.unpack("<I", scpk.read(4))[0]

[RECONSTRUCTION NOTE — patch corrupted in transit: newlines were stripped and every
span between a '<' and the following '>' was swallowed as if it were a markup tag.
The '"<I"' format string directly above is reconstructed (standard little-endian
uint32 file-count read) — TODO confirm against the repository. Everything from that
point up to the function below is lost: the remainder of this hunk (the per-file
extraction loop of get_theirsce_from_scpk), any further ToolsTOR.py hunks, and the
diff/hunk headers introducing the pak2 helper section. The next function's name was
also eaten (only ')->bytes:' survives); 'get_theirsce' is a best-guess name based on
the fact that it slices the first sub-file out of a 3-offset header — TODO confirm.]

+def get_theirsce(file: bytes)->bytes:
+    offsets = struct.unpack("<3I", file[:12])
+
+    # Handle null 2nd offset because of course that's a thing
+    if offsets[1] == 0:
+        return file[offsets[0] : offsets[2]]
+    else:
+        return file[offsets[0] : offsets[1]]
+
+
+def get_data(file: bytes)->pak2_file:
+    offsets = struct.unpack("<6I", file[:24])
+    data = pak2_file()
+    data.char_count = struct.unpack("<

[RECONSTRUCTION NOTE — span lost to the tag-stripper: the char_count format string
and everything up to 'def create_pak2(...)->' below, i.e. the rest of get_data
(image_count / unk_count reads and the chunk slicing into data.chunks) and any code
between the two functions. Unrecoverable from this copy — restore from the repo.]

+def create_pak2(data: pak2_file)->bytes:
+    output = io.BytesIO()
+    output.seek(0)
+    output.write(b"\x00" * 0x20)
+    offsets_new = []
+    offsets_new.append(output.tell())
+
+    # theirsce
+    offsets_new.append(insert_padded_chunk(output, data.chunks.theirsce))
+
+    # lipsync
+    offsets_new.append(insert_padded_chunk(output, data.chunks.lipsync))
+
+    # unused
+    offsets_new.append(insert_padded_chunk(output, data.chunks.unused))
+
+    # unk1
+    offsets_new.append(insert_padded_chunk(output, data.chunks.image_unk1))
+
+    # unk2
+    offsets_new.append(insert_padded_chunk(output, data.chunks.image_unk2))
+
+    # images
+    # Create image chunk
+    image_chunk = b"\x00" * (data.image_count * 8)  # minimum size
+    insert_padded_chunk(output, image_chunk, 128)
+    image_offsets = []
+
+    image_offsets.append(output.tell())
+
+    for blob in data.chunks.image_blobs:
+        image_offsets.append(insert_padded_chunk(output, blob, 128))
+
+    image_offsets = image_offsets[:-1]
+    image_offsets = [
+        val for val in image_offsets for _ in (0, 1)
+    ]  # image data offsets are duplicated
+
+    # Write image data offsets
+    output.seek(offsets_new[5])
+    output.write(struct.pack("<%dI" % len(image_offsets), *image_offsets))
+
+    # Write chunk offsets
+    output.seek(0)
+    output.write(struct.pack("<%dI" % len(offsets_new), *offsets_new))
+
+    # Write metadata
+    output.write(struct.pack("<

[RECONSTRUCTION NOTE — span lost to the tag-stripper: the metadata pack format and
its arguments (presumably char_count / image_count / unk_count), the return of the
assembled buffer (output.getvalue()), and the script preamble down to
'if len(sys.argv) > 2:' below (likely including the __main__ guard and the code
that builds the 'pak2' object referenced by create_pak2(pak2)). Unrecoverable from
this copy — restore from the repo.]

+if len(sys.argv) > 2:
+    with open(sys.argv[2], "rb+") as f:
+        theirsce = f.read()
+
+    with open(sys.argv[2] + ".new", "wb+") as output:
+        output.write(create_pak2(pak2))
+
+    print("Done!")