"""Shared tooling for the "Tales of" fan-translation pipelines.

NOTE(review): this file was recovered from a corrupted copy (all indentation
collapsed and every ``<...>`` span stripped, which destroyed struct format
strings and several method bodies).  Sections marked ``NOTE(review)`` below
are best-effort reconstructions of the lost spans and must be confirmed
against the project history before being relied upon.
"""

import collections
import json
import os
import re
import shutil
import string
import struct
import subprocess
import xml.etree.ElementTree as ET
from xml.dom import minidom

import pandas as pd
from dicttoxml import dicttoxml
import lxml.etree as etree

import comptolib
import fps4

# NOTE(review): the original import block bound BOTH xml.etree and lxml.etree
# to the name "ET"; the last binding (lxml) won.  Kept for compatibility.
import lxml.etree as ET


class ToolsTales:
    """Base class with the extraction / insertion helpers shared by the
    per-game translation tools (menu files, THEIRSCE scripts, pak archives).

    Game-specific subclasses are expected to provide attributes such as
    ``elfOriginal`` / ``POINTERS_END`` and helpers such as
    ``remove_duplicates`` / ``create_Node_XML`` / ``insertAll``.
    """

    # Regex for tags such as <Voice:0A3B1C00> produced by bytes_to_text()
    COMMON_TAG = r"(<\w+:?\w+>)"
    # Regex for raw-byte escapes such as {1F}
    HEX_TAG = r"(\{[0-9A-F]{2}\})"
    # ASCII characters copied through verbatim when decoding game text
    PRINTABLE_CHARS = "".join(
        (string.digits, string.ascii_letters, string.punctuation, " ")
    )
    VALID_FILE_NAME = r"([0-9]{2,5})(?:\.)?([1,3])?\.(\w+)$"

    def __init__(self, gameName, tblFile, repo_name):
        self.gameName = gameName
        self.repo_name = repo_name
        self.basePath = os.getcwd()

        # Load the character table / tag definitions for this game
        with open("../{}/Data/{}/Misc/{}".format(repo_name, gameName, tblFile),
                  encoding="utf-8") as f:
            jsonRaw = json.load(f)

        # Keys of every sub-table except "TBL" are numeric; Destiny DC stores
        # them in decimal, the other games in hexadecimal.
        if self.repo_name == "Tales-of-Destiny-DC":
            self.jsonTblTags = {
                k1: {int(k2) if k1 != "TBL" else k2: v2
                     for k2, v2 in jsonRaw[k1].items()}
                for k1 in jsonRaw}
        else:
            self.jsonTblTags = {
                k1: {int(k2, 16) if k1 != "TBL" else k2: v2
                     for k2, v2 in jsonRaw[k1].items()}
                for k1 in jsonRaw}

        # Inverted lookups used when re-encoding text back to bytes
        self.itable = {v: struct.pack(">H", int(k))
                       for k, v in self.jsonTblTags['TBL'].items()}
        self.itags = {v: k for k, v in self.jsonTblTags['TAGS'].items()}
        self.inames = {v: k for k, v in self.jsonTblTags['NAME'].items()}
        self.icolors = {v: k for k, v in self.jsonTblTags['COLOR'].items()}

        with open("../{}/Data/{}/Menu/MenuFiles.json".format(repo_name, gameName)) as f:
            self.menu_files_json = json.load(f)

    def mkdir(self, d):
        """Create directory *d*, ignoring OS errors (e.g. already exists)."""
        try:
            os.mkdir(d)
        except OSError:  # was a bare except:; narrowed to the expected failure
            pass

    ##############################
    #
    # Utility functions
    #
    ##############################

    # Compress and decompress pak files.
    # action is -d or -c
    # file_type : -0, -1 or -3
    # working is the location of the PAK file you want to compress/decompress
    def pakComposer_Comptoe(self, file_name, action, file_type, do_comptoe, working):
        """Run the external ``pakcomposer`` tool on *file_name* and, when
        *do_comptoe* is true after a decompression, comptoe-decompress every
        ``*.compress`` entry it produced."""
        file_number = file_name.split(".")[0]
        # Delete the output if already there
        if action == '-c':
            if os.path.exists(file_name):
                # NOTE(review): ".pak{}" is used literally (no .format) in the
                # original — looks like a latent bug, kept as-is.
                os.remove(file_name.replace(".pak{}", file_type[1]))
        else:
            if os.path.exists(working + "/" + file_number):
                shutil.rmtree(working + "/" + file_number)

        # Run PakComposer with parameters
        args = ["pakcomposer", action, file_name, file_type, "-x"]
        subprocess.run(args, cwd=working)

        if do_comptoe:
            folder = working + "/" + file_number
            files = [ele for ele in os.listdir(folder) if ".compress" in ele]
            for ele in files:
                # First byte of each entry encodes the comptoe compression type
                with open("{}/{}".format(folder, ele), "rb") as f:
                    ctype = ord(f.read(1))
                args = ["comptoe", "-d{}".format(ctype), ele,
                        ele.split(".")[0] + "d.unknown"]
                subprocess.run(args, cwd=folder)

    def fps4_action(self, action, b_file, dat_file, destination):
        """Dump an FPS4 archive (*b_file* + *dat_file*) into *destination*.

        NOTE(review): *action* is currently ignored — only the dump
        direction is implemented."""
        fps4.dump_fps4(b_file, dat_file, destination)

    def comptoe(self, fileName, action):
        """Run the external ``comptoe`` tool on *fileName* with *action*
        (e.g. ``-d1``) and return the resulting bytes."""
        args = ["comptoe", action, fileName, fileName + ".res"]
        subprocess.run(args)
        with open(fileName + ".res", "rb") as f:
            return f.read()

    def get_pointers(self, start_offset):
        """Read 4-byte little-endian pointers from the original ELF, starting
        at *start_offset* and stopping at ``self.POINTERS_END``.

        NOTE(review): the tail of this method was lost to corruption; the
        unpack format and return are reconstructed — TODO confirm.
        ``elfOriginal`` / ``POINTERS_END`` are set by the game subclass."""
        pointers = []
        with open(self.elfOriginal, "rb") as f:
            f.seek(start_offset, 0)
            while f.tell() < self.POINTERS_END:
                p = struct.unpack("<L", f.read(4))[0]
                pointers.append(p)
        return pointers

    def decode(self, data):
        """Decompress *data* (LZSS variant: flag byte, then 8 items; a set bit
        means a 2-byte token with 5-bit length-3 and 11-bit distance-1).

        NOTE(review): the head of this method was lost to corruption; only
        the copy loop and ``return bytes(out)`` survived.  Reconstructed to
        round-trip with encode() — confirm against the project history."""
        d = iter(data)
        out = []
        while True:
            try:
                c = next(d)
            except StopIteration:
                return bytes(out)
            for _ in range(8):
                try:
                    if c & 1:
                        p = next(d)
                        p += next(d) << 8
                        l = (p >> 11) + 3
                        p &= 0x7FF
                        p += 1
                        for i in range(l):
                            out.append(out[-p])
                    else:
                        out.append(next(d))
                except StopIteration:
                    return bytes(out)
                c >>= 1

    def _search(self, data, pos, sz):
        """Find the longest back-reference for encode(): returns
        (match_position, usable_match_length); length 0 means no match.
        Window is 0x800 bytes back, matches are capped at 0x22 bytes."""
        ml = min(0x22, sz - pos)
        if ml < 3:
            return 0, 0
        mp = max(0, pos - 0x800)
        hitp, hitl = 0, 3
        if mp < pos:
            hl = data[mp:pos + hitl].find(data[pos:pos + hitl])
            while hl < (pos - mp):
                while (hitl < ml) and (data[pos + hitl] == data[mp + hl + hitl]):
                    hitl += 1
                mp += hl
                hitp = mp
                if hitl == ml:
                    return hitp, hitl
                mp += 1
                hitl += 1
                if mp >= pos:
                    break
                hl = data[mp:pos + hitl].find(data[pos:pos + hitl])
        # If length less than 4, return miss.
        if hitl < 4:
            hitl = 1
        return hitp, hitl - 1

    def encode(self, data):
        """Compress *data* with the LZSS variant understood by decode().

        NOTE(review): the original body was lost to corruption (only
        ``from struct import Struct`` / ``HW = Struct(`` survived); this is a
        reconstruction that round-trips with decode() via _search() — verify
        repacked output against the original tool before shipping."""
        from struct import Struct
        HW = Struct("<H")
        sz = len(data)
        out = bytearray()
        pos = 0
        while pos < sz:
            flags = 0
            chunk = bytearray()
            for bit in range(8):
                if pos >= sz:
                    break
                hitp, hitl = self._search(data, pos, sz)
                if hitl >= 3:
                    # back-reference token: 5-bit (length-3) | 11-bit (distance-1)
                    flags |= 1 << bit
                    chunk += HW.pack(((hitl - 3) << 11) | (pos - hitp - 1))
                    pos += hitl
                else:
                    # literal byte
                    chunk.append(data[pos])
                    pos += 1
            out.append(flags)
            out += chunk
        return bytes(out)

    def get_theirsce_pointers(self, theirsce, strings_offset, file_size):
        """Collect (pointer_offset, text_offset) pairs from a THEIRSCE script.

        NOTE(review): everything up to the final validation condition was lost
        to corruption; the name, signature and scan loop are reconstructed
        from the surviving tail (``... > 0) and (bValidation == b'\\x00')``)
        and must be confirmed against the project history."""
        pointers_offset = []
        texts_offset = []
        previous_addr = 0
        while theirsce.tell() < strings_offset:
            addr = struct.unpack("<H", theirsce.read(2))[0]
            here = theirsce.tell()
            bValidation = theirsce.read(1)
            theirsce.seek(here)
            if (strings_offset + addr < file_size) and (addr > 0) \
                    and (bValidation == b'\x00'):
                pointers_offset.append(theirsce.tell() - 2)
                texts_offset.append(addr + strings_offset)
                previous_addr = addr
        return pointers_offset, texts_offset

    def get_extension(self, data):
        """Guess a file extension from the magic bytes / structure of *data*."""
        if data[:4] == b"SCPK":
            return "scpk"
        if data[:4] == b"TIM2":
            return "tm2"
        if data[:4] == b"\x7FELF":
            return "irx"
        if data[:8] == b"IECSsreV":
            if data[0x50:0x58] == b"IECSigaV":
                return "hd"
            elif data[0x30:0x38] == b"IECSidiM":
                return "sq"
        if data[:16] == b"\x00" * 0x10:
            if data[16:18] != b"\x00\x00":
                return "bd"
        if data[:8] == b"THEIRSCE":
            return "theirsce"
        if data[:3] == b"MFH":
            return "mfh"
        if data[:4] == b"MSCF":
            return "cab"
        if data[:4] == b"EBG\x00":
            return "ebg"
        if data[:4] == b"anp3":
            return "anp3"
        if data[:4] == b"EFFE":
            return "effe"
        # 0x####BD27 is the masked addiu sp,sp,#### mips instruction
        # These are overlay files, containing compiled MIPS assembly
        if data[2:4] == b"\xBD\x27":
            return "ovl"
        if data[6:8] == b"\xBD\x27":
            return "ovl"
        is_pak = self.get_pak_type(data)
        if is_pak is not None:
            return is_pak
        if len(data) > 0x400:
            # NOTE(review): this trailing check was partially lost to
            # corruption; the unpack format and comparison are reconstructed.
            size = struct.unpack("<I", data[4:8])[0]
            if size == (len(data) - 8):
                return "tmsk"
        return "bin"

    def get_pak_type(self, data):
        """Best-effort detection of pak container layouts; returns the pak
        extension string or None when *data* does not look like a pak.

        NOTE(review): most of this method was lost to corruption; only the
        ``pakN_header_size`` size-summation fragment survived.  This
        reconstruction validates a pakN layout (u32 count followed by one
        u32 size per file) — confirm the exact pak1/pak2/pak3 discrimination
        against the project history."""
        if len(data) < 8:
            return None
        files = struct.unpack("<I", data[:4])[0]
        if files == 0 or files > 0x1000:
            return None
        pakN_header_size = 4 + files * 4
        if len(data) > pakN_header_size:
            calculated_size = 0
            for i in range(4, (files + 1) * 4, 4):
                calculated_size += struct.unpack("<I", data[i:i + 4])[0]
            if calculated_size + pakN_header_size == len(data):
                # NOTE(review): guessed mapping — TODO confirm pak variant
                return "pak3"
        return None

    # Extract the text of the file and the hex dump of the raw string
    def bytes_to_text(self, fileRead, offset=-1, end_strings=b"\x00"):
        """Decode the in-game string at *offset* of *fileRead* using the TBL /
        TAGS tables; returns ``(text, spaced_hex_dump)``.

        NOTE(review): the signature and first statements were lost to
        corruption; ``offset=-1`` / ``end_strings`` defaults reconstructed
        from the surviving ``if (offset > 0)`` / ``while b != end_strings``."""
        finalText = ''
        if offset > 0:
            fileRead.seek(offset, 0)
        pos = fileRead.tell()
        b = fileRead.read(1)
        while b != end_strings:
            b = ord(b)
            if (0x99 <= b <= 0x9F) or (0xE0 <= b <= 0xEB):
                # Two-byte table character
                c = (b << 8) + ord(fileRead.read(1))
                try:
                    finalText += (self.jsonTblTags['TBL'][str(c)])
                except KeyError:
                    b_u = (c >> 8) & 0xFF
                    b_l = c & 0xFF
                    finalText += ("{%02X}" % b_u)
                    finalText += ("{%02X}" % b_l)
            elif b == 0x1:
                finalText += ("\n")
            elif b in (0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xB, 0xC, 0xD, 0xE, 0xF):
                # Tag opcode followed by a 4-byte parameter
                # NOTE(review): unpack format reconstructed (span lost)
                b2 = struct.unpack("<L", fileRead.read(4))[0]
                if b in self.jsonTblTags['TAGS']:
                    tag_name = self.jsonTblTags['TAGS'][b]
                    finalText += ("<%s:%08X>" % (tag_name, b2))
                else:
                    finalText += "<%02X:%08X>" % (b, b2)
            elif chr(b) in self.PRINTABLE_CHARS:
                finalText += chr(b)
            elif 0xA1 <= b < 0xE0:
                # Half-width katakana range, decoded through cp932
                finalText += struct.pack("B", b).decode("cp932")
            elif b in (0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19):
                # Variable-length opcode, terminated by a 0x80 byte
                finalText += "{%02X}" % b
                next_b = b""
                while next_b != b"\x80":
                    next_b = fileRead.read(1)
                    finalText += "{%02X}" % ord(next_b)
            elif b == 0x81:
                next_b = fileRead.read(1)
                if next_b == b"\x40":
                    # cp932 0x8140 — emitted as a space by the original
                    finalText += " "
                else:
                    finalText += "{%02X}" % b
                    finalText += "{%02X}" % ord(next_b)
            else:
                finalText += "{%02X}" % b
            b = fileRead.read(1)

        # Build a spaced hex dump of the raw string and restore the position
        end = fileRead.tell()
        size = fileRead.tell() - pos - 1
        fileRead.seek(pos)
        hex_string = fileRead.read(size).hex()
        hex_values = ' '.join(a + b for a, b in
                              zip(hex_string[::2], hex_string[1::2]))
        fileRead.seek(end)
        return finalText, hex_values

    # Convert text to a bytes object to reinsert into THEIRSCE and other files
    def text_to_bytes(self, text):
        """Inverse of bytes_to_text(): turn tagged text back into game bytes.

        NOTE(review): the tail of this method (tag-parameter packing, the
        name/color branches and the line-break re-insertion) was lost to
        corruption and is reconstructed — confirm the opcode bytes."""
        unames = []
        splitLineBreak = text.split('\x0A')
        nb = len(splitLineBreak)
        bytesFinal = b''
        i = 0
        for line in splitLineBreak:
            string_hex = re.split(self.HEX_TAG, line)
            string_hex = [sh for sh in string_hex if sh]
            for s in string_hex:
                if re.match(self.HEX_TAG, s):
                    # {XX} raw-byte escape
                    bytesFinal += struct.pack("B", int(s[1:3], 16))
                else:
                    s_com = re.split(self.COMMON_TAG, s)
                    s_com = [sc for sc in s_com if sc]
                    for c in s_com:
                        if re.match(self.COMMON_TAG, c):
                            if ":" in c:
                                # <Tag:XXXXXXXX> — opcode byte + u32 parameter
                                split = c.split(":")
                                if split[0][1:] in self.itags.keys():
                                    bytesFinal += struct.pack("B", self.itags[split[0][1:]])
                                    # NOTE(review): parameter packing reconstructed
                                    bytesFinal += struct.pack("<I", int(split[1][:-1], 16))
                            else:
                                # NOTE(review): reconstructed — bare <Name> /
                                # <Color> tags looked up in the inverted tables
                                if c[1:-1] in self.inames:
                                    bytesFinal += struct.pack("B", 0x9)
                                    bytesFinal += struct.pack("<I", self.inames[c[1:-1]])
                                elif c[1:-1] in self.icolors:
                                    bytesFinal += struct.pack("B", 0x5)
                                    bytesFinal += struct.pack("<I", self.icolors[c[1:-1]])
                        else:
                            # Plain text: table characters first, cp932 fallback
                            for ch in c:
                                if ch in self.itable.keys():
                                    bytesFinal += self.itable[ch]
                                else:
                                    bytesFinal += ch.encode("cp932")
            i += 1
            # Re-insert the 0x01 line-break opcode between lines
            if nb >= 2 and i < nb:
                bytesFinal += b'\x01'
        return bytesFinal

    def pack_Menu_File(self, file_node):
        """Write translated menu text back into a copy of the original file
        and fix up the pointers.

        NOTE(review): the method name, header and XML loading were lost to
        corruption; reconstructed from the body's use of *file_node*
        (dict with Sections / File_Original / File_Extract / Base_Offset)."""
        # NOTE(review): reconstructed — load the translated XML and prepare
        # the PointerOffset -> new text offset mapping filled while writing.
        root = etree.parse(file_node['File_XML']).getroot()
        new_text_offsets = dict()

        sections_start = [section['Text_Start'] for section in file_node['Sections']
                          if section['Text_Start'] > 0]
        sections_end = [section['Text_End'] for section in file_node['Sections']
                        if section['Text_Start'] > 0]
        base_offset = file_node['Base_Offset']

        # Copy the original file
        new_file_path = "../Data/{}/Menu/New/{}".format(
            self.repo_name, os.path.basename(file_node['File_Original']))
        shutil.copy(file_node['File_Extract'], new_file_path)

        # Open the new file with r+b
        section_id = 0
        with open(new_file_path, "r+b") as menu_file:
            menu_file.seek(sections_start[section_id])
            section_max = max(sections_end)
            # NOTE(review): dead code in the recovered source — result unused
            ele = [ele for ele in root.findall("Strings")
                   if ele.find('Section').text == "Armor"][0]
            for entry_node in root.iter("Entry"):
                if menu_file.tell() < section_max:
                    # Grab the fields from the Entry in the XML
                    status = entry_node.find("Status").text
                    japanese_text = entry_node.find("JapaneseText").text
                    english_text = entry_node.find("EnglishText").text
                    # Use English only when Status == Done and it is non-empty
                    if status == "Done":
                        final_text = english_text or japanese_text or ''
                    else:
                        final_text = japanese_text or ''
                    # Convert the text values to bytes using TBL, TAGS, COLORS, ...
                    bytesEntry = self.text_to_bytes(final_text)
                    nb_bytes = len(bytesEntry)
                    new_offset = menu_file.tell() + nb_bytes
                    pos = 0
                    if new_offset < sections_end[section_id]:
                        pos = menu_file.tell()
                    else:
                        # Current section is full — jump to the next one
                        section_id = section_id + 1
                        if section_id < len(sections_start):
                            print("Going at : {} ({})".format(
                                sections_start[section_id],
                                hex(sections_start[section_id])))
                            menu_file.seek(sections_start[section_id])
                            pos = menu_file.tell()
                        else:
                            break
                    # Add the PointerOffset and TextOffset
                    new_text_offsets[entry_node.find("PointerOffset").text] = pos
                    # Write the string plus its terminator
                    menu_file.write(bytesEntry + b'\x00')

            # Update the pointers
            for pointer_offset, text_offset in new_text_offsets.items():
                pointers_list = pointer_offset.split(",")
                new_value = text_offset - base_offset
                for pointer in pointers_list:
                    menu_file.seek(int(pointer))
                    # NOTE(review): pack format reconstructed (span lost)
                    menu_file.write(struct.pack("<L", new_value))

    def update_Xml_With_Translations(self, xml_file_name, translations, modify_xml=True):
        """Push translated strings into an XML file, matching entries by
        PointerOffset and marking them Done.

        NOTE(review): fully reconstructed — the method's name, signature and
        loop header were lost to corruption; only the ``ele_found`` update,
        the ``modify_xml`` write-back and ``return final_list`` survived."""
        root = etree.parse(xml_file_name).getroot()
        final_list = []
        for pointer_offset, text in translations.items():
            ele_found = [ele for ele in root.iter("Entry")
                         if ele.find("PointerOffset").text == str(pointer_offset)]
            if len(ele_found) > 0:
                ele_found[0].find("EnglishText").text = text
                ele_found[0].find("Status").text = "Done"
            else:
                print(pointer_offset)
        if modify_xml:
            txt = etree.tostring(root, encoding="UTF-8", pretty_print=True)
            with open(xml_file_name, "wb") as xmlFile:
                xmlFile.write(txt)
        return final_list

    # start_offset : where the pointers start for the section
    # nb_per_block : number of pointers per block before adding step
    # step : number of bytes before the next block
    def get_special_pointers(self, text_start, text_max, base_offset,
                             start_offset, nb_per_block, step, section, file_path=''):
        """Scan a blocked pointer table and keep the pointers whose target
        (value + base_offset) falls inside [text_start, text_max).
        Returns [pointers_offset, pointers_value] with null pointers dropped."""
        if file_path == '':
            file_path = self.elfOriginal
        pointers_offset = []
        pointers_value = []
        list_test = []
        is_bad_count = 0
        with open(file_path, "rb") as f:
            f.seek(start_offset, 0)
            while f.tell() < text_start and is_bad_count < 3:
                block_pointers_offset = [f.tell() + 4 * i for i in range(nb_per_block)]
                block_pointers_value = struct.unpack(
                    f"<{nb_per_block}L", f.read(4 * nb_per_block))
                list_test.extend(block_pointers_value)
                for off, val in zip(block_pointers_offset, block_pointers_value):
                    if text_start <= val + base_offset < text_max:
                        pointers_offset.append(off)
                        pointers_value.append(val)
                        is_bad_count = 0
                    else:
                        # BUGFIX: original read "is_bad_count = is_bad_count = 1",
                        # which just assigned 1 and never reached the <3 stop
                        # condition; increment so the scan can terminate early.
                        is_bad_count += 1
                f.read(step)

        # Only keep the non-null pointers
        good_indexes = [index for index, ele in enumerate(pointers_value) if ele != 0]
        pointers_offset = [pointers_offset[i] for i in good_indexes]
        pointers_value = [pointers_value[i] for i in good_indexes]
        return [pointers_offset, pointers_value]

    def prepare_Menu_File(self, file_original):
        """Copy *file_original* into the Menu working folder and unpack it
        with PakComposer/comptoe when it is a pak archive."""
        file_name = os.path.basename(file_original)
        # Copy the files under the Menu folder
        # NOTE(review): uses gameName here but repo_name in extract_All_Menu —
        # TODO confirm which path is intended.
        menu_path = "../Data/{}/Menu/New/".format(self.gameName)
        shutil.copy(file_original, menu_path + file_name)
        # Extract if needed (PakComposer or other)
        if "pak" in file_name:
            self.pakComposer_Comptoe(file_name, "-d", "-{}".format(file_name[-1]),
                                     True, menu_path)

    def extract_Menu_File(self, file_definition):
        """Extract pointers and text for every section of one menu file and
        write the resulting XML (via subclass helpers remove_duplicates /
        create_Node_XML)."""
        section_list = []
        pointers_offset_list = []
        texts_list = []
        base_offset = file_definition['Base_Offset']
        file_path = file_definition['File_Extract']
        with open(file_path, "rb") as f:
            for section in file_definition['Sections']:
                text_start = section['Text_Start']
                text_end = section['Text_End']
                # Extract Pointers of the file
                print("Extract Pointers")
                pointers_offset, pointers_value = self.get_special_pointers(
                    text_start, text_end, base_offset,
                    section['Pointer_Offset_Start'], section['Nb_Per_Block'],
                    section['Step'], section['Section'], file_path)
                # Extract Text from the pointers
                print("Extract Text")
                texts = [self.bytes_to_text(f, ele + base_offset)[0]
                         for ele in pointers_value]
                print(texts)
                # Flatten into parallel lists
                section_list.extend([section['Section']] * len(texts))
                pointers_offset_list.extend(pointers_offset)
                texts_list.extend(texts)

        # Remove duplicates (helper provided by the game-specific subclass)
        list_informations = self.remove_duplicates(
            section_list, pointers_offset_list, texts_list)
        # Build the XML Structure with the information
        root = self.create_Node_XML(file_path, list_informations, "MenuText")
        # Write to XML file
        txt = etree.tostring(root, encoding="UTF-8", pretty_print=True)
        with open(file_definition['File_XML']
                  .replace("/{}".format(self.repo_name), "")
                  .replace("/Data", "/Data/{}".format(self.repo_name)),
                  "wb") as xmlFile:
            xmlFile.write(txt)

    def extract_All_Menu(self):
        """Prepare and extract every file listed in MenuFiles.json."""
        print("Extracting Menu Files")
        self.mkdir("../Data/{}/Menu/New".format(self.repo_name))
        # Prepare the menu files (unpack PAK files and use comptoe),
        # each distinct original file only once
        files_to_prepare = list(dict.fromkeys(
            [ele['File_Original'] for ele in self.menu_files_json]))
        for ele in files_to_prepare:
            self.prepare_Menu_File(ele)
        for file_definition in self.menu_files_json:
            print("...{}".format(file_definition['File_Extract']))
            self.extract_Menu_File(file_definition)

    def extractAllSkits(self):
        # Placeholder — implemented by game-specific subclasses
        print("Extracting Skits")

    def extract_Main_Archive(self):
        # Placeholder — implemented by game-specific subclasses
        print("Main Archive")

    def unpackGame(self):
        """Extract everything from the game image.

        NOTE(review): extractMainArchive / extractAllStory are not defined in
        this base class (only extract_Main_Archive is) — they are expected
        from subclasses; confirm the naming mismatch before calling."""
        self.extractMainArchive()
        self.extractAllStory()
        self.extractAllSkits()

    # Create the final Iso or Folder that will help us run the game translated
    def packGame(self):
        # Insert the text translated and repack the files at the correct place
        # (insertAll is provided by the game-specific subclass)
        self.insertAll()