Update Scripts to extract and insert the menu

fortiersteven
2022-02-10 20:13:15 -05:00
parent 3c6c455abb
commit 33ea51c6fa
21 changed files with 14036 additions and 178 deletions

00013.pak3 (new binary file, not shown)

MenuFiles.json (new file, 205 lines)

@@ -0,0 +1,205 @@
[
{
"File_Extract": "../Data/TOR/Disc/Original/SLPS_254.50",
"File_XML": "../Data/TOR/Menu/XML/SLPS_254.xml",
"File_New": "../Data/TOR/Disc/New/SLPS_254.50",
"Text_Start": 1120640,
"Text_End": 1247839,
"Base_Offset": -1044480,
"Sections":
[
{
"Section": "Character Names",
"Pointer_Offset_Start": 830472,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1120640,
"Text_End": 1120990
},
{
"Section": "Action 1",
"Pointer_Offset_Start": 945440,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1124438,
"Text_End": 1124910
},
{
"Section": "Skit Names and Other",
"Pointer_Offset_Start": 945440,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1126048,
"Text_End": 1152415
},
{
"Section": "Artes",
"Pointer_Offset_Start": 1051588,
"Nb_Per_Block": 3,
"Step": 40,
"Text_Start": 1153174,
"Text_End": 1165478
},
{
"Section": "Attribute",
"Pointer_Offset_Start": 1063172,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1179432,
"Text_End": 1181078
},
{
"Section": "Armor",
"Pointer_Offset_Start": 1068892,
"Nb_Per_Block": 2,
"Step": 20,
"Text_Start": 1190704,
"Text_End": 1200222
},
{
"Section": "Weapon",
"Pointer_Offset_Start": 1072116,
"Nb_Per_Block": 2,
"Step": 20,
"Text_Start": 1200224,
"Text_End": 1210143
},
{
"Section": "Jewel",
"Pointer_Offset_Start": 1075500,
"Nb_Per_Block": 2,
"Step": 12,
"Text_Start": 1210144,
"Text_End": 1211055
},
{
"Section": "Charm",
"Pointer_Offset_Start": 1075888,
"Nb_Per_Block": 2,
"Step": 16,
"Text_Start": 1211056,
"Text_End": 1213751
},
{
"Section": "Consumables",
"Pointer_Offset_Start": 1076680,
"Nb_Per_Block": 2,
"Step": 16,
"Text_Start": 1213752,
"Text_End": 1222206
},
{
"Section": "Cooking",
"Pointer_Offset_Start": 1094844,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1236128,
"Text_End": 1240958
},
{
"Section": "Title",
"Pointer_Offset_Start": 1095064,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 1241072,
"Text_End": 1247838
}
]
},
{
"File_Extract": "../Data/TOR/DAT/BIN/11181.bin",
"File_XML": "../Data/TOR/Menu/XML/11181.xml",
"File_New": "../Data/TOR/Menu/New/11181.bin",
"Text_Start": 60980,
"Text_End": 75466,
"Base_Offset": 58880,
"Sections":
[
{
"Section": "Triangle Menu",
"Pointer_Offset_Start": 58888,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 60980,
"Text_End": 75466
}
]
},
{
"File_Extract": "../Data/TOR/Menu/New/00013/00013_0000d.unknown",
"File_XML": "../Data/TOR/Menu/XML/00013_0000.xml",
"File_New": "../Data/TOR/Menu/New/",
"Base_Offset": -3077760,
"Sections":
[
{
"Section": "Battle Tutorial 1",
"Pointer_Offset_Start": 352272,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 360160,
"Text_End": 376318
},
{
"Section": "Battle Tutorial 2",
"Pointer_Offset_Start": 353104,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 363944,
"Text_End": 363974
},
{
"Section": "Battle Tutorial 3",
"Pointer_Offset_Start": 354648,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 368608,
"Text_End": 368638
},
{
"Section": "Battle Tutorial 4",
"Pointer_Offset_Start": 354880,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 368672,
"Text_End": 368830
},
{
"Section": "Battle Tutorial 5",
"Pointer_Offset_Start": 354944,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 368976,
"Text_End": 369582
},
{
"Section": "Battle Tutorial 6",
"Pointer_Offset_Start": 355068,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 369904,
"Text_End": 376318
}
]
},
{
"File_Extract": "../Data/TOR/DAT/BIN/11217.bin",
"File_XML": "../Data/TOR/Menu/XML/11217.xml",
"File_New": "../Data/TOR/Menu/New/",
"Base_Offset": -2984192,
"Sections":
[
{
"Section": "Stuff 1",
"Pointer_Offset_Start": 19536,
"Nb_Per_Block": 1,
"Step": 0,
"Text_Start": 27504,
"Text_End": 79101
}
]
}
]
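
For orientation, a minimal sketch of how the fields of a Sections entry appear to be consumed by get_special_pointers (added to ToolsTales.py further down): starting at Pointer_Offset_Start, blocks of Nb_Per_Block little-endian 32-bit pointers are read, Step bytes are skipped between blocks, and a pointer is kept when its value plus Base_Offset lands inside [Text_Start, Text_End). The helper name read_section_pointers is illustrative only, and the early exit after repeated out-of-range pointers is omitted.

import struct

def read_section_pointers(file_path, section, base_offset):
    # Illustrative restatement of the get_special_pointers loop in ToolsTales.py.
    nb = section["Nb_Per_Block"]
    offsets, values = [], []
    with open(file_path, "rb") as f:
        f.seek(section["Pointer_Offset_Start"])
        while f.tell() < section["Text_Start"]:
            block_offsets = [f.tell() + 4 * i for i in range(nb)]
            block_values = struct.unpack(f"<{nb}L", f.read(4 * nb))
            for off, val in zip(block_offsets, block_values):
                if section["Text_Start"] <= val + base_offset < section["Text_End"]:
                    offsets.append(off)
                    values.append(val)
            f.read(section["Step"])  # skip the non-pointer part of the record
    return offsets, values

For the SLPS_254.50 entry above, the Artes section would therefore be walked three pointers at a time with a 40-byte step between blocks.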

SLPS.xml (new file, 10822 lines; diff suppressed because it is too large)

SLPS_text.xlsx (new binary file, not shown)

View File

@@ -1,6 +1,77 @@
{
"TBL":
{
"39313":"\uFF1C",
"39314":"\uFF1E",
"39292":"\uFF58",
"39645":"\u25CF",
"58299":"\u708A",
"59219":"\u8C5A",
"57788":"\u8102",
"40611":"\u811A",
"40132":"\u5869",
"58019":"\u719F",
"40350":"\u7A6B",
"59242":"\u8EDF",
"40812":"\u4E32",
"57501":"\u7DB1",
"58759":"\u4E39",
"59100":"\u8C46",
"59806":"\u80AA",
"60071":"\u66DC",
"60366":"\u714C",
"40648":"\u92F8",
"60055":"\u878D",
"60364":"\u7A62",
"60367":"\u7953",
"58286":"\u976D",
"60225":"\u731F",
"59746":"\u7A42",
"60368":"\u9264",
"40392":"\u938C",
"59289":"\u8FB2",
"60353":"\u8D04",
"57534":"\u8F5F",
"57447":"\u4FAF",
"58778":"\u935B",
"60242":"\u71D0",
"58037":"\u6F64",
"59980":"\u8302",
"58468":"\u5C16",
"57417":"\u864E",
"59776":"\u7E2B",
"40576":"\u8ECC",
"60143":"\u7ADC",
"57973":"\u5C31",
"40867":"\u7E4B",
"60247":"\u9C57",
"60007":"\u51B6",
"60370":"\u8B33",
"59974":"\u7DBF",
"59986":"\u7DB2",
"40340":"\u5687",
"40631":"\u7AAE",
"59301":"\u5A46",
"60307":"\u7BED",
"59027":"\u7E8F",
"60362":"\u6E3E",
"40185":"\u67B6",
"57938":"\u707C",
"60344":"\u70B8",
"60357":"\u5486",
"60358":"\u54EE",
"58482":"\u65CB",
"59231":"\u8B0E",
"58008":"\u7E26",
"40955":"\u5F27",
"57991":"\u8E74",
"57709":"\u685C",
"57924":"\u659C",
"58295":"\u5782",
"59328":"\u9019",
"40109":"\u5186",
"59560": "\u7015",
"59536": "\u4ff5",
"57820": "\u81ea",
"60047": "\u7531",
"58359": "\u9752",

TBL_All_Backup.json (new file, 1989 lines; diff suppressed because it is too large)

View File

@@ -6,40 +6,25 @@ import io
import re
import string
import pandas as pd
import json
tool = ToolsTOR.ToolsTOR("tbl")
tool.extractMainArchive()
tool.bytes_to_text_with_offset("../Data/TOR/Disc/Original/SLPS_254.50", 0x119890)
tool.extractAllStory()
tool.insertStoryFile("10247.scpk")
tool.insertMainArchive()
text = '自由青年'
text = '<Blue><Eugene> is awesome'
bytesFinal = tool.textToBytes(text)
def is_compressed(data):
if len(data) < 0x09:
return False
expected_size = struct.unpack("<L", data[1:5])[0]
tail_data = abs(len(data) - (expected_size + 9))
if expected_size == len(data) - 9:
return True
elif tail_data <= 0x10 and data[expected_size + 9 :] == b"#" * tail_data:
return True # SCPK files have these trailing "#" bytes :(
return False
with open("MenuFiles.json") as f:
menu_files_json = json.load(f)
file_def = [ele for ele in menu_files_json if ele['File_Extract'] == "../Data/TOR/Menu/New/00013/00013_0000d.unknown" ][0]
tool.extract_Menu_File(file_def)
#tool.insert_Menu_File("../Data/TOR/Disc/Original/SLPS_254.50")
with open("event.dat", "rb") as f:
with open("00013.pak3", "rb") as f:
data = f.read()
is_compressed(data)
comptolib.decompress_data(data)
t = tool.get_pak_type(data)
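
As the is_compressed helper above implies, compressed entries carry a 9-byte header whose bytes 1-4 hold the little-endian size of the data that follows, and SCPK entries may end with up to 0x10 trailing '#' padding bytes. A synthetic blob that merely satisfies those checks (not real compto data; the leading type byte value is an assumption) would be:

import struct
payload = bytes(0x20)                                        # stand-in for a compressed stream
blob = b"\x03" + struct.pack("<L", len(payload)) + bytes(4) + payload
assert is_compressed(blob)                                   # size field == len(blob) - 9
assert is_compressed(blob + b"####")                         # '#' tail of <= 0x10 bytes is tolerated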

View File

@@ -30,44 +30,39 @@ class ToolsTOR(ToolsTales):
#Paths used
datBinOriginal = '../Data/Disc/Original/DAT.BIN'
datBinNew = '../Data/Disc/New/DAT.BIN'
elfOriginal = '../Data/Disc/Original/SLPS_254.50'
elfNew = '../Data/Disc/New/SLPS_254.50'
storyPathArchives= '../Data/Story/SCPK/' #Story XML files will be extracted here
storyPathXML = '../Data/Story/XML/' #SCPK will be repacked here
skitPathArchives = '../Data//Skits/' #Skits XML files will be extracted here
datPathExtract = '../Data/DAT/'
datBinOriginal = '../Data/TOR/Disc/Original/DAT.BIN'
datBinNew = '../Data/TOR/Disc/New/DAT.BIN'
elfOriginal = '../Data/TOR/Disc/Original/SLPS_254.50'
elfNew = '../Data/TOR/Disc/New/SLPS_254.50'
storyPathArchives= '../Data/TOR/Story/SCPK/' #Story XML files will be extracted here
storyPathXML = '../Data/TOR/Story/XML/' #SCPK will be repacked here
skitPathArchives = '../Data/TOR/Skits/' #Skits XML files will be extracted here
datPathExtract = '../Data/TOR/DAT/'
def __init__(self, tbl):
super().__init__("TOR", tbl)
print("Loading TBL json")
with open("TBL_All.json") as f:
jsonRaw = json.load(f)
jsonTblTags = jsonTblTags ={ k1:{ int(k2,16) if (k1 != "TBL") else k2:v2 for k2,v2 in jsonRaw[k1].items()} for k1,v1 in jsonRaw.items()}
self.jsonTblTags ={ k1:{ int(k2,16) if (k1 != "TBL") else k2:v2 for k2,v2 in jsonRaw[k1].items()} for k1,v1 in jsonRaw.items()}
print("TBL json is loaded")
with open("MenuFiles.json") as f:
self.menu_files_json = json.load(f)
self.itable = dict([[i, struct.pack(">H", int(j))] for j, i in self.jsonTblTags['TBL'].items()])
self.itags = dict([[i, j] for j, i in self.jsonTblTags['TAGS'].items()])
self.inames = dict([[i, j] for j, i in self.jsonTblTags['NAMES'].items()])
self.icolors = dict([[i, j] for j, i in self.jsonTblTags['COLORS'].items()])
def get_pointers(self):
f = open(self.elfOriginal , "rb")
f.seek(self.POINTERS_BEGIN, 0)
pointers = []
while f.tell() < self.POINTERS_END:
p = struct.unpack("<L", f.read(4))[0]
pointers.append(p)
f.close()
return pointers
# Extract the story files
@@ -130,119 +125,7 @@ class ToolsTOR(ToolsTales):
return pointers_offset, texts_offset
#Convert a bytes object to text using TAGS and TBL in the json file
def bytesToText(self, theirsce):
finalText = ''
TAGS = self.jsonTblTags['TAGS']
b = theirsce.read(1)
while b != b"\x00":
b = ord(b)
if (b >= 0x99 and b <= 0x9F) or (b >= 0xE0 and b <= 0xEB):
c = (b << 8) + ord(theirsce.read(1))
# if str(c) not in json_data.keys():
# json_data[str(c)] = char_index[decode(c)]
try:
finalText += (self.jsonTblTags['TBL'][str(c)])
except KeyError:
b_u = (c >> 8) & 0xff
b_l = c & 0xff
finalText += ("{%02X}" % b_u)
finalText += ("{%02X}" % b_l)
elif b == 0x1:
finalText += ("\n")
elif b in (0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xB, 0xC, 0xD, 0xE, 0xF):
b2 = struct.unpack("<L", theirsce.read(4))[0]
if b in TAGS:
tag_name = TAGS.get(b)
tag_param = None
tag_search = tag_name.upper()+'S'
if (tag_search in self.jsonTblTags.keys()):
tags2 = self.jsonTblTags[tag_search]
tag_param = tags2.get(b2, None)
if tag_param != None:
finalText += tag_param
else:
finalText += ("<%s:%08X>" % (tag_name, b2))
else:
finalText += "<%02X:%08X>" % (b, b2)
elif chr(b) in self.PRINTABLE_CHARS:
finalText += chr(b)
elif b >= 0xA1 and b < 0xE0:
finalText += struct.pack("B", b).decode("cp932")
elif b in (0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19):
finalText += "{%02X}" % b
next_b = b""
while next_b != b"\x80":
next_b = theirsce.read(1)
finalText += "{%02X}" % ord(next_b)
elif b == 0x81:
next_b = theirsce.read(1)
if next_b == b"\x40":
finalText += " "
else:
finalText += "{%02X}" % b
finalText += "{%02X}" % ord(next_b)
else:
finalText += "{%02X}" % b
b = theirsce.read(1)
return finalText
#Convert text to Bytes object to reinsert text into THEIRSCE and other files
def textToBytes(self, text):
unames = []
splitLineBreak = text.split('\x0A')
nb = len(splitLineBreak)
bytesFinal = b''
i=0
for line in splitLineBreak:
string_hex = re.split(self.HEX_TAG, line)
string_hex = [sh for sh in string_hex if sh]
for s in string_hex:
if re.match(self.HEX_TAG, s):
bytesFinal += struct.pack("B", int(s[1:3], 16))
else:
s_com = re.split(self.COMMON_TAG, s)
s_com = [sc for sc in s_com if sc]
for c in s_com:
if re.match(self.COMMON_TAG, c):
if ":" in c:
split = c.split(":")
if split[0][1:] in self.itags.keys():
bytesFinal += struct.pack("B", self.itags[split[0][1:]])
bytesFinal += struct.pack("<I", int(split[1][:8], 16))
else:
bytesFinal += struct.pack("B", int(split[0][1:], 16))
bytesFinal += struct.pack("<I", int(split[1][:8], 16))
if c in self.inames:
bytesFinal += struct.pack("B", 0xB)
bytesFinal += struct.pack("<I", self.inames[c])
if c in self.icolors:
bytesFinal += struct.pack("B", 0x5)
bytesFinal += struct.pack("<I", self.icolors[c])
else:
for c2 in c:
if c2 in self.itable.keys():
bytesFinal += self.itable[c2]
else:
bytesFinal += c2.encode("cp932")
i=i+1
if (nb >=2 and i<nb):
bytesFinal += b'\x01'
return bytesFinal
# Extract THEIRSCE to XML
def extractTheirSceXML(self, scpkFileName):
@@ -432,12 +315,12 @@ class ToolsTOR(ToolsTales):
# Extract the file DAT.BIN to the different directories
def extractMainArchive(self):
def extract_Main_Archive(self):
f = open( self.datBinOriginal, "rb")
pointers = self.get_pointers()
pointers = self.get_pointers(self.POINTERS_BEGIN)
total_files = len(pointers)
for i in range(total_files - 1):
@@ -480,7 +363,7 @@ class ToolsTOR(ToolsTales):
print("Writing file %05d/%05d..." % (i, total_files))
f.close()
def insertMainArchive(self):
def insert_Main_Archive(self):
sectors = [0]
remainders = []
buffer = 0
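
With the renames in this file (extractMainArchive becomes extract_Main_Archive, insertMainArchive becomes insert_Main_Archive, and get_pointers now takes its start offset from the caller), a full menu extraction pass would presumably look like this:

import ToolsTOR

tool = ToolsTOR.ToolsTOR("tbl")   # loads TBL_All.json and MenuFiles.json
tool.extract_Main_Archive()       # unpack DAT.BIN into ../Data/TOR/DAT/
tool.extract_All_Menu()           # write one XML file per entry of MenuFiles.json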

View File

@@ -63,6 +63,23 @@ class ToolsTales:
args
)
def get_pointers(self, start_offset):
f = open(self.elfOriginal , "rb")
f.seek(start_offset, 0)
pointers = []
while f.tell() < self.POINTERS_END:
p = struct.unpack("<L", f.read(4))[0]
pointers.append(p)
f.close()
return pointers
def get_extension(self, data):
if data[:4] == b"SCPK":
return "scpk"
@@ -145,8 +162,16 @@ class ToolsTales:
# Check for alignment
if first_entry % 0x10 == 0:
is_aligned = True
aligned_pak1_size = pak1_header_size + (0x10 - (pak1_header_size % 0x10))
aligned_pakN_size = pakN_header_size + (0x10 - (pakN_header_size % 0x10))
if pak1_header_size % 0x10 != 0:
pak1_check = pak1_header_size + (0x10 - (pak1_header_size % 0x10))
else:
pak1_check = pak1_header_size
if pakN_header_size % 0x10 != 0:
pakN_check = pakN_header_size + (0x10 - (pakN_header_size % 0x10))
else:
pakN_check = pakN_header_size
# First test pak0 (hope there are no aligned pak0 files...)
if len(data) > pakN_header_size:
@@ -158,9 +183,9 @@ class ToolsTales:
# Test for pak1 & pak3
if is_aligned:
if aligned_pak1_size == first_entry:
if pak1_check == first_entry:
return "pak1"
elif aligned_pakN_size == first_entry:
elif pakN_check == first_entry:
return "pak3"
else:
if pak1_header_size == first_entry:
@@ -199,26 +224,314 @@ class ToolsTales:
def get_file_name(self, path):
return os.path.splitext(os.path.basename(path))[0]
def findall(self, p, s):
'''Yields all the positions of
the pattern p in the string s.'''
i = s.find(p)
while i != -1:
yield i
i = s.find(p, i+1)
def bytes_to_text_with_offset(self, file_name, start_offset):
#Open file
f = open(file_name, "rb")
fsize = os.path.getsize(file_name)
f.seek(start_offset)
root = etree.Element('Text')
val = b'02'
while( f.tell() < 0x1198B8 ):
pos = f.tell()
if (val != b'\x00'):
offset = hex(pos).replace("0x","")
text = self.bytesToText(f)
node = etree.SubElement( root, "Entry")
etree.SubElement(node, "TextOffset").text = offset
etree.SubElement(node, "Japanese").text = text
val = f.read(1)
f.seek( f.tell()-1)
else:
val = f.read(1)
if (val != b'\x00'):
f.seek( f.tell()-1)
f.close()
txt=etree.tostring(root, encoding="UTF-8", pretty_print=True)
with open('test.xml', "wb") as xmlFile:
xmlFile.write(txt)
#Convert a bytes object to text using TAGS and TBL in the json file
def bytesToText(self, fileRead, end_strings = b"\x00", offset=-1):
finalText = ''
TAGS = self.jsonTblTags['TAGS']
if (offset > 0):
fileRead.seek(offset, 0)
b = fileRead.read(1)
while b != end_strings:
b = ord(b)
if (b >= 0x99 and b <= 0x9F) or (b >= 0xE0 and b <= 0xEB):
c = (b << 8) + ord(fileRead.read(1))
# if str(c) not in json_data.keys():
# json_data[str(c)] = char_index[decode(c)]
try:
finalText += (self.jsonTblTags['TBL'][str(c)])
except KeyError:
b_u = (c >> 8) & 0xff
b_l = c & 0xff
finalText += ("{%02X}" % b_u)
finalText += ("{%02X}" % b_l)
elif b == 0x1:
finalText += ("\n")
elif b in (0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xB, 0xC, 0xD, 0xE, 0xF):
b2 = struct.unpack("<L", fileRead.read(4))[0]
if b in TAGS:
tag_name = TAGS.get(b)
tag_param = None
tag_search = tag_name.upper()+'S'
if (tag_search in self.jsonTblTags.keys()):
tags2 = self.jsonTblTags[tag_search]
tag_param = tags2.get(b2, None)
if tag_param != None:
finalText += tag_param
else:
finalText += ("<%s:%08X>" % (tag_name, b2))
else:
finalText += "<%02X:%08X>" % (b, b2)
elif chr(b) in self.PRINTABLE_CHARS:
finalText += chr(b)
elif b >= 0xA1 and b < 0xE0:
finalText += struct.pack("B", b).decode("cp932")
elif b in (0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19):
finalText += "{%02X}" % b
next_b = b""
while next_b != b"\x80":
next_b = fileRead.read(1)
finalText += "{%02X}" % ord(next_b)
elif b == 0x81:
next_b = fileRead.read(1)
if next_b == b"\x40":
finalText += " "
else:
finalText += "{%02X}" % b
finalText += "{%02X}" % ord(next_b)
else:
finalText += "{%02X}" % b
b = fileRead.read(1)
return finalText
#Convert text to Bytes object to reinsert text into THEIRSCE and other files
def textToBytes(self, text):
unames = []
splitLineBreak = text.split('\x0A')
nb = len(splitLineBreak)
bytesFinal = b''
i=0
for line in splitLineBreak:
string_hex = re.split(self.HEX_TAG, line)
string_hex = [sh for sh in string_hex if sh]
for s in string_hex:
if re.match(self.HEX_TAG, s):
bytesFinal += struct.pack("B", int(s[1:3], 16))
else:
s_com = re.split(self.COMMON_TAG, s)
s_com = [sc for sc in s_com if sc]
for c in s_com:
if re.match(self.COMMON_TAG, c):
if ":" in c:
split = c.split(":")
if split[0][1:] in self.itags.keys():
bytesFinal += struct.pack("B", self.itags[split[0][1:]])
bytesFinal += struct.pack("<I", int(split[1][:8], 16))
else:
bytesFinal += struct.pack("B", int(split[0][1:], 16))
bytesFinal += struct.pack("<I", int(split[1][:8], 16))
if c in self.inames:
bytesFinal += struct.pack("B", 0xB)
bytesFinal += struct.pack("<I", self.inames[c])
if c in self.icolors:
bytesFinal += struct.pack("B", 0x5)
bytesFinal += struct.pack("<I", self.icolors[c])
else:
for c2 in c:
if c2 in self.itable.keys():
bytesFinal += self.itable[c2]
else:
bytesFinal += c2.encode("cp932")
i=i+1
if (nb >=2 and i<nb):
bytesFinal += b'\x01'
return bytesFinal
def create_Menu_Entry(self, strings_node, section, pointer_offset, text):
#Add it to the XML node
entry_node = etree.SubElement(strings_node, "Entry")
etree.SubElement(entry_node,"PointerOffset").text = str(pointer_offset)
etree.SubElement(entry_node,"JapaneseText").text = text
etree.SubElement(entry_node,"EnglishText").text = ''
etree.SubElement(entry_node,"Notes").text = ''
if text == '':
statusText = 'Done'
else:
statusText = 'To Do'
etree.SubElement(entry_node,"Status").text = statusText
def create_Menu_XML(self, fileName, list_informations):
root = etree.Element('MenuText')
sections = set([item[0] for item in list_informations])
for section in sections:
strings_node = etree.SubElement(root, 'Strings')
etree.SubElement(strings_node, "Section").text = section
list_section = [ele for ele in list_informations if ele[0] == section]
for s, pointers_offset, text in list_section:
self.create_Menu_Entry( strings_node, s, pointers_offset, text)
return root
def remove_duplicates(self, section_list, pointers_offset, texts_list):
final_list = []
unique_text = set(texts_list)
for text in unique_text:
indexes = [index for index,ele in enumerate(texts_list) if ele == text]
found = [str(pointers_offset[i]) for i in indexes]
found = sorted(set(found))
pointers_found = ",".join(found)
section = [section_list[i] for i in indexes][0]
final_list.append( (section, pointers_found, text))
final_list.sort()
return final_list
#############################
#
# Insertion of texts and packing of files
#
#############################
def insert_Menu_File(self, menu_file_path):
#Load all the banks for insertion and load XML
new_text_offsets = dict()
file_node = [ele for ele in self.menu_files_json if ele['File_Extract'] == menu_file_path][0]
xml_file_name = "../Data/{}/Menu/XML/".format(self.gameName) + self.get_file_name(menu_file_path)+'.xml'
tree = etree.parse(xml_file_name)
root = tree.getroot()
sections_start = [ section['Text_Start'] for section in file_node['Sections'] if section['Text_Start'] > 0 ]
sections_end = [ section['Text_End'] for section in file_node['Sections'] if section['Text_Start'] > 0 ]
base_offset = file_node['Base_Offset']
print(sections_start)
#Copy the original file
new_file_path = file_node['File_New']
shutil.copy( file_node['File_Extract'], new_file_path)
#Open the new file with r+b
section_id = 0
with open(new_file_path, "r+b") as menu_file:
menu_file.seek(sections_start[section_id])
section_max = max( sections_end )
ele = [ele for ele in root.findall("Strings") if ele.find('Section').text == "Armor"][0]
print(ele)
for entry_node in root.iter("Entry"):
if menu_file.tell() < section_max:
#Calculate the number of bytes
#Grab the fields from the Entry in the XML
status = entry_node.find("Status").text
japanese_text = entry_node.find("JapaneseText").text
english_text = entry_node.find("EnglishText").text
#Use the values only for Status = Done and use English if non empty
final_text = ''
if (status == "Done"):
final_text = english_text or japanese_text or ''
else:
final_text = japanese_text or ''
#Convert the text values to bytes using TBL, TAGS, COLORS, ...
bytesEntry = self.textToBytes(final_text)
nb_bytes = len(bytesEntry)
new_offset = menu_file.tell() + nb_bytes
pos=0
if new_offset < sections_end[section_id]:
pos = menu_file.tell()
else:
section_id = section_id+1
if (section_id < len( sections_start )):
print("Going at : {} ({})".format( sections_start[section_id] , hex( sections_start[section_id] )))
menu_file.seek( sections_start[section_id] )
pos = menu_file.tell()
else:
break
#Add the PointerOffset and TextOffset
new_text_offsets[entry_node.find("PointerOffset").text] = pos
#Write to the file
menu_file.write(bytesEntry + b'\x00')
#Update the pointers
for pointer_offset, text_offset in new_text_offsets.items():
pointers_list = pointer_offset.split(",")
new_value = text_offset - base_offset
for pointer in pointers_list:
menu_file.seek(int(pointer))
menu_file.write( struct.pack("<L", new_value))
def insertAllMenu(self):
print("Inserting Menus")
@@ -256,9 +569,112 @@ class ToolsTales:
cwd= self.basePath,
)
#start_offset : where the pointers start for the section
# nb_per_block : number of pointers per block before adding step
# step : number of bytes before the next block
def get_special_pointers(self, text_start, text_max, base_offset, start_offset, nb_per_block, step, section,file_path=''):
if file_path == '':
file_path = self.elfOriginal
print("Elf changed")
f = open(file_path , "rb")
f.seek(start_offset, 0)
pointers_offset = []
pointers_value = []
list_test = []
is_bad_count = 0
while f.tell() < text_start and is_bad_count <3:
block_pointers_offset = [f.tell()+4*i for i in range(nb_per_block)]
if section == "Artes":
print( [hex(ele) for ele in block_pointers_offset])
block_pointers_value = struct.unpack(f"<{nb_per_block}L", f.read(4 * nb_per_block))
list_test.extend(block_pointers_value)
for i in range(len(block_pointers_offset)):
if (block_pointers_value[i] + base_offset >= text_start and block_pointers_value[i] + base_offset < text_max):
#print(block_pointers_value[i])
pointers_offset.append(block_pointers_offset[i])
pointers_value.append(block_pointers_value[i])
is_bad_count = 0
else:
is_bad_count = is_bad_count + 1
f.read(step)
f.close()
def extractAllMenu(self):
print("Extracting menu")
#Only grab the good pointers
good_indexes = [index for index,ele in enumerate(pointers_value) if ele != 0]
pointers_offset = [pointers_offset[i] for i in good_indexes]
pointers_value = [pointers_value[i] for i in good_indexes]
return [pointers_offset, pointers_value]
def extract_Menu_File(self, file_definition):
section_list = []
pointers_offset_list = []
texts_list = []
base_offset = file_definition['Base_Offset']
file_path = file_definition['File_Extract']
with open(file_path, "rb") as f:
for section in file_definition['Sections']:
print("Section: {}".format(section))
text_start = section['Text_Start']
text_end = section['Text_End']
#Extract Pointers of the file
pointers_offset, pointers_value = self.get_special_pointers( text_start, text_end, base_offset, section['Pointer_Offset_Start'], section['Nb_Per_Block'], section['Step'], section['Section'], file_path)
#Extract Text from the pointers
texts = [ self.bytesToText(f, offset=ele + base_offset) for ele in pointers_value]
texts_offset = [ele + base_offset for ele in pointers_value]
#Make a list
section_list.extend( [section['Section']] * len(texts))
pointers_offset_list.extend( pointers_offset)
texts_list.extend( texts )
#Remove duplicates
list_informations = self.remove_duplicates(section_list, pointers_offset_list, texts_list)
#Build the XML Structure with the information
root = self.create_Menu_XML(file_path, list_informations)
#Write to XML file
txt=etree.tostring(root, encoding="UTF-8", pretty_print=True)
with open(file_definition['File_XML'], "wb") as xmlFile:
xmlFile.write(txt)
def extract_All_Menu(self):
section_list = []
texts_offsets_list = []
texts_list = []
for file_definition in self.menu_files_json:
print("Extracting...{}".format(file_definition['File_Extract']))
self.extract_Menu_File(file_definition)
def extractAllStory(self):
print("Extracting Story")

ToolsTales_Executable.py (new file, 213 lines)

@@ -0,0 +1,213 @@
import ToolsTOR
import json
import argparse
import textwrap
import os
import io
import re
SCRIPT_VERSION = "0.3"
def get_directory_path(path):
return os.path.dirname(os.path.abspath(path))
def check_arguments(parser, args):
if hasattr(args, "elf_path") and not args.elf_path:
args.elf_path = get_directory_path(args.input) + "/SLPS_254.50"
if hasattr(args, "elf_out") and not args.elf_out:
args.elf_out = get_directory_path(args.input) + "/NEW_SLPS_254.50"
if not args.output:
if not os.path.isdir(args.input):
args.output = get_directory_path(args.input)
args.output += "/" + args.input.split("/")[-1]
else:
args.output = args.input
def get_arguments(argv=None):
# Init argument parser
parser = argparse.ArgumentParser()
# Add arguments, obviously
parser.add_argument(
"--version", action="version", version="%(prog)s " + SCRIPT_VERSION
)
parser.add_argument(
"--game",
required=True,
metavar="game_name",
help="Options: TOR, TOPX, TOH",
)
sp = parser.add_subparsers(title="Available actions", required=True, dest="action")
# Utility commands
sp_utility = sp.add_parser(
"utility",
description="Usefull functions to be called from Translation App",
formatter_class=argparse.RawTextHelpFormatter,
)
sp_utility.add_argument(
"function",
choices=["hex2bytes"],
metavar="function_name",
help="Options: hex2bytes",
)
sp_utility.add_argument(
"param1",
metavar="param1",
help="First parameter of a function",
)
# Unpack commands
sp_unpack = sp.add_parser(
"unpack",
description="Unpacks some file types into more useful ones.",
formatter_class=argparse.RawTextHelpFormatter,
)
sp_unpack.add_argument(
"file",
choices=["all", "dat", "mfh", "theirsce", "scpk"],
metavar="FILE",
help="Options: all, dat, mfh, theirsce, scpk",
)
sp_unpack.add_argument(
"--input",
metavar="input_path",
required=True,
help="Specify input file path.",
type=os.path.abspath,
)
sp_unpack.add_argument(
"--output",
metavar="output_path",
help="Specify output path.",
type=os.path.abspath,
)
sp_unpack.add_argument(
"--elf",
metavar="elf_path",
dest="elf_path",
help="Specify custom SLPS_254.50 (a.k.a ELF) file path.",
type=os.path.abspath,
)
sp_unpack.add_argument(
"--no-decompress",
action="store_true",
help="Don't decompress compto files.",
)
# PAK commands
sp_pack = sp.add_parser("pack", help="Packs some file types into the originals.")
sp_pack.add_argument(
"file",
choices=["All", "Menu", "Story", "Skits"],
metavar="FILE",
help="Inserts files back into their containers.",
)
sp_pack.add_argument(
"--input",
metavar="input_path",
default="DAT.BIN",
help="Specify custom DAT.BIN output file path.",
type=os.path.abspath,
)
sp_pack.add_argument(
"--output",
metavar="output_path",
default="DAT",
help="Specify custom dat folder path.",
type=os.path.abspath,
)
sp_pack.add_argument(
"--elf",
metavar="elf_path",
default="../Data/TOR/Disc/Original/SLPS_254.50",
help="Specify custom SLPS_254.50 (a.k.a ELF) file path.",
type=os.path.abspath,
)
sp_pack.add_argument(
"--elf-out",
metavar="elf_output_path",
default="../Data/TOR/Disc/New/SLPS_254.50",
help="Specify custom SLPS_254.50 (a.k.a ELF) output file path.",
type=os.path.abspath,
)
# Export commands
sp_export = sp.add_parser("export", help="Exports, I guess.")
sp_export.add_argument(
"file", choices=["table"], metavar="file type", help="Exports data."
)
args = parser.parse_args()
#check_arguments(parser, args)
return args
def hex2bytes(tales_instance, hex_value):
bytes_value = bytes.fromhex(hex_value + " 00")
#print(bytes_value)
f = io.BytesIO(bytes_value)
f.seek(0)
txt = tales_instance.bytesToText(f, b'', -1)
txt = "\n\n".join([ele for ele in txt.split("{00}") if ele != ""])
with open("text_dump.txt", "w",encoding="utf-8") as f:
f.write(txt)
def getTalesInstance(game_name):
talesInstance = ToolsTOR.ToolsTOR("tbl")
if game_name == "TOR":
talesInstance = ToolsTOR.ToolsTOR("tbl")
return talesInstance
if __name__ == "__main__":
args = get_arguments()
#print(vars(args))
game_name = args.game
tales_instance = getTalesInstance(game_name)
#Utility function
if args.action == "utility":
if args.function == "hex2bytes":
hex2bytes(tales_instance, args.param1)
if args.action == "pack":
if args.file == "Menu":
#SLPS
tales_instance.insert_Menu_File("../Data/TOR/Disc/Original/SLPS_254.50")
print("new SLPS is found inside Data/{}/Disc/New".format(game_name))
#Other files for menu stuff
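
Based on the parser defined above, typical invocations would be the following (--game is required, and the hex string passed to hex2bytes is whatever bytes.fromhex accepts):

python ToolsTales_Executable.py --game TOR pack Menu
python ToolsTales_Executable.py --game TOR utility hex2bytes "<hex string>"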

Binary file not shown.

mapping.xlsx (new binary file, not shown)

tbl_missing.json (new file, 67 lines)

@@ -0,0 +1,67 @@
{
"58299":"\u708A",
"59219":"\u8C5A",
"57788":"\u8102",
"40611":"\u811A",
"40132":"\u5869",
"58019":"\u719F",
"40350":"\u7A6B",
"59242":"\u8EDF",
"40812":"\u4E32",
"57501":"\u7DB1",
"58759":"\u4E39",
"59100":"\u8C46",
"59806":"\u80AA",
"60071":"\u66DC",
"60366":"\u714C",
"40648":"\u92F8",
"60055":"\u878D",
"60364":"\u7A62",
"60367":"\u7953",
"58286":"\u976D",
"60225":"\u731F",
"59746":"\u7A42",
"60368":"\u9264",
"40392":"\u938C",
"59289":"\u8FB2",
"60353":"\u8D04",
"57534":"\u8F5F",
"57447":"\u4FAF",
"58778":"\u935B",
"60242":"\u71D0",
"58037":"\u6F64",
"59980":"\u8302",
"58468":"\u5C16",
"57417":"\u864E",
"59776":"\u7E2B",
"40576":"\u8ECC",
"60143":"\u7ADC",
"57973":"\u5C31",
"40867":"\u7E4B",
"60247":"\u9C57",
"60007":"\u51B6",
"60370":"\u8B33",
"59974":"\u7DBF",
"59986":"\u7DB2",
"40340":"\u5687",
"40631":"\u7AAE",
"59301":"\u5A46",
"60307":"\u7BED",
"59027":"\u7E8F",
"60362":"\u6E3E",
"40185":"\u67B6",
"57938":"\u707C",
"60344":"\u70B8",
"60357":"\u5486",
"60358":"\u54EE",
"58482":"\u65CB",
"59231":"\u8B0E",
"58008":"\u7E26",
"40955":"\u5F27",
"57991":"\u8E74",
"57709":"\u685C",
"57924":"\u659C",
"58295":"\u5782",
"59328":"\u9019",
"40109":"\u5186"
}

test.xlsx (new binary file, not shown)

test.xml (new file, 10 lines)

@@ -0,0 +1,10 @@
<Text>
<Entry>
<TextOffset>119890</TextOffset>
<Japanese>敵法陣</Japanese>
</Entry>
<Entry>
<TextOffset>119898</TextOffset>
<Japanese>ファイデリティ・テスタメント</Japanese>
</Entry>
</Text>

Binary files not shown (4 more files).

Some files were not shown because too many files have changed in this diff.