Basic set of files to extract TOR SCPK files

- Basic example in TOR_Test.py to extract the Scenario files to XML
This commit is contained in:
fortiersteven
2022-01-23 08:25:40 -05:00
parent 832feb64f3
commit 4f59122bac
16 changed files with 14224 additions and 0 deletions

BIN
10199.theirsce Normal file

Binary file not shown.

BIN
10726.theirsce Normal file

Binary file not shown.

1951
TBL.json Normal file

File diff suppressed because it is too large Load Diff

1989
TBL_All.json Normal file

File diff suppressed because it is too large Load Diff

6
TOPX_Tests.py Normal file
View File

@@ -0,0 +1,6 @@
# Driver script: extract every skit archive from Tales of Phantasia X
# using the character table named "tbl".
import ToolsTOPX

tool = ToolsTOPX.ToolsTOPX("tbl")
tool.extractAllSkits()

13
TOR_Test.py Normal file
View File

@@ -0,0 +1,13 @@
# Driver script: extract every Tales of Rebirth story SCPK archive to XML
# using the character table named "tbl".
# FIX: removed the unused imports (json, struct, comptolib, io, re, string,
# pandas) -- this fully-visible 3-statement script never referenced them.
import ToolsTOR

tool = ToolsTOR.ToolsTOR("tbl")
tool.extractAllStory()

4796
ToR.tbl Normal file

File diff suppressed because it is too large Load Diff

4591
ToR_utf8.tbl Normal file

File diff suppressed because it is too large Load Diff

157
ToolsTOPX.py Normal file
View File

@@ -0,0 +1,157 @@
from ToolsTales import ToolsTales
import subprocess
from dicttoxml import dicttoxml
import json
import struct
import shutil
import os
import re
import pandas as pd
import xml.etree.ElementTree as ET
import lxml.etree as etree
from xml.dom import minidom
from pathlib import Path
class ToolsTOPX(ToolsTales):
    """Extraction tool for Tales of Phantasia X (PSP).

    Unpacks the all.dat archive (using the file table inside the decrypted
    EBOOT) and expands the CAB story/skit/event archives it contains.
    """

    def __init__(self, tbl):
        super().__init__("TOPX", tbl)
        # Load the hash table for the files (hashed name -> real path).
        # FIX: use a context manager instead of manual open/close.
        with open('../Data/Misc/hashes.json', 'r') as json_file:
            self.hashes = json.load(json_file)
        self.discPath = '../Data/Disc'
        self.storyPathExtract = '../Data/Archives/Story/'       # Files are the result of PAKCOMPOSER + Comptoe here
        self.storyPathInsert = '../Data/Archives/All/map/pack'  # Files need to be .CAB here
        self.skitPathExtract = '../Data/'                       # Files are the result of PAKCOMPOSER + Comptoe here
        self.allPathExtract = '../Data/Archives/All/'
        self.allPathInsert = '../Data/Disc/PSP_GAME/USRDIR'     # File is all.dat

    #############################
    #
    # Extraction of files and unpacking
    #
    #############################

    # Make the basic directories for extracting all.dat
    def make_dirs(self):
        # FIX: collapsed 23 copy-pasted mkdir calls into one data-driven loop
        # (parents listed before children, same creation order as before).
        for sub in (
            '', 'battle', 'battle/character', 'battle/charsnd', 'battle/data',
            'battle/effect', 'battle/event', 'battle/gui', 'battle/map',
            'battle/resident', 'battle/tutorial', 'chat', 'gim', 'map',
            'map/data', 'map/pack', 'movie', 'snd', 'snd/init', 'snd/se3',
            'snd/se3/map_mus', 'snd/strpck', 'sysdata',
        ):
            self.mkdir(os.path.join('../Data/Archives/All', sub))

    # Extract each of the files from the all.dat
    def extract_files(self, start, size, filename):
        """Copy *size* bytes at offset *start* of all.dat into allPathExtract.

        *filename* is the hex name-hash; it is translated to the real file
        name when the hash table knows it.
        """
        if filename in self.hashes.keys():
            filename = self.hashes[filename]
        # FIX: both files are now closed even if a read/write raises.
        with open('../Data/Disc/Original/PSP_GAME/USRDIR/all.dat', 'rb') as input_file:
            input_file.seek(start, 0)
            data = input_file.read(size)
        with open(os.path.join(self.allPathExtract, filename), 'wb') as output_file:
            output_file.write(data)

    # Extract the story files
    def extractAllStory(self):
        print("Extracting Story")
        path = os.path.join(self.allPathExtract, 'map/pack/')
        storyPath = '../Data/Archives/Story/'
        self.mkdir(storyPath)
        for f in os.listdir(path):
            if os.path.isfile(path + f) and '.cab' in f:
                # Unpack the CAB into a PAK3 file
                fileName = storyPath + f.replace(".cab", ".pak3")
                subprocess.run(['expand', path + f, fileName])
                # Decompress using PAKCOMPOSER + Comptoe
                super().pakComposerAndComptoe(fileName, "-d", "-3")

    # Extract the skit files
    def extractAllSkits(self):
        print("Extracting Skits")
        path = os.path.join(self.allPathExtract, 'chat/')
        skitsPath = '../Data/Archives/Skits/'
        self.mkdir(skitsPath)
        for f in os.listdir(path):
            if os.path.isfile(path + f):
                # Unpack the CAB into a PAK3 file
                fileName = skitsPath + f.replace(".cab", ".pak3")
                subprocess.run(['expand', path + f, fileName])
                # Decompress using PAKCOMPOSER + Comptoe
                super().pakComposerAndComptoe(fileName, "-d", "-3")

    # Extract the event files
    def extractAllEvents(self):
        print("Extract Events")
        path = os.path.join(self.allPathExtract, 'map/')
        # FIX: was '..Data/Archives/Events/' (missing slash), which would
        # have created a literal '..Data' directory next to the scripts.
        eventsPath = '../Data/Archives/Events/'
        self.mkdir(eventsPath)
        for f in os.listdir(path):
            if os.path.isfile(path + f):
                # Unpack the CAB into a PAK3 file
                fileName = eventsPath + f.replace(".cab", ".pak3")
                subprocess.run(['expand', path + f, fileName])
                # Decompress using PAKCOMPOSER + Comptoe
                super().pakComposerAndComptoe(fileName, "-d", "-3")

    # Extract the file all.dat to the different directories
    def extractMainArchive(self):
        self.make_dirs()
        order = {'order': []}
        # Extract decrypted eboot
        super().extractDecryptedEboot()
        print("Extract All.dat")
        # The file table lives inside the decrypted EBOOT at 0x1FF624:
        # repeated (offset, size, name-hash) uint32 triplets, hash == 0 ends it.
        # FIX: the eboot handle was never closed; use context managers, and
        # only create order.json once extraction finished successfully.
        with open(os.path.join(self.miscPath, 'EBOOT_DEC.BIN'), 'rb') as eboot:
            eboot.seek(0x1FF624)
            while True:
                file_info = struct.unpack('<3I', eboot.read(12))
                if file_info[2] == 0:
                    break
                hash_ = '%08X' % file_info[2]
                self.extract_files(file_info[0], file_info[1], hash_)
                order['order'].append(hash_)
        with open(os.path.join(self.miscPath, 'order.json'), 'w') as order_json:
            json.dump(order, order_json, indent=4)

    def extractDecryptedEboot(self):
        # Delegates to the generic implementation in ToolsTales.
        super().extractDecryptedEboot()

283
ToolsTOR.py Normal file
View File

@@ -0,0 +1,283 @@
from ToolsTales import ToolsTales
import subprocess
from dicttoxml import dicttoxml
import json
import struct
import shutil
import os
import re
import pandas as pd
import xml.etree.ElementTree as ET
import lxml.etree as etree
import comptolib
from xml.dom import minidom
from pathlib import Path
import string
import io
class ToolsTOR(ToolsTales):
    """Extraction tool for Tales of Rebirth (PS2).

    Unpacks DAT.BIN (via the pointer table stored in the SLPS_254.50 ELF)
    and converts the THEIRSCE script found inside each story SCPK archive
    into an editable XML translation file.
    """

    POINTERS_BEGIN = 0xD76B0  # Offset to DAT.BIN pointer list start in SLPS_254.50 file
    POINTERS_END = 0xE60C8    # Offset to DAT.BIN pointer list end in SLPS_254.50 file
    # Each DAT.BIN pointer packs a byte remainder in its low 6 bits and a
    # 0x40-aligned file offset in the high bits (see extractMainArchive).
    HIGH_BITS = 0xFFFFFFC0
    LOW_BITS = 0x3F
    # Single bytes copied verbatim when decoding THEIRSCE text.
    PRINTABLE_CHARS = "".join(
        (string.digits, string.ascii_letters, string.punctuation, " ")
    )

    # Paths used by the tool
    datBinPath = '../Data/Disc/Original/DAT.BIN'
    elfPathExtract = '../Data/Disc/Original/SLPS_254.50'
    storyPathArchives = '../Data/Story/SCPK'  # Story SCPK archives
    storyPathXML = '../Data/Story/XML/'       # THEIRSCE scripts are dumped to XML here (see extractTheirSceXML)
    # NOTE(review): the double slash below looks accidental (harmless on most OSes)
    skitPathArchives = '../Data//Skits/'      # Skits XML files will be extracted here
    datPathExtract = '../Data/DAT/'
    allPathInsert = '../Data/Disc/PSP_GAME/USRDIR'

    def __init__(self, tbl):
        super().__init__("TOR", tbl)
        # TBL_All.json holds the TAGS/TBL lookup tables; every key except
        # those under "TBL" is a hex string converted to int here.
        with open("TBL_All.json") as f:
            jsonRaw = json.load(f)
        self.jsonTblTags = {k1: {int(k2, 16) if (k1 != "TBL") else k2: v2 for k2, v2 in jsonRaw[k1].items()} for k1, v1 in jsonRaw.items()}

    def get_pointers(self):
        """Read the DAT.BIN pointer table out of the game ELF.

        Returns the raw little-endian 32-bit pointers found between
        POINTERS_BEGIN and POINTERS_END (still packed with LOW_BITS).
        """
        f = open(self.elfPathExtract, "rb")
        f.seek(self.POINTERS_BEGIN, 0)
        pointers = []
        while f.tell() < self.POINTERS_END:
            p = struct.unpack("<L", f.read(4))[0]
            pointers.append(p)
        f.close()
        return pointers

    # Extract the story files
    def extractAllStory(self):
        """Convert every SCPK under <datPathExtract>/SCPK to an XML file."""
        print("Extracting Story")
        self.mkdir(self.storyPathXML)
        listFiles = [self.datPathExtract + 'SCPK/' + ele for ele in os.listdir(os.path.join(self.datPathExtract, "SCPK"))]
        for scpkFile in listFiles:
            self.extractTheirSceXML(scpkFile)

    def get_theirsce_from_scpk(self, scpk)->bytes:
        """Return the THEIRSCE stream found inside an open SCPK archive.

        *scpk* is a binary file object positioned at the start of the
        archive. Compressed entries are decompressed first. Returns an
        io.BytesIO over the script, or None when no THEIRSCE entry exists.
        Raises ValueError on a bad magic number.
        """
        header = scpk.read(4)
        if header != b"SCPK":
            # sys.exit(f"{file} is not a .scpk file!")
            raise ValueError("File is not a .scpk file!")
        scpk.read(4)
        nbFiles = struct.unpack("<L", scpk.read(4))[0]
        scpk.read(4)
        # Size table: one uint32 per contained file, then the data blobs.
        filesSize = []
        for i in range(nbFiles):
            filesSize.append(struct.unpack("<L", scpk.read(4))[0])
        for i in range(nbFiles):
            data = scpk.read(filesSize[i])
            if self.is_compressed(data):
                data = comptolib.decompress_data(data)
            if data[:8] == b"THEIRSCE":
                return io.BytesIO(data)
        # No THEIRSCE entry found in this archive.
        return None

    # NOTE(review): method name kept as written ("extraxt") so callers don't break.
    def extraxtStoryPointers(self, theirsce, strings_offset, fsize):
        """Scan the THEIRSCE bytecode section for 0xF8 text-reference opcodes.

        Returns two parallel lists: the file offsets of the 16-bit pointer
        operands, and the absolute offsets of the texts they reference
        (operands are relative to *strings_offset*).
        """
        pointers_offset = []
        texts_offset = []
        while theirsce.tell() < strings_offset:
            b = theirsce.read(1)
            if b == b"\xF8":
                addr = struct.unpack("<H", theirsce.read(2))[0]
                # Keep only operands that land inside the string section.
                if (addr < fsize - strings_offset) and (addr > 0):
                    # theirsce_data[name].append(theirsce.tell() - 2)
                    pointers_offset.append(theirsce.tell() - 2)
                    texts_offset.append(addr + strings_offset)
        return pointers_offset, texts_offset

    # Convert a bytes object to text using TAGS and TBL in the json file
    def bytesToText(self, theirsce):
        """Decode a NUL-terminated THEIRSCE string at the current position.

        Unmapped bytes are kept visible as "{XX}" placeholders so nothing
        is silently lost.
        """
        finalText = ''
        TAGS = self.jsonTblTags['TAGS']
        b = theirsce.read(1)
        while b != b"\x00":
            b = ord(b)
            if (b >= 0x99 and b <= 0x9F) or (b >= 0xE0 and b <= 0xEB):
                # Two-byte table character: look it up in the TBL section,
                # falling back to a literal {hi}{lo} dump when unmapped.
                c = (b << 8) + ord(theirsce.read(1))
                try:
                    finalText += (self.jsonTblTags['TBL'][str(c)])
                except KeyError:
                    b_u = (c >> 8) & 0xff
                    b_l = c & 0xff
                    finalText += ("{%02X}" % b_u)
                    finalText += ("{%02X}" % b_l)
            elif b == 0x1:
                # 0x01 is a line break.
                finalText += ("\n")
            elif b in (0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xB, 0xC, 0xD, 0xE, 0xF):
                # Tag opcode followed by a 32-bit parameter.
                b2 = struct.unpack("<L", theirsce.read(4))[0]
                if b in TAGS:
                    tag_name = TAGS.get(b)
                    tag_param = None
                    # HACK: looks up an optional module-level "<TAG>S" dict
                    # via eval; none are defined in this file, so this branch
                    # is currently dead. Avoid eval if it is ever revived.
                    if (tag_name.upper() + "S") in globals():
                        tag_param = eval("%sS.get(b2, None)" % tag_name.upper())
                    if tag_param != None:
                        finalText += "<%s>" % tag_param
                    else:
                        finalText += ("<%s:%08X>" % (tag_name, b2))
                else:
                    finalText += "<%02X:%08X>" % (b, b2)
            elif chr(b) in self.PRINTABLE_CHARS:
                finalText += chr(b)
            elif b >= 0xA1 and b < 0xE0:
                # Single-byte half-width katakana range: decode via cp932.
                finalText += struct.pack("B", b).decode("cp932")
            elif b in (0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19):
                # Opcode with a variable-length argument terminated by 0x80;
                # dump the whole sequence as {XX} placeholders.
                finalText += "{%02X}" % b
                next_b = b""
                while next_b != b"\x80":
                    next_b = theirsce.read(1)
                    finalText += "{%02X}" % ord(next_b)
            elif b == 0x81:
                # 0x81 0x40 is the cp932 full-width space.
                next_b = theirsce.read(1)
                if next_b == b"\x40":
                    finalText += " "
                else:
                    finalText += "{%02X}" % b
                    finalText += "{%02X}" % ord(next_b)
            else:
                finalText += "{%02X}" % b
            b = theirsce.read(1)
        return finalText

    # Extract THEIRSCE to XML
    def extractTheirSceXML(self, scpkFileName):
        """Dump every text entry of one SCPK's THEIRSCE script to an XML file
        under storyPathXML (Japanese text filled in, English left empty)."""
        # Create the XML skeleton
        root = etree.Element('SceneText')
        etree.SubElement(root, "OriginalName").text = scpkFileName
        stringsNode = etree.SubElement(root, "Strings")
        etree.SubElement(stringsNode, "Type").text = "Static"
        # Open the SCPK file to grab the THEIRSCE file
        with open(scpkFileName, "rb") as scpk:
            theirsce = self.get_theirsce_from_scpk(scpk)
        # Validate the header
        header = theirsce.read(8)
        if header != b"THEIRSCE":
            raise ValueError("No THEIRSCE header")
        # Start of the pointer section
        pointer_block = struct.unpack("<L", theirsce.read(4))[0]
        # Start of the text section (and base offset for text pointers)
        strings_offset = struct.unpack("<L", theirsce.read(4))[0]
        # File size
        fsize = theirsce.getbuffer().nbytes
        theirsce.seek(pointer_block, 0)  # Go to the start of the pointer section
        pointers_offset, texts_offset = self.extraxtStoryPointers(theirsce, strings_offset, fsize)
        # Extract the text referenced by each pointer
        textList = []
        for i in range(len(texts_offset)):
            # Extract the text
            theirsce.seek(texts_offset[i], 0)
            text = self.bytesToText(theirsce)
            # Add it to the XML node
            entry_node = etree.SubElement(stringsNode, "Entry")
            etree.SubElement(entry_node, "PointerOffset").text = str(pointers_offset[i])
            etree.SubElement(entry_node, "JapaneseText").text = text
            etree.SubElement(entry_node, "EnglishText").text = ''
            etree.SubElement(entry_node, "Notes").text = ''
            # Empty entries need no translation work.
            if text == '':
                statusText = 'Done'
            else:
                statusText = 'To Do'
            etree.SubElement(entry_node, "Status").text = statusText
        # Write the XML file
        txt = etree.tostring(root, encoding="UTF-8", pretty_print=True)
        with open(os.path.join(self.storyPathXML, self.get_file_name(scpkFileName) + ".xml"), "wb") as xmlFile:
            xmlFile.write(txt)

    # Extract the file DAT.BIN to the different directories
    def extractMainArchive(self):
        """Split DAT.BIN into individual files, decompressing compto blobs
        and sorting results into per-extension folders under datPathExtract."""
        f = open(self.datBinPath, "rb")
        pointers = self.get_pointers()
        total_files = len(pointers)
        for i in range(total_files - 1):
            # Low bits carry the tail-byte count; high bits the aligned offset.
            remainder = pointers[i] & self.LOW_BITS
            start = pointers[i] & self.HIGH_BITS
            end = (pointers[i + 1] & self.HIGH_BITS) - remainder
            f.seek(start, 0)
            size = end - start
            if size == 0:
                # Ignore 0 byte files
                continue
            data = f.read(size)
            file_name = "%05d" % i
            if self.is_compressed(data):
                # Keep the compression type in the name for later re-insertion.
                c_type = struct.unpack("<b", data[:1])[0]
                data = comptolib.decompress_data(data)
                extension = self.get_extension(data)
                final_path = self.datPathExtract + "/%s/%s.%d.%s" % (
                    extension.upper(),
                    file_name,
                    c_type,
                    extension,
                )
            else:
                extension = self.get_extension(data)
                final_path = self.datPathExtract + "/%s/%s.%s" % (
                    extension.upper(),
                    file_name,
                    extension,
                )
            folderPath = os.path.join(self.datPathExtract, extension.upper())
            self.mkdir(folderPath)
            with open(final_path, "wb") as output:
                output.write(data)
            print("Writing file %05d/%05d..." % (i, total_files), end="\r")
        print("Writing file %05d/%05d..." % (i, total_files))
        f.close()

334
ToolsTales.py Normal file
View File

@@ -0,0 +1,334 @@
import subprocess
from dicttoxml import dicttoxml
import json
import struct
import shutil
import os
import re
import pandas as pd
import xml.etree.ElementTree as ET
import lxml.etree as etree
from xml.dom import minidom
import re
import collections
import comptolib
import lxml.etree as ET
class ToolsTales:
    """Base class for the per-game translation tools (ToolsTOR, ToolsTOPX...).

    Holds the character-table (tbl) lookup plus generic helpers to detect
    file formats, (de)compress PAK archives and drive the extract/insert
    pipeline. Game-specific subclasses override the extract*/insert* hooks.
    """

    def __init__(self, gameName, tblFile):
        self.gameName = gameName
        self.basePath = os.getcwd()
        self.miscPath = os.path.join(self.basePath, "../Data/Misc/")
        # Load the tbl file: each line is "<hex byte sequence>=<text>".
        # NOTE(review): the path is hard-coded to ToR.tbl and the tblFile
        # argument is ignored -- presumably it should open tblFile; confirm.
        with open(r"ToR.tbl", "r", encoding="utf-8") as tbl_handle:
            lines = tbl_handle.readlines()
        tblList = [[bytes.fromhex(ele.split("=", 1)[0]), ele.split("=", 1)[1].replace("\n", "")] for ele in lines]
        tempDict = dict(tblList)
        # Longest byte sequences first so greedy matching picks them first.
        tblDict = dict()
        for k in sorted(tempDict, key=len, reverse=True):
            tblDict[k] = tempDict[k]
        # FIX: the table used to be built and then silently discarded.
        self.tblDict = tblDict

    def mkdir(self, d):
        """Create directory *d* (and missing parents) if it does not exist."""
        # FIX: replaces a bare try/except that swallowed every error.
        os.makedirs(d, exist_ok=True)

    ##############################
    #
    # Utility functions
    #
    ##############################

    def pakComposerAndComptoe(self, fileName, action, fileType):
        """Compress or decompress a pak file with PAKCOMPOSER + Comptoe.

        action: '-c' to compress, '-d' to decompress.
        fileType: '-0', '-1' or '-3' (pak variant).
        fileName: location of the PAK file to process.
        """
        if action == '-c':
            # Delete the previous output if already there.
            # NOTE(review): ".pak{}" is a literal, so this replace can never
            # match a real ".pak0/1/3" name -- looks like a missing
            # .format(fileType[1]); confirm the intended target file.
            if os.path.exists(fileName):
                os.remove(fileName.replace(".pak{}", fileType[1]))
        # Run PAKCOMPOSER with parameters (unused result no longer kept).
        subprocess.run(["pakcomposer", action, fileName, fileType, "-v", "-u", "-x"])

    def get_extension(self, data):
        """Guess a file extension from the magic number / structure of *data*."""
        if data[:4] == b"SCPK":
            return "scpk"
        if data[:4] == b"TIM2":
            return "tm2"
        if data[:4] == b"\x7FELF":
            return "irx"
        if data[:8] == b"IECSsreV":
            if data[0x50:0x58] == b"IECSigaV":
                return "hd"
            elif data[0x30:0x38] == b"IECSidiM":
                return "sq"
        if data[:16] == b"\x00" * 0x10:
            if data[16:18] != b"\x00\x00":
                return "bd"
        if data[:8] == b"THEIRSCE":
            return "theirsce"
        if data[:3] == b"MFH":
            return "mfh"
        if data[:4] == b"EBG\x00":
            return "ebg"
        if data[:4] == b"anp3":
            return "anp3"
        if data[:4] == b"EFFE":
            return "effe"
        # 0x####BD27 is the masked addiu sp,sp,#### mips instruction
        # These are overlay files, containing compiled MIPS assembly
        if data[2:4] == b"\xBD\x27":
            return "ovl"
        if data[6:8] == b"\xBD\x27":
            return "ovl"
        is_pak = self.get_pak_type(data)
        if is_pak != None:
            return is_pak
        if len(data) > 0x400:
            size = struct.unpack("<I", data[0x400:0x404])[0]
            if len(data) == size + 0x400:
                return "tmsk"
        # Didn't match anything
        return "bin"

    def is_compressed(self, data):
        """Return True when *data* looks like a compto-compressed blob.

        Layout (see comptolib): 1 version byte + uint32 decompressed size +
        uint32 compressed size + payload. SCPK entries may pad the tail
        with '#' bytes.
        FIX: this method was defined twice with identical bodies; the
        duplicate definition was removed.
        """
        if len(data) < 0x09:
            return False
        expected_size = struct.unpack("<L", data[1:5])[0]
        tail_data = abs(len(data) - (expected_size + 9))
        if expected_size == len(data) - 9:
            return True
        elif tail_data <= 0x10 and data[expected_size + 9:] == b"#" * tail_data:
            return True  # SCPK files have these trailing "#" bytes :(
        return False

    def get_pak_type(self, data):
        """Classify *data* as pak0/pak1/pak2/pak3/apak, or None if not a pak."""
        is_aligned = False
        if len(data) < 0x8:
            return None
        files = struct.unpack("<I", data[:4])[0]
        first_entry = struct.unpack("<I", data[4:8])[0]
        # Expectations
        pak1_header_size = 4 + (files * 8)
        pakN_header_size = 4 + (files * 4)
        # Check for alignment
        if first_entry % 0x10 == 0:
            is_aligned = True
            aligned_pak1_size = pak1_header_size + (0x10 - (pak1_header_size % 0x10))
            aligned_pakN_size = pakN_header_size + (0x10 - (pakN_header_size % 0x10))
        # First test pak0 (hope there are no aligned pak0 files...)
        if len(data) > pakN_header_size:
            calculated_size = 0
            for i in range(4, (files + 1) * 4, 4):
                calculated_size += struct.unpack("<I", data[i:i + 4])[0]
            if calculated_size == len(data) - pakN_header_size:
                return "pak0"
        # Test for pak1 & pak3
        if is_aligned:
            if aligned_pak1_size == first_entry:
                return "pak1"
            elif aligned_pakN_size == first_entry:
                return "pak3"
        else:
            if pak1_header_size == first_entry:
                return "pak1"
            elif pakN_header_size == first_entry:
                return "pak3"
        # Test for pak2
        offset = struct.unpack("<I", data[0:4])[0]
        if data[offset:offset + 8] == b"THEIRSCE":
            return "pak2"
        elif data[offset:offset + 8] == b"IECSsreV":
            return "apak"
        # Didn't match anything
        return None

    def makeCab(self):
        # Placeholder: CAB repacking is not implemented yet.
        print("CAB")

    def get_file_name(self, path):
        """Return the base name of *path* without its extension."""
        return os.path.splitext(os.path.basename(path))[0]

    def findall(self, p, s):
        '''Yields all the positions of
        the pattern p in the string s.'''
        i = s.find(p)
        while i != -1:
            yield i
            i = s.find(p, i + 1)

    def bytesToText(self, text):
        # NOTE(review): scratch/placeholder code -- it ignores *text*, works
        # on a hard-coded sample against an empty local table and discards
        # its result. Real decoding lives in subclasses (e.g. ToolsTOR).
        print("Converting")

        def findall(p, s):
            '''Yields all the positions of
            the pattern p in the string s.'''
            i = s.find(p)
            while i != -1:
                yield i
                i = s.find(p, i + 1)

        text = '[Veigue] is a nice guy'
        textInitial = text
        dictFound = dict()
        listFoundPositions = []
        listKeys = []
        listValues = []
        # Loop over all elements of the tbl file
        # key is in bytes, value is the text
        tblDict = dict()
        for key, value in tblDict.items():
            # Look for all the matches
            matches = [i for i in findall(value, textInitial) if i not in listFoundPositions]
            lenMatches = len(matches)
            if lenMatches > 0:
                print(value)
                text = text.replace(value, '')
                lenValue = len(value)
                x = [listFoundPositions.extend(list(range(posStart, posStart + lenValue))) for posStart in matches]
                listKeys.extend(matches)
                listValues.extend([key] * lenMatches)
                if text == "":
                    break
        b''.join([listValues[pos] for pos in sorted(listKeys)])

    #############################
    #
    # Insertion of texts and packing of files
    #
    #############################

    def insertAllMenu(self):
        print("Inserting Menus")

    def insertStoryFile(self, fileName):
        # FIX: 'self' was missing from the signature, which made this
        # unusable as an instance method (self landed in fileName).
        print("Inserting story file: {}".format(fileName))

    def insertAllStory(self):
        print("Inserting Story")

    def insertAllSkits(self):
        print("Inserting Skits")

    def insertAll(self):
        self.insertAllMenu()
        self.insertAllStory()
        self.insertAllSkits()

    #############################
    #
    # Extraction of files and unpacking
    #
    #############################

    def extractDecryptedEboot(self):
        """Decrypt EBOOT.BIN into ../Data/Misc/EBOOT_DEC.BIN via deceboot."""
        print("Extracting Eboot")
        args = ["deceboot",
                os.path.join(self.basePath, "../Data/Disc/Original/PSP_GAME/SYSDIR/EBOOT.BIN"),
                os.path.join("../Data/Misc/EBOOT_DEC.BIN")]
        subprocess.run(args, cwd=self.basePath)

    def extractAllMenu(self):
        print("Extracting menu")

    def extractAllStory(self):
        print("Extracting Story")

    def extractAllSkits(self):
        print("Extracting Skits")

    def extractMainArchive(self):
        print("Main Archive")

    def unpackGame(self):
        self.extractMainArchive()
        self.extractAllStory()
        self.extractAllSkits()

    # Create the final ISO or folder that will let us run the translated game
    def packGame(self):
        # Insert the translated text and repack the files at the correct place
        self.insertAll()
#

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
comptolib.dll Normal file

Binary file not shown.

104
comptolib.py Normal file
View File

@@ -0,0 +1,104 @@
import ctypes, os, struct
# Status codes returned by the native compto library.
SUCCESS = 0
ERROR_FILE_IN = -1
ERROR_FILE_OUT = -2
ERROR_MALLOC = -3
ERROR_BAD_INPUT = -4
ERROR_UNKNOWN_VERSION = -5
ERROR_FILES_MISMATCH = -6


class ComptoFileInputError(Exception):
    pass


class ComptoFileOutputError(Exception):
    pass


class ComptoMemoryAllocationError(Exception):
    pass


class ComptoBadInputError(Exception):
    pass


class ComptoUnknownVersionError(Exception):
    pass


class ComptoMismatchedFilesError(Exception):
    pass


class ComptoUnknownError(Exception):
    pass


def RaiseError(error: int):
    """Translate a compto status code into the matching exception.

    Does nothing on SUCCESS; any code outside the known set raises
    ComptoUnknownError.
    """
    if error == SUCCESS:
        return
    known = {
        ERROR_FILE_IN: ComptoFileInputError("Error with input file"),
        ERROR_FILE_OUT: ComptoFileOutputError("Error with output file"),
        ERROR_MALLOC: ComptoMemoryAllocationError("Malloc failure"),
        ERROR_BAD_INPUT: ComptoBadInputError("Bad Input"),
        ERROR_UNKNOWN_VERSION: ComptoUnknownVersionError("Unknown version"),
        ERROR_FILES_MISMATCH: ComptoMismatchedFilesError("Mismatch"),
    }
    raise known.get(error, ComptoUnknownError("Unknown error"))
# Load the native compto compression library from next to this script.
# NOTE: this runs at import time; importing this module fails if the DLL
# is missing or not loadable on the current platform.
comptolib_path = os.path.dirname(os.path.abspath(__file__)) + "/comptolib.dll"
comptolib = ctypes.cdll.LoadLibrary(comptolib_path)

# int Decode(int version, void *in, int in_size, void *out, unsigned *out_size)
compto_decode = comptolib.Decode
compto_decode.argtypes = ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.POINTER(ctypes.c_uint)
compto_decode.restype = ctypes.c_int

# int Encode(...) -- same signature as Decode.
compto_encode = comptolib.Encode
compto_encode.argtypes = ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.POINTER(ctypes.c_uint)
compto_encode.restype = ctypes.c_int

# int DecodeFile(char *in, char *out, int raw, int version)
compto_fdecode = comptolib.DecodeFile
compto_fdecode.argtypes = ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.c_int
compto_fdecode.restype = ctypes.c_int

# int EncodeFile(char *in, char *out, int raw, int version)
compto_fencode = comptolib.EncodeFile
compto_fencode.argtypes = ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.c_int
compto_fencode.restype = ctypes.c_int
def compress_data(input: bytes, raw: bool = False, version: int = 3):
    """Compress *input* with the compto codec.

    raw=False (default) prefixes the result with the 9-byte header
    <version:int8><decompressed_size:uint32><compressed_size:uint32>
    that decompress_data expects; raw=True returns the bare stream.
    Raises a Compto* exception on a non-zero library status.
    """
    input_size = len(input)
    # Worst-case output per the library: 9/8 of the input plus slack.
    max_size = ((input_size * 9) // 8) + 10
    # FIX: use a writable ctypes buffer. The original passed an immutable
    # `bytes` object for the C routine to write into, which mutates
    # immutable memory (undefined behavior in CPython).
    out_buf = ctypes.create_string_buffer(max_size)
    output_size = ctypes.c_uint(max_size)
    error = compto_encode(version, input, input_size, out_buf, ctypes.byref(output_size))
    RaiseError(error)
    # FIX: also trim the raw result to the size reported by the library
    # (the old code returned the whole padded buffer when raw=True).
    output = out_buf.raw[:output_size.value]
    if not raw:
        output = struct.pack("<b", version) + struct.pack("<2L", input_size, output_size.value) + output
    return output
def decompress_data(input: bytes, raw: bool = False, version: int = 3) -> bytes:
    """Decompress a compto stream.

    raw=False: *input* starts with the 9-byte header written by
    compress_data, which supplies the version and the exact output size.
    raw=True: headerless stream; the output buffer is sized at 10x the
    input as a worst case and trimmed to what the library reports.
    Raises a Compto* exception on a non-zero library status.
    """
    if raw:
        input_size = len(input)
        output_size = input_size * 10
    else:
        version, = struct.unpack("<b", input[:1])
        input_size, output_size = struct.unpack("<2L", input[1:9])
        input = input[9:]
    # FIX: writable ctypes buffer instead of an immutable `bytes` object
    # (see compress_data), and keep a reference to the out-size variable so
    # the value the library writes back can actually be read -- the old
    # code passed a throwaway temporary and returned the untrimmed buffer.
    out_buf = ctypes.create_string_buffer(output_size)
    out_size = ctypes.c_uint(output_size)
    error = compto_decode(version, input, input_size, out_buf, ctypes.byref(out_size))
    RaiseError(error)
    return out_buf.raw[:out_size.value]
def compress_file(input: str, output: str, raw: bool = False, version: int = 3):
    """Compress the file *input* into *output* through the compto DLL."""
    status = compto_fencode(input.encode("utf-8"), output.encode("utf-8"), raw, version)
    RaiseError(status)
def decompress_file(input: str, output: str, raw: bool = False, version: int = 3):
    """Decompress the file *input* into *output* through the compto DLL."""
    status = compto_fdecode(input.encode("utf-8"), output.encode("utf-8"), raw, version)
    RaiseError(status)