Merge pull request #15 from jsenior10/main

LZX decompression maybe
rjkiv
2026-02-12 19:53:41 -07:00
committed by GitHub
3 changed files with 68 additions and 1 deletion
Cargo.lock (generated) +7
@@ -382,6 +382,7 @@ dependencies = [
"indexmap",
"itertools",
"log",
"lzxd",
"memchr",
"memmap2",
"mimalloc",
@@ -801,6 +802,12 @@ version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lzxd"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b29dffab797218e12e4df08ef5d15ab9efca2504038b1b32b9b32fc844b39c9"
[[package]]
name = "matchers"
version = "0.1.0"
Cargo.toml +1
@@ -80,6 +80,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] }
xxhash-rust = { version = "0.8", features = ["xxh3"] }
zerocopy = { version = "0.8", features = ["derive"] }
pdb = "0.8.0"
lzxd = "0.2.6"
[target.'cfg(target_env = "musl")'.dependencies]
mimalloc = "0.1"
+60 -1
@@ -8,6 +8,8 @@ use std::{
};
use anyhow::{anyhow, bail, ensure, Result};
use byteorder::{BigEndian, ReadBytesExt};
use lzxd::Lzxd;
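// lzxd supplies the sliding-window LZX decoder used for compressed XEX images.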
use memchr::memmem;
use num_enum::{IntoPrimitive, TryFromPrimitive};
use object::{
@@ -649,7 +651,64 @@ impl XexInfo {
pe_image = compressed.to_vec();
}
XexCompression::Compressed => {
bail!("This xex is compressed using LZX, which is not currently supported.");
    let comp = bff.normal.as_ref().ok_or_else(|| anyhow!("LZX: missing normal compression info"))?;
    // Map the window size from the XEX header onto lzxd's WindowSize (XEX images normally use a 32 KB window).
    let window_size = comp.window_size as usize;
    let lzx_window = match window_size {
        0x8000 => lzxd::WindowSize::KB32,
        0x10000 => lzxd::WindowSize::KB64,
        _ => bail!("LZX: unsupported window size 0x{:X}", window_size),
    };
    let mut lzxd_state = Lzxd::new(lzx_window);
    let mut current_block_size = comp.block_size as usize;
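    // Block layout, as parsed below: a 4-byte big-endian size of the *next*
    // block, then what appears to be a 20-byte digest of that block (skipped
    // here, not verified), then a run of sub-chunks, each prefixed with a
    // 2-byte big-endian length. A zero chunk length ends the block, and a
    // next-block size of zero ends the stream.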
    while current_block_size != 0 {
        if pos_in + current_block_size > compressed.len() {
            bail!(
                "LZX: block needs {} bytes at 0x{:X} but only {} remain",
                current_block_size, pos_in, compressed.len() - pos_in
            );
        }
        let block = &compressed[pos_in..pos_in + current_block_size];
        pos_in += current_block_size;
        if block.len() < 24 {
            bail!("LZX: block too small for header: {} bytes", block.len());
        }
        let next_block_size =
            u32::from_be_bytes([block[0], block[1], block[2], block[3]]) as usize;
        // Skip the 4-byte size field plus the 20-byte digest to reach the sub-chunks.
        let mut off = 24usize;
        while off + 2 <= block.len() {
            let chunk_len = u16::from_be_bytes([block[off], block[off + 1]]) as usize;
            off += 2;
            // A zero-length sub-chunk marks the end of this block.
            if chunk_len == 0 {
                break;
            }
            if off + chunk_len > block.len() {
                bail!(
                    "LZX: sub-chunk at offset {} wants {} bytes but only {} remain",
                    off, chunk_len, block.len() - off
                );
            }
            let chunk_data = &block[off..off + chunk_len];
            off += chunk_len;
            // Each chunk decompresses to at most one window's worth of data;
            // cap the expected size at the room left in the output image.
            let expected = min(window_size, pe_image.len().saturating_sub(pos_out));
            if expected == 0 {
                break;
            }
            let decompressed = lzxd_state
                .decompress_next(chunk_data, expected)
                .map_err(|e| anyhow!(
                    "LZX: decompress failed at pos_out=0x{:X} (chunk_len={}, expected={}, block_off={}): {:?}",
                    pos_out, chunk_len, expected, off - chunk_len, e
                ))?;
            if decompressed.is_empty() {
                bail!("LZX: decompression returned zero bytes at pos_out=0x{:X}", pos_out);
            }
            // Copy only as much as still fits in the output image.
            let copy_len = min(decompressed.len(), pe_image.len() - pos_out);
            pe_image[pos_out..pos_out + copy_len]
                .copy_from_slice(&decompressed[..copy_len]);
            pos_out += copy_len;
        }
        current_block_size = next_block_size;
    }
    if pos_out == 0 {
        bail!("LZX: produced zero output bytes");
    }
//bail!("This xex is compressed using LZX, which is not currently supported.");
// this is actually pretty hard to implement, it involves use of the NormalCompression we retrieved earlier,
// plus the use of microsoft's LZX decompression algorithms
// here are some references if you try to attempt this