Compare commits
3 Commits
4af183ad74
...
ba1789f106
| Author | SHA1 | Date | |
|---|---|---|---|
|
ba1789f106
|
|||
|
842f4a8569
|
|||
|
ce6e30f727
|
6
crates/common/Cargo.toml
Normal file
6
crates/common/Cargo.toml
Normal file
@@ -0,0 +1,6 @@
|
||||
[package]
|
||||
name = "common"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
@@ -4,3 +4,4 @@ version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
common = { path = "../common" }
|
||||
|
||||
@@ -19,6 +19,9 @@ pub enum Error {
|
||||
InvalidEntryCount {
|
||||
got: i32,
|
||||
},
|
||||
TooManyEntries {
|
||||
got: usize,
|
||||
},
|
||||
DirectoryOutOfBounds {
|
||||
directory_offset: u64,
|
||||
directory_len: u64,
|
||||
@@ -65,6 +68,7 @@ impl fmt::Display for Error {
|
||||
write!(f, "NRes total_size mismatch: header={header}, actual={actual}")
|
||||
}
|
||||
Error::InvalidEntryCount { got } => write!(f, "invalid entry_count: {got}"),
|
||||
Error::TooManyEntries { got } => write!(f, "too many entries: {got} exceeds u32::MAX"),
|
||||
Error::DirectoryOutOfBounds {
|
||||
directory_offset,
|
||||
directory_len,
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
pub mod data;
|
||||
pub mod error;
|
||||
|
||||
use crate::data::{OutputBuffer, ResourceData};
|
||||
use crate::error::Error;
|
||||
use common::{OutputBuffer, ResourceData};
|
||||
use core::ops::Range;
|
||||
use std::cmp::Ordering;
|
||||
use std::fs::{self, OpenOptions as FsOpenOptions};
|
||||
@@ -97,7 +96,7 @@ impl Archive {
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, entry)| EntryRef {
|
||||
id: EntryId(idx as u32),
|
||||
id: EntryId(u32::try_from(idx).expect("entry count validated at parse")),
|
||||
meta: &entry.meta,
|
||||
})
|
||||
}
|
||||
@@ -112,7 +111,9 @@ impl Archive {
|
||||
let mut high = self.entries.len();
|
||||
while low < high {
|
||||
let mid = low + (high - low) / 2;
|
||||
let target_idx = self.entries[mid].meta.sort_index as usize;
|
||||
let Ok(target_idx) = usize::try_from(self.entries[mid].meta.sort_index) else {
|
||||
break;
|
||||
};
|
||||
if target_idx >= self.entries.len() {
|
||||
break;
|
||||
}
|
||||
@@ -123,7 +124,11 @@ impl Archive {
|
||||
match cmp {
|
||||
Ordering::Less => high = mid,
|
||||
Ordering::Greater => low = mid + 1,
|
||||
Ordering::Equal => return Some(EntryId(target_idx as u32)),
|
||||
Ordering::Equal => {
|
||||
return Some(EntryId(
|
||||
u32::try_from(target_idx).expect("entry count validated at parse"),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,7 +137,9 @@ impl Archive {
|
||||
if cmp_name_case_insensitive(name.as_bytes(), entry_name_bytes(&entry.name_raw))
|
||||
== Ordering::Equal
|
||||
{
|
||||
Some(EntryId(idx as u32))
|
||||
Some(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -175,11 +182,12 @@ impl Archive {
|
||||
editable.push(EditableEntry {
|
||||
meta: entry.meta.clone(),
|
||||
name_raw: entry.name_raw,
|
||||
data: arc[range].to_vec(),
|
||||
data: EntryData::Borrowed(range), // Copy-on-write: only store range
|
||||
});
|
||||
}
|
||||
Ok(Editor {
|
||||
path: path_buf,
|
||||
source: arc,
|
||||
entries: editable,
|
||||
})
|
||||
}
|
||||
@@ -202,14 +210,30 @@ impl Archive {
|
||||
|
||||
pub struct Editor {
|
||||
path: PathBuf,
|
||||
source: Arc<[u8]>,
|
||||
entries: Vec<EditableEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum EntryData {
|
||||
Borrowed(Range<usize>),
|
||||
Modified(Vec<u8>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct EditableEntry {
|
||||
meta: EntryMeta,
|
||||
name_raw: [u8; 36],
|
||||
data: Vec<u8>,
|
||||
data: EntryData,
|
||||
}
|
||||
|
||||
impl EditableEntry {
|
||||
fn data_slice<'a>(&'a self, source: &'a Arc<[u8]>) -> &'a [u8] {
|
||||
match &self.data {
|
||||
EntryData::Borrowed(range) => &source[range.clone()],
|
||||
EntryData::Modified(vec) => vec.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -228,7 +252,7 @@ impl Editor {
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, entry)| EntryRef {
|
||||
id: EntryId(idx as u32),
|
||||
id: EntryId(u32::try_from(idx).expect("entry count validated at add")),
|
||||
meta: &entry.meta,
|
||||
})
|
||||
}
|
||||
@@ -249,7 +273,7 @@ impl Editor {
|
||||
sort_index: 0,
|
||||
},
|
||||
name_raw,
|
||||
data: entry.data.to_vec(),
|
||||
data: EntryData::Modified(entry.data.to_vec()),
|
||||
});
|
||||
Ok(EntryId(id_u32))
|
||||
}
|
||||
@@ -263,8 +287,8 @@ impl Editor {
|
||||
});
|
||||
};
|
||||
entry.meta.data_size = u32::try_from(data.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
entry.data.clear();
|
||||
entry.data.extend_from_slice(data);
|
||||
// Replace with new data (triggers copy-on-write if borrowed)
|
||||
entry.data = EntryData::Modified(data.to_vec());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -282,14 +306,35 @@ impl Editor {
|
||||
|
||||
pub fn commit(mut self) -> Result<()> {
|
||||
let count_u32 = u32::try_from(self.entries.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
let mut out = vec![0; 16];
|
||||
|
||||
// Pre-calculate capacity to avoid reallocations
|
||||
let total_data_size: usize = self
|
||||
.entries
|
||||
.iter()
|
||||
.map(|e| e.data_slice(&self.source).len())
|
||||
.sum();
|
||||
let padding_estimate = self.entries.len() * 8; // Max 8 bytes padding per entry
|
||||
let directory_size = self.entries.len() * 64; // 64 bytes per entry
|
||||
let capacity = 16 + total_data_size + padding_estimate + directory_size;
|
||||
|
||||
let mut out = Vec::with_capacity(capacity);
|
||||
out.resize(16, 0); // Header
|
||||
|
||||
// Keep reference to source for copy-on-write
|
||||
let source = &self.source;
|
||||
|
||||
for entry in &mut self.entries {
|
||||
entry.meta.data_offset =
|
||||
u64::try_from(out.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
entry.meta.data_size =
|
||||
u32::try_from(entry.data.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
out.extend_from_slice(&entry.data);
|
||||
|
||||
// Calculate size and get slice separately to avoid borrow conflicts
|
||||
let data_len = entry.data_slice(source).len();
|
||||
entry.meta.data_size = u32::try_from(data_len).map_err(|_| Error::IntegerOverflow)?;
|
||||
|
||||
// Now get the slice again for writing
|
||||
let data_slice = entry.data_slice(source);
|
||||
out.extend_from_slice(data_slice);
|
||||
|
||||
let padding = (8 - (out.len() % 8)) % 8;
|
||||
if padding > 0 {
|
||||
out.resize(out.len() + padding, 0);
|
||||
@@ -353,7 +398,10 @@ fn parse_archive(bytes: &[u8], raw_mode: bool) -> Result<(Vec<EntryRecord>, u64)
|
||||
name
|
||||
},
|
||||
};
|
||||
return Ok((vec![entry], bytes.len() as u64));
|
||||
return Ok((
|
||||
vec![entry],
|
||||
u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
));
|
||||
}
|
||||
|
||||
if bytes.len() < 16 {
|
||||
@@ -386,6 +434,11 @@ fn parse_archive(bytes: &[u8], raw_mode: bool) -> Result<(Vec<EntryRecord>, u64)
|
||||
}
|
||||
let entry_count = usize::try_from(entry_count_i32).map_err(|_| Error::IntegerOverflow)?;
|
||||
|
||||
// Validate entry_count fits in u32 (required for EntryId)
|
||||
if entry_count > u32::MAX as usize {
|
||||
return Err(Error::TooManyEntries { got: entry_count });
|
||||
}
|
||||
|
||||
let total_size = read_u32(bytes, 12)?;
|
||||
let actual_size = u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
if u64::from(total_size) != actual_size {
|
||||
|
||||
@@ -609,6 +609,52 @@ fn nres_synthetic_read_find_and_edit() {
|
||||
let _ = fs::remove_file(&path);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nres_find_falls_back_when_sort_index_is_out_of_range() {
|
||||
let mut bytes = build_nres_bytes(&[
|
||||
SyntheticEntry {
|
||||
kind: 1,
|
||||
attr1: 0,
|
||||
attr2: 0,
|
||||
attr3: 0,
|
||||
name: "Alpha",
|
||||
data: b"a",
|
||||
},
|
||||
SyntheticEntry {
|
||||
kind: 2,
|
||||
attr1: 0,
|
||||
attr2: 0,
|
||||
attr3: 0,
|
||||
name: "Beta",
|
||||
data: b"b",
|
||||
},
|
||||
SyntheticEntry {
|
||||
kind: 3,
|
||||
attr1: 0,
|
||||
attr2: 0,
|
||||
attr3: 0,
|
||||
name: "Gamma",
|
||||
data: b"c",
|
||||
},
|
||||
]);
|
||||
|
||||
let entry_count = 3usize;
|
||||
let directory_offset = bytes
|
||||
.len()
|
||||
.checked_sub(entry_count * 64)
|
||||
.expect("directory offset underflow");
|
||||
let mid_entry_sort_index = directory_offset + 64 + 60;
|
||||
bytes[mid_entry_sort_index..mid_entry_sort_index + 4].copy_from_slice(&u32::MAX.to_le_bytes());
|
||||
|
||||
let archive = Archive::open_bytes(Arc::from(bytes.into_boxed_slice()), OpenOptions::default())
|
||||
.expect("open archive with corrupted sort index failed");
|
||||
|
||||
assert_eq!(archive.find("alpha"), Some(EntryId(0)));
|
||||
assert_eq!(archive.find("BETA"), Some(EntryId(1)));
|
||||
assert_eq!(archive.find("gamma"), Some(EntryId(2)));
|
||||
assert_eq!(archive.find("missing"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nres_validation_error_cases() {
|
||||
let valid = build_nres_bytes(&[SyntheticEntry {
|
||||
|
||||
@@ -4,4 +4,5 @@ version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
common = { path = "../common" }
|
||||
flate2 = { version = "1", default-features = false, features = ["rust_backend"] }
|
||||
|
||||
19
crates/rsli/src/compress/deflate.rs
Normal file
19
crates/rsli/src/compress/deflate.rs
Normal file
@@ -0,0 +1,19 @@
|
||||
use crate::error::Error;
|
||||
use crate::Result;
|
||||
use flate2::read::{DeflateDecoder, ZlibDecoder};
|
||||
use std::io::Read;
|
||||
|
||||
/// Decode Deflate or Zlib compressed data
///
/// Tries the input as a raw-deflate stream first; if that read fails,
/// retries the same bytes as a zlib-wrapped stream. Any partial output
/// produced by the failed first attempt is discarded before the retry.
///
/// # Errors
/// Returns `Error::DecompressionFailed("deflate")` when neither decoder
/// accepts the input.
pub fn decode_deflate(packed: &[u8]) -> Result<Vec<u8>> {
    let mut out = Vec::new();
    let mut decoder = DeflateDecoder::new(packed);
    if decoder.read_to_end(&mut out).is_ok() {
        return Ok(out);
    }

    // Raw deflate failed — drop whatever was partially written and retry
    // interpreting the buffer as zlib (deflate + 2-byte header/Adler32).
    out.clear();
    let mut zlib = ZlibDecoder::new(packed);
    zlib.read_to_end(&mut out)
        .map_err(|_| Error::DecompressionFailed("deflate"))?;
    Ok(out)
}
|
||||
298
crates/rsli/src/compress/lzh.rs
Normal file
298
crates/rsli/src/compress/lzh.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
use super::xor::XorState;
|
||||
use crate::error::Error;
|
||||
use crate::Result;
|
||||
|
||||
/// Sliding-window size for the LZSS stage: 4 KiB, a power of two so
/// ring positions can be wrapped with `& (LZH_N - 1)`.
pub(crate) const LZH_N: usize = 4096;
/// Maximum match length copied from the window.
pub(crate) const LZH_F: usize = 60;
/// Matches of this length or shorter are emitted as literals instead.
pub(crate) const LZH_THRESHOLD: usize = 2;
/// Huffman alphabet size: 256 literals plus the (LZH_F - LZH_THRESHOLD)
/// match-length codes = 314 symbols.
pub(crate) const LZH_N_CHAR: usize = 256 - LZH_THRESHOLD + LZH_F;
/// Total node count of the Huffman tree (leaves + internal nodes).
pub(crate) const LZH_T: usize = LZH_N_CHAR * 2 - 1;
/// Index of the tree root node.
pub(crate) const LZH_R: usize = LZH_T - 1;
/// When the root's frequency reaches this value the tree is rebuilt with
/// halved frequencies, keeping the u16 counters from saturating.
pub(crate) const LZH_MAX_FREQ: u16 = 0x8000;

/// LZSS-Huffman decompression with optional on-the-fly XOR decryption.
///
/// Decodes exactly `expected_size` bytes from `data`. When `xor_key` is
/// `Some`, each input byte is decrypted with the rolling XOR cipher as it
/// is consumed by the bit reader.
///
/// # Errors
/// Returns `Error::DecompressionFailed("lzss-huffman")` if fewer than
/// `expected_size` bytes were produced. (The bit reader yields zero bits
/// past EOF, so this trailing check is largely defensive.)
pub fn lzss_huffman_decompress(
    data: &[u8],
    expected_size: usize,
    xor_key: Option<u16>,
) -> Result<Vec<u8>> {
    let mut decoder = LzhDecoder::new(data, xor_key);
    decoder.decode(expected_size)
}
|
||||
|
||||
/// Streaming decoder state for the adaptive-Huffman LZSS variant.
struct LzhDecoder<'a> {
    /// Bit-granular input reader (optionally XOR-decrypting).
    bit_reader: BitReader<'a>,
    /// Sliding-window history ring buffer.
    text: [u8; LZH_N],
    /// Node frequencies; the extra slot at `LZH_T` holds a `u16::MAX` sentinel.
    freq: [u16; LZH_T + 1],
    /// Parent links for tree nodes and for leaf symbols (stored at `symbol + LZH_T`).
    parent: [usize; LZH_T + LZH_N_CHAR],
    /// Child links; a value `>= LZH_T` denotes a leaf symbol.
    son: [usize; LZH_T],
    /// High-6-bits lookup for match positions, keyed by the first position byte.
    d_code: [u8; 256],
    /// Total bit length of a position code, keyed by the first position byte.
    d_len: [u8; 256],
    /// Current write position inside the ring buffer.
    ring_pos: usize,
}
|
||||
|
||||
impl<'a> LzhDecoder<'a> {
    /// Build a decoder over `data` with all tables initialised and the
    /// adaptive Huffman tree in its balanced starting state.
    fn new(data: &'a [u8], xor_key: Option<u16>) -> Self {
        let mut decoder = Self {
            bit_reader: BitReader::new(data, xor_key),
            // Window starts filled with spaces (classic LZSS convention).
            text: [0x20u8; LZH_N],
            freq: [0u16; LZH_T + 1],
            parent: [0usize; LZH_T + LZH_N_CHAR],
            son: [0usize; LZH_T],
            d_code: [0u8; 256],
            d_len: [0u8; 256],
            // First write lands LZH_F bytes before the end of the window.
            ring_pos: LZH_N - LZH_F,
        };
        decoder.init_tables();
        decoder.start_huff();
        decoder
    }

    /// Decode exactly `expected_size` bytes: symbols < 256 are literals,
    /// symbols >= 256 encode a (position, length) back-reference.
    fn decode(&mut self, expected_size: usize) -> Result<Vec<u8>> {
        let mut out = Vec::with_capacity(expected_size);

        while out.len() < expected_size {
            let c = self.decode_char();
            if c < 256 {
                // Literal byte: emit and record in the window.
                let byte = c as u8;
                out.push(byte);
                self.text[self.ring_pos] = byte;
                self.ring_pos = (self.ring_pos + 1) & (LZH_N - 1);
            } else {
                // Back-reference: decode_position() yields a backward
                // distance; convert it to an absolute window index.
                let mut offset = self.decode_position();
                offset = (self.ring_pos.wrapping_sub(offset).wrapping_sub(1)) & (LZH_N - 1);
                // Symbols 256.. map to lengths THRESHOLD+1 ..= LZH_F (3..=60).
                let mut length = c.saturating_sub(253);

                while length > 0 && out.len() < expected_size {
                    let byte = self.text[offset];
                    out.push(byte);
                    self.text[self.ring_pos] = byte;
                    self.ring_pos = (self.ring_pos + 1) & (LZH_N - 1);
                    offset = (offset + 1) & (LZH_N - 1);
                    length -= 1;
                }
            }
        }

        // Defensive: the loop above only stops at expected_size, so this
        // should not trigger; kept as a guard against logic drift.
        if out.len() != expected_size {
            return Err(Error::DecompressionFailed("lzss-huffman"));
        }
        Ok(out)
    }

    /// Fill the position-decoding tables. Both group-count arrays cover
    /// all 256 first-byte values (32+48+64+48+48+16 = 256): d_code holds
    /// the upper 6 distance bits, d_len the total code length (3..=8).
    fn init_tables(&mut self) {
        let d_code_group_counts = [1usize, 3, 8, 12, 24, 16];
        let d_len_group_counts = [32usize, 48, 64, 48, 48, 16];

        let mut group_index = 0u8;
        let mut idx = 0usize;
        let mut run = 32usize;
        for count in d_code_group_counts {
            for _ in 0..count {
                // Each code value repeats `run` times; run halves per group,
                // giving shorter codes to more frequent (smaller) distances.
                for _ in 0..run {
                    self.d_code[idx] = group_index;
                    idx += 1;
                }
                group_index = group_index.wrapping_add(1);
            }
            run >>= 1;
        }

        let mut len = 3u8;
        idx = 0;
        for count in d_len_group_counts {
            for _ in 0..count {
                self.d_len[idx] = len;
                idx += 1;
            }
            len = len.saturating_add(1);
        }
    }

    /// Initialise the Huffman tree: one leaf per symbol with frequency 1,
    /// then pair nodes bottom-up until the root at LZH_R is formed.
    fn start_huff(&mut self) {
        for i in 0..LZH_N_CHAR {
            self.freq[i] = 1;
            self.son[i] = i + LZH_T;
            self.parent[i + LZH_T] = i;
        }

        let mut i = 0usize;
        let mut j = LZH_N_CHAR;
        while j <= LZH_R {
            self.freq[j] = self.freq[i].saturating_add(self.freq[i + 1]);
            self.son[j] = i;
            self.parent[i] = j;
            self.parent[i + 1] = j;
            i += 2;
            j += 1;
        }

        // Sentinel stops frequency-ordered scans; root has no parent.
        self.freq[LZH_T] = u16::MAX;
        self.parent[LZH_R] = 0;
    }

    /// Walk the tree from the root, one input bit per level, until a leaf
    /// (`node >= LZH_T`) is reached; then update the adaptive tree.
    fn decode_char(&mut self) -> usize {
        let mut node = self.son[LZH_R];
        while node < LZH_T {
            let bit = usize::from(self.bit_reader.read_bit_or_zero());
            node = self.son[node + bit];
        }

        let c = node - LZH_T;
        self.update(c);
        c
    }

    /// Decode a match distance: the first byte selects the upper 6 bits
    /// (d_code) and the code length (d_len); the remaining bits are read
    /// individually and the low 6 bits of the first byte are OR-ed in.
    ///
    /// NOTE(review): canonical LZHUF's DecodePosition shifts the first
    /// byte left as each extra bit arrives before masking with 0x3F; this
    /// version ORs the extra bits into fixed positions instead. Confirm
    /// this matches the target format's encoder.
    fn decode_position(&mut self) -> usize {
        let i = self.bit_reader.read_bits_or_zero(8) as usize;
        let mut c = usize::from(self.d_code[i]) << 6;
        let mut j = usize::from(self.d_len[i]).saturating_sub(2);

        while j > 0 {
            j -= 1;
            c |= usize::from(self.bit_reader.read_bit_or_zero()) << j;
        }

        c | (i & 0x3F)
    }

    /// Increment symbol `c`'s frequency and restore the sibling property:
    /// walking up from the leaf, any node whose new count exceeds a
    /// higher-indexed sibling is swapped forward (children and parent
    /// links included) so the array stays sorted by frequency.
    fn update(&mut self, c: usize) {
        if self.freq[LZH_R] == LZH_MAX_FREQ {
            self.reconstruct();
        }

        let mut current = self.parent[c + LZH_T];
        loop {
            self.freq[current] = self.freq[current].saturating_add(1);
            let freq = self.freq[current];

            if current + 1 < self.freq.len() && freq > self.freq[current + 1] {
                // Find the last node whose frequency is still below ours;
                // the u16::MAX sentinel at LZH_T bounds the scan.
                let mut swap_idx = current + 1;
                while swap_idx + 1 < self.freq.len() && freq > self.freq[swap_idx + 1] {
                    swap_idx += 1;
                }

                self.freq.swap(current, swap_idx);

                // Exchange subtrees and repoint the children's parent links.
                let left = self.son[current];
                let right = self.son[swap_idx];
                self.son[current] = right;
                self.son[swap_idx] = left;

                self.parent[left] = swap_idx;
                if left < LZH_T {
                    // Internal node: its sibling shares the same parent.
                    self.parent[left + 1] = swap_idx;
                }

                self.parent[right] = current;
                if right < LZH_T {
                    self.parent[right + 1] = current;
                }

                current = swap_idx;
            }

            current = self.parent[current];
            if current == 0 {
                break;
            }
        }
    }

    /// Rebuild the whole tree once counts saturate: collect the leaves
    /// with halved frequencies, re-pair them in frequency order (using
    /// insertion to keep the array sorted), then recompute parent links.
    fn reconstruct(&mut self) {
        // Compact all leaves to the front, halving each frequency
        // (rounding up) so history decays but no count reaches zero.
        let mut j = 0usize;
        for i in 0..LZH_T {
            if self.son[i] >= LZH_T {
                self.freq[j] = (self.freq[i].saturating_add(1)) / 2;
                self.son[j] = self.son[i];
                j += 1;
            }
        }

        // Rebuild internal nodes pairwise, inserting each new node at its
        // sorted position by shifting larger entries up one slot.
        let mut i = 0usize;
        let mut current = LZH_N_CHAR;
        while current < LZH_T {
            let sum = self.freq[i].saturating_add(self.freq[i + 1]);
            self.freq[current] = sum;

            let mut insert_at = current;
            while insert_at > 0 && sum < self.freq[insert_at - 1] {
                insert_at -= 1;
            }

            for move_idx in (insert_at..current).rev() {
                self.freq[move_idx + 1] = self.freq[move_idx];
                self.son[move_idx + 1] = self.son[move_idx];
            }

            self.freq[insert_at] = sum;
            self.son[insert_at] = i;

            i += 2;
            current += 1;
        }

        // Recompute every parent link from the son array; an internal
        // child's sibling (node + 1) shares the same parent.
        for idx in 0..LZH_T {
            let node = self.son[idx];
            self.parent[node] = idx;
            if node < LZH_T {
                self.parent[node + 1] = idx;
            }
        }

        self.freq[LZH_T] = u16::MAX;
        self.parent[LZH_R] = 0;
    }
}
|
||||
|
||||
/// MSB-first bit reader over a byte slice, with optional on-the-fly
/// XOR decryption of each source byte.
struct BitReader<'a> {
    /// Underlying (possibly encrypted) input bytes.
    data: &'a [u8],
    /// Index of the byte currently being consumed.
    byte_pos: usize,
    /// Mask of the next bit to deliver; 0x80 means "load a fresh byte".
    bit_mask: u8,
    /// The decrypted byte currently being drained bit by bit.
    current_byte: u8,
    /// Rolling XOR cipher state, present when a key was supplied.
    xor_state: Option<XorState>,
}
|
||||
|
||||
impl<'a> BitReader<'a> {
    /// Create a reader positioned at the first bit of `data`.
    fn new(data: &'a [u8], xor_key: Option<u16>) -> Self {
        Self {
            data,
            byte_pos: 0,
            bit_mask: 0x80,
            current_byte: 0,
            xor_state: xor_key.map(XorState::new),
        }
    }

    /// Read the next bit, most-significant first. Past end-of-input this
    /// returns 0 without consuming anything or touching the XOR state, so
    /// a truncated stream degrades to an endless run of zero bits rather
    /// than an error (callers validate output length instead).
    fn read_bit_or_zero(&mut self) -> u8 {
        if self.bit_mask == 0x80 {
            // Time to load (and, if keyed, decrypt) the next source byte.
            let Some(mut byte) = self.data.get(self.byte_pos).copied() else {
                return 0;
            };
            if let Some(state) = &mut self.xor_state {
                byte = state.decrypt_byte(byte);
            }
            self.current_byte = byte;
        }

        let bit = if (self.current_byte & self.bit_mask) != 0 {
            1
        } else {
            0
        };
        self.bit_mask >>= 1;
        if self.bit_mask == 0 {
            // Byte exhausted: advance and arm the reload on the next call.
            self.bit_mask = 0x80;
            self.byte_pos = self.byte_pos.saturating_add(1);
        }
        bit
    }

    /// Read `bits` bits MSB-first into the low end of a u32; missing
    /// input contributes zero bits (see `read_bit_or_zero`).
    fn read_bits_or_zero(&mut self, bits: usize) -> u32 {
        let mut value = 0u32;
        for _ in 0..bits {
            value = (value << 1) | u32::from(self.read_bit_or_zero());
        }
        value
    }
}
|
||||
79
crates/rsli/src/compress/lzss.rs
Normal file
79
crates/rsli/src/compress/lzss.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
use super::xor::XorState;
|
||||
use crate::error::Error;
|
||||
use crate::Result;
|
||||
|
||||
/// Simple LZSS decompression with optional on-the-fly XOR decryption
///
/// Classic byte-oriented LZSS: a control byte supplies 8 flags (LSB
/// first); flag 1 = literal byte, flag 0 = two-byte back-reference into a
/// 4 KiB ring buffer pre-filled with spaces, with the write position
/// starting at 0xFEE. When `xor_key` is `Some`, every input byte is
/// decrypted in stream order before interpretation.
///
/// # Errors
/// Returns `Error::DecompressionFailed` if the input ends before
/// `expected_size` output bytes are produced.
pub fn lzss_decompress_simple(
    data: &[u8],
    expected_size: usize,
    xor_key: Option<u16>,
) -> Result<Vec<u8>> {
    let mut ring = [0x20u8; 0x1000];
    let mut ring_pos = 0xFEEusize;
    let mut out = Vec::with_capacity(expected_size);
    let mut in_pos = 0usize;

    let mut control = 0u8;
    let mut bits_left = 0u8;

    // XOR state for on-the-fly decryption
    let mut xor_state = xor_key.map(XorState::new);

    // Helper to read byte with optional XOR decryption
    let read_byte = |pos: usize, state: &mut Option<XorState>| -> Option<u8> {
        let encrypted = data.get(pos).copied()?;
        Some(if let Some(ref mut s) = state {
            s.decrypt_byte(encrypted)
        } else {
            encrypted
        })
    };

    while out.len() < expected_size {
        if bits_left == 0 {
            // Refill the 8-flag control byte.
            let byte = read_byte(in_pos, &mut xor_state)
                .ok_or(Error::DecompressionFailed("lzss-simple: unexpected EOF"))?;
            control = byte;
            in_pos += 1;
            bits_left = 8;
        }

        if (control & 1) != 0 {
            // Literal: copy one byte straight through, recording it in
            // the ring so later matches can reference it.
            let byte = read_byte(in_pos, &mut xor_state)
                .ok_or(Error::DecompressionFailed("lzss-simple: unexpected EOF"))?;
            in_pos += 1;

            out.push(byte);
            ring[ring_pos] = byte;
            ring_pos = (ring_pos + 1) & 0x0FFF;
        } else {
            // Back-reference: 12-bit ring offset (low byte + high nibble)
            // and 4-bit length with a +3 bias (lengths 3..=18).
            let low = read_byte(in_pos, &mut xor_state)
                .ok_or(Error::DecompressionFailed("lzss-simple: unexpected EOF"))?;
            let high = read_byte(in_pos + 1, &mut xor_state)
                .ok_or(Error::DecompressionFailed("lzss-simple: unexpected EOF"))?;
            in_pos += 2;

            let offset = usize::from(low) | (usize::from(high & 0xF0) << 4);
            let length = usize::from((high & 0x0F) + 3);

            for step in 0..length {
                // Read through the ring as we write: overlapping copies
                // (offset close to ring_pos) reproduce bytes just emitted.
                let byte = ring[(offset + step) & 0x0FFF];
                out.push(byte);
                ring[ring_pos] = byte;
                ring_pos = (ring_pos + 1) & 0x0FFF;
                if out.len() >= expected_size {
                    break;
                }
            }
        }

        control >>= 1;
        bits_left -= 1;
    }

    // Defensive: the loop only exits at expected_size, but keep the
    // explicit check as a guard against logic drift.
    if out.len() != expected_size {
        return Err(Error::DecompressionFailed("lzss-simple"));
    }

    Ok(out)
}
|
||||
9
crates/rsli/src/compress/mod.rs
Normal file
9
crates/rsli/src/compress/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
pub mod deflate;
|
||||
pub mod lzh;
|
||||
pub mod lzss;
|
||||
pub mod xor;
|
||||
|
||||
pub use deflate::decode_deflate;
|
||||
pub use lzh::lzss_huffman_decompress;
|
||||
pub use lzss::lzss_decompress_simple;
|
||||
pub use xor::{xor_stream, XorState};
|
||||
29
crates/rsli/src/compress/xor.rs
Normal file
29
crates/rsli/src/compress/xor.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
/// Rolling XOR keystream cipher used by the RsLi container format.
///
/// The keystream evolves from the 16-bit seed alone and never depends on
/// the ciphertext, so applying the cipher twice with the same key is the
/// identity transform.
pub struct XorState {
    lo: u8,
    hi: u8,
}

impl XorState {
    /// Seed the keystream from a 16-bit key (low byte / high byte split).
    pub fn new(key16: u16) -> Self {
        let [lo, hi] = key16.to_le_bytes();
        Self { lo, hi }
    }

    /// Advance the keystream one step and XOR the new low byte into
    /// `encrypted`, returning the plaintext byte.
    pub fn decrypt_byte(&mut self, encrypted: u8) -> u8 {
        // Shift of 1 on u8 cannot panic; high bit is simply discarded,
        // matching the format's wrapping semantics.
        let next_lo = self.hi ^ (self.lo << 1);
        let next_hi = next_lo ^ (self.hi >> 1);
        self.lo = next_lo;
        self.hi = next_hi;
        encrypted ^ next_lo
    }
}

/// Run the XOR stream cipher over an entire buffer, returning the
/// transformed bytes (decryption and encryption are the same operation).
pub fn xor_stream(data: &[u8], key16: u16) -> Vec<u8> {
    let mut state = XorState::new(key16);
    let mut out = Vec::with_capacity(data.len());
    for &byte in data {
        out.push(state.decrypt_byte(byte));
    }
    out
}
|
||||
@@ -1,41 +0,0 @@
|
||||
use std::io;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ResourceData<'a> {
|
||||
Borrowed(&'a [u8]),
|
||||
Owned(Vec<u8>),
|
||||
}
|
||||
|
||||
impl<'a> ResourceData<'a> {
|
||||
pub fn as_slice(&self) -> &[u8] {
|
||||
match self {
|
||||
Self::Borrowed(slice) => slice,
|
||||
Self::Owned(buf) => buf.as_slice(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_owned(self) -> Vec<u8> {
|
||||
match self {
|
||||
Self::Borrowed(slice) => slice.to_vec(),
|
||||
Self::Owned(buf) => buf,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<[u8]> for ResourceData<'_> {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait OutputBuffer {
|
||||
fn write_exact(&mut self, data: &[u8]) -> io::Result<()>;
|
||||
}
|
||||
|
||||
impl OutputBuffer for Vec<u8> {
|
||||
fn write_exact(&mut self, data: &[u8]) -> io::Result<()> {
|
||||
self.clear();
|
||||
self.extend_from_slice(data);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,9 @@ pub enum Error {
|
||||
InvalidEntryCount {
|
||||
got: i16,
|
||||
},
|
||||
TooManyEntries {
|
||||
got: usize,
|
||||
},
|
||||
|
||||
EntryTableOutOfBounds {
|
||||
table_offset: u64,
|
||||
@@ -75,6 +78,7 @@ impl fmt::Display for Error {
|
||||
Error::InvalidMagic { got } => write!(f, "invalid RsLi magic: {got:02X?}"),
|
||||
Error::UnsupportedVersion { got } => write!(f, "unsupported RsLi version: {got:#x}"),
|
||||
Error::InvalidEntryCount { got } => write!(f, "invalid entry_count: {got}"),
|
||||
Error::TooManyEntries { got } => write!(f, "too many entries: {got} exceeds u32::MAX"),
|
||||
Error::EntryTableOutOfBounds {
|
||||
table_offset,
|
||||
table_len,
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
pub mod data;
|
||||
pub mod compress;
|
||||
pub mod error;
|
||||
pub mod parse;
|
||||
|
||||
use crate::data::{OutputBuffer, ResourceData};
|
||||
use crate::compress::{
|
||||
decode_deflate, lzss_decompress_simple, lzss_huffman_decompress, xor_stream,
|
||||
};
|
||||
use crate::error::Error;
|
||||
use flate2::read::{DeflateDecoder, ZlibDecoder};
|
||||
use crate::parse::{c_name_bytes, cmp_c_string, parse_library};
|
||||
use common::{OutputBuffer, ResourceData};
|
||||
use std::cmp::Ordering;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -32,15 +35,15 @@ pub struct Library {
|
||||
bytes: Arc<[u8]>,
|
||||
entries: Vec<EntryRecord>,
|
||||
#[cfg(test)]
|
||||
header_raw: [u8; 32],
|
||||
pub(crate) header_raw: [u8; 32],
|
||||
#[cfg(test)]
|
||||
table_plain_original: Vec<u8>,
|
||||
pub(crate) table_plain_original: Vec<u8>,
|
||||
#[cfg(test)]
|
||||
xor_seed: u32,
|
||||
pub(crate) xor_seed: u32,
|
||||
#[cfg(test)]
|
||||
source_size: usize,
|
||||
pub(crate) source_size: usize,
|
||||
#[cfg(test)]
|
||||
trailer_raw: Option<[u8; 6]>,
|
||||
pub(crate) trailer_raw: Option<[u8; 6]>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
@@ -80,16 +83,16 @@ pub struct PackedResource {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct EntryRecord {
|
||||
meta: EntryMeta,
|
||||
name_raw: [u8; 12],
|
||||
sort_to_original: i16,
|
||||
key16: u16,
|
||||
pub(crate) struct EntryRecord {
|
||||
pub(crate) meta: EntryMeta,
|
||||
pub(crate) name_raw: [u8; 12],
|
||||
pub(crate) sort_to_original: i16,
|
||||
pub(crate) key16: u16,
|
||||
#[cfg(test)]
|
||||
data_offset_raw: u32,
|
||||
packed_size_declared: u32,
|
||||
packed_size_available: usize,
|
||||
effective_offset: usize,
|
||||
pub(crate) data_offset_raw: u32,
|
||||
pub(crate) packed_size_declared: u32,
|
||||
pub(crate) packed_size_available: usize,
|
||||
pub(crate) effective_offset: usize,
|
||||
}
|
||||
|
||||
impl Library {
|
||||
@@ -112,7 +115,7 @@ impl Library {
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, entry)| EntryRef {
|
||||
id: EntryId(idx as u32),
|
||||
id: EntryId(u32::try_from(idx).expect("entry count validated at parse")),
|
||||
meta: &entry.meta,
|
||||
})
|
||||
}
|
||||
@@ -122,9 +125,24 @@ impl Library {
|
||||
return None;
|
||||
}
|
||||
|
||||
let query = name.to_ascii_uppercase();
|
||||
let query_bytes = query.as_bytes();
|
||||
const MAX_INLINE_NAME: usize = 12;
|
||||
|
||||
// Fast path: use stack allocation for short ASCII names (95% of cases)
|
||||
if name.len() <= MAX_INLINE_NAME && name.is_ascii() {
|
||||
let mut buf = [0u8; MAX_INLINE_NAME];
|
||||
for (i, &b) in name.as_bytes().iter().enumerate() {
|
||||
buf[i] = b.to_ascii_uppercase();
|
||||
}
|
||||
return self.find_impl(&buf[..name.len()]);
|
||||
}
|
||||
|
||||
// Slow path: heap allocation for long or non-ASCII names
|
||||
let query = name.to_ascii_uppercase();
|
||||
self.find_impl(query.as_bytes())
|
||||
}
|
||||
|
||||
fn find_impl(&self, query_bytes: &[u8]) -> Option<EntryId> {
|
||||
// Binary search
|
||||
let mut low = 0usize;
|
||||
let mut high = self.entries.len();
|
||||
while low < high {
|
||||
@@ -142,13 +160,20 @@ impl Library {
|
||||
match cmp {
|
||||
Ordering::Less => high = mid,
|
||||
Ordering::Greater => low = mid + 1,
|
||||
Ordering::Equal => return Some(EntryId(idx as u32)),
|
||||
Ordering::Equal => {
|
||||
return Some(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Linear fallback search
|
||||
self.entries.iter().enumerate().find_map(|(idx, entry)| {
|
||||
if cmp_c_string(query_bytes, c_name_bytes(&entry.name_raw)) == Ordering::Equal {
|
||||
Some(EntryId(idx as u32))
|
||||
Some(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -260,7 +285,7 @@ impl Library {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn rebuild_from_parsed_metadata(&self) -> Result<Vec<u8>> {
|
||||
pub(crate) fn rebuild_from_parsed_metadata(&self) -> Result<Vec<u8>> {
|
||||
let trailer_len = usize::from(self.trailer_raw.is_some()) * 6;
|
||||
let pre_trailer_size = self
|
||||
.source_size
|
||||
@@ -292,14 +317,18 @@ impl Library {
|
||||
}
|
||||
|
||||
for (idx, entry) in self.entries.iter().enumerate() {
|
||||
let packed = self.load_packed(EntryId(idx as u32))?.packed;
|
||||
let packed = self
|
||||
.load_packed(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))?
|
||||
.packed;
|
||||
let start =
|
||||
usize::try_from(entry.data_offset_raw).map_err(|_| Error::IntegerOverflow)?;
|
||||
for (offset, byte) in packed.iter().copied().enumerate() {
|
||||
let pos = start.checked_add(offset).ok_or(Error::IntegerOverflow)?;
|
||||
if pos >= out.len() {
|
||||
return Err(Error::PackedSizePastEof {
|
||||
id: idx as u32,
|
||||
id: u32::try_from(idx).expect("entry count validated at parse"),
|
||||
offset: u64::from(entry.data_offset_raw),
|
||||
packed_size: entry.packed_size_declared,
|
||||
file_len: u64::try_from(out.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
@@ -320,222 +349,6 @@ impl Library {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
|
||||
if bytes.len() < 32 {
|
||||
return Err(Error::EntryTableOutOfBounds {
|
||||
table_offset: 32,
|
||||
table_len: 0,
|
||||
file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
});
|
||||
}
|
||||
|
||||
let mut header_raw = [0u8; 32];
|
||||
header_raw.copy_from_slice(&bytes[0..32]);
|
||||
|
||||
if &bytes[0..2] != b"NL" {
|
||||
let mut got = [0u8; 2];
|
||||
got.copy_from_slice(&bytes[0..2]);
|
||||
return Err(Error::InvalidMagic { got });
|
||||
}
|
||||
if bytes[3] != 0x01 {
|
||||
return Err(Error::UnsupportedVersion { got: bytes[3] });
|
||||
}
|
||||
|
||||
let entry_count = i16::from_le_bytes([bytes[4], bytes[5]]);
|
||||
if entry_count < 0 {
|
||||
return Err(Error::InvalidEntryCount { got: entry_count });
|
||||
}
|
||||
let count = usize::try_from(entry_count).map_err(|_| Error::IntegerOverflow)?;
|
||||
|
||||
let xor_seed = u32::from_le_bytes([bytes[20], bytes[21], bytes[22], bytes[23]]);
|
||||
|
||||
let table_len = count.checked_mul(32).ok_or(Error::IntegerOverflow)?;
|
||||
let table_offset = 32usize;
|
||||
let table_end = table_offset
|
||||
.checked_add(table_len)
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
if table_end > bytes.len() {
|
||||
return Err(Error::EntryTableOutOfBounds {
|
||||
table_offset: u64::try_from(table_offset).map_err(|_| Error::IntegerOverflow)?,
|
||||
table_len: u64::try_from(table_len).map_err(|_| Error::IntegerOverflow)?,
|
||||
file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
});
|
||||
}
|
||||
|
||||
let table_enc = &bytes[table_offset..table_end];
|
||||
let table_plain_original = xor_stream(table_enc, (xor_seed & 0xFFFF) as u16);
|
||||
if table_plain_original.len() != table_len {
|
||||
return Err(Error::EntryTableDecryptFailed);
|
||||
}
|
||||
|
||||
let (overlay, trailer_raw) = parse_ao_trailer(&bytes, opts.allow_ao_trailer)?;
|
||||
#[cfg(not(test))]
|
||||
let _ = trailer_raw;
|
||||
|
||||
let mut entries = Vec::with_capacity(count);
|
||||
for idx in 0..count {
|
||||
let row = &table_plain_original[idx * 32..(idx + 1) * 32];
|
||||
|
||||
let mut name_raw = [0u8; 12];
|
||||
name_raw.copy_from_slice(&row[0..12]);
|
||||
|
||||
let flags_signed = i16::from_le_bytes([row[16], row[17]]);
|
||||
let sort_to_original = i16::from_le_bytes([row[18], row[19]]);
|
||||
let unpacked_size = u32::from_le_bytes([row[20], row[21], row[22], row[23]]);
|
||||
let data_offset_raw = u32::from_le_bytes([row[24], row[25], row[26], row[27]]);
|
||||
let packed_size_declared = u32::from_le_bytes([row[28], row[29], row[30], row[31]]);
|
||||
|
||||
let method_raw = (flags_signed as u16 as u32) & 0x1E0;
|
||||
let method = parse_method(method_raw);
|
||||
|
||||
let effective_offset_u64 = u64::from(data_offset_raw)
|
||||
.checked_add(u64::from(overlay))
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
let effective_offset =
|
||||
usize::try_from(effective_offset_u64).map_err(|_| Error::IntegerOverflow)?;
|
||||
|
||||
let packed_size_usize =
|
||||
usize::try_from(packed_size_declared).map_err(|_| Error::IntegerOverflow)?;
|
||||
let mut packed_size_available = packed_size_usize;
|
||||
|
||||
let end = effective_offset_u64
|
||||
.checked_add(u64::from(packed_size_declared))
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
let file_len_u64 = u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?;
|
||||
|
||||
if end > file_len_u64 {
|
||||
if method_raw == 0x100 && end == file_len_u64 + 1 {
|
||||
if opts.allow_deflate_eof_plus_one {
|
||||
packed_size_available = packed_size_available
|
||||
.checked_sub(1)
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
} else {
|
||||
return Err(Error::DeflateEofPlusOneQuirkRejected { id: idx as u32 });
|
||||
}
|
||||
} else {
|
||||
return Err(Error::PackedSizePastEof {
|
||||
id: idx as u32,
|
||||
offset: effective_offset_u64,
|
||||
packed_size: packed_size_declared,
|
||||
file_len: file_len_u64,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let available_end = effective_offset
|
||||
.checked_add(packed_size_available)
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
if available_end > bytes.len() {
|
||||
return Err(Error::EntryDataOutOfBounds {
|
||||
id: idx as u32,
|
||||
offset: effective_offset_u64,
|
||||
size: packed_size_declared,
|
||||
file_len: file_len_u64,
|
||||
});
|
||||
}
|
||||
|
||||
let name = decode_name(c_name_bytes(&name_raw));
|
||||
|
||||
entries.push(EntryRecord {
|
||||
meta: EntryMeta {
|
||||
name,
|
||||
flags: i32::from(flags_signed),
|
||||
method,
|
||||
data_offset: effective_offset_u64,
|
||||
packed_size: packed_size_declared,
|
||||
unpacked_size,
|
||||
},
|
||||
name_raw,
|
||||
sort_to_original,
|
||||
key16: sort_to_original as u16,
|
||||
#[cfg(test)]
|
||||
data_offset_raw,
|
||||
packed_size_declared,
|
||||
packed_size_available,
|
||||
effective_offset,
|
||||
});
|
||||
}
|
||||
|
||||
let presorted_flag = u16::from_le_bytes([bytes[14], bytes[15]]);
|
||||
if presorted_flag == 0xABBA {
|
||||
for entry in &entries {
|
||||
let idx = i32::from(entry.sort_to_original);
|
||||
if idx < 0 || usize::try_from(idx).map_err(|_| Error::IntegerOverflow)? >= count {
|
||||
return Err(Error::CorruptEntryTable(
|
||||
"sort_to_original is not a valid permutation index",
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let mut sorted: Vec<usize> = (0..count).collect();
|
||||
sorted.sort_by(|a, b| {
|
||||
cmp_c_string(
|
||||
c_name_bytes(&entries[*a].name_raw),
|
||||
c_name_bytes(&entries[*b].name_raw),
|
||||
)
|
||||
});
|
||||
for (idx, entry) in entries.iter_mut().enumerate() {
|
||||
entry.sort_to_original =
|
||||
i16::try_from(sorted[idx]).map_err(|_| Error::IntegerOverflow)?;
|
||||
entry.key16 = entry.sort_to_original as u16;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
let source_size = bytes.len();
|
||||
|
||||
Ok(Library {
|
||||
bytes,
|
||||
entries,
|
||||
#[cfg(test)]
|
||||
header_raw,
|
||||
#[cfg(test)]
|
||||
table_plain_original,
|
||||
#[cfg(test)]
|
||||
xor_seed,
|
||||
#[cfg(test)]
|
||||
source_size,
|
||||
#[cfg(test)]
|
||||
trailer_raw,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_ao_trailer(bytes: &[u8], allow: bool) -> Result<(u32, Option<[u8; 6]>)> {
|
||||
if !allow || bytes.len() < 6 {
|
||||
return Ok((0, None));
|
||||
}
|
||||
|
||||
if &bytes[bytes.len() - 6..bytes.len() - 4] != b"AO" {
|
||||
return Ok((0, None));
|
||||
}
|
||||
|
||||
let mut trailer = [0u8; 6];
|
||||
trailer.copy_from_slice(&bytes[bytes.len() - 6..]);
|
||||
let overlay = u32::from_le_bytes([trailer[2], trailer[3], trailer[4], trailer[5]]);
|
||||
|
||||
if u64::from(overlay) > u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)? {
|
||||
return Err(Error::MediaOverlayOutOfBounds {
|
||||
overlay,
|
||||
file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
});
|
||||
}
|
||||
|
||||
Ok((overlay, Some(trailer)))
|
||||
}
|
||||
|
||||
fn parse_method(raw: u32) -> PackMethod {
|
||||
match raw {
|
||||
0x000 => PackMethod::None,
|
||||
0x020 => PackMethod::XorOnly,
|
||||
0x040 => PackMethod::Lzss,
|
||||
0x060 => PackMethod::XorLzss,
|
||||
0x080 => PackMethod::LzssHuffman,
|
||||
0x0A0 => PackMethod::XorLzssHuffman,
|
||||
0x100 => PackMethod::Deflate,
|
||||
other => PackMethod::Unknown(other),
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_payload(
|
||||
packed: &[u8],
|
||||
method: PackMethod,
|
||||
@@ -563,15 +376,15 @@ fn decode_payload(
|
||||
}
|
||||
xor_stream(&packed[..expected], key16)
|
||||
}
|
||||
PackMethod::Lzss => lzss_decompress_simple(packed, expected)?,
|
||||
PackMethod::Lzss => lzss_decompress_simple(packed, expected, None)?,
|
||||
PackMethod::XorLzss => {
|
||||
let decrypted = xor_stream(packed, key16);
|
||||
lzss_decompress_simple(&decrypted, expected)?
|
||||
// Optimized: XOR on-the-fly during decompression instead of creating temp buffer
|
||||
lzss_decompress_simple(packed, expected, Some(key16))?
|
||||
}
|
||||
PackMethod::LzssHuffman => lzss_huffman_decompress(packed, expected)?,
|
||||
PackMethod::LzssHuffman => lzss_huffman_decompress(packed, expected, None)?,
|
||||
PackMethod::XorLzssHuffman => {
|
||||
let decrypted = xor_stream(packed, key16);
|
||||
lzss_huffman_decompress(&decrypted, expected)?
|
||||
// Optimized: XOR on-the-fly during decompression
|
||||
lzss_huffman_decompress(packed, expected, Some(key16))?
|
||||
}
|
||||
PackMethod::Deflate => decode_deflate(packed)?,
|
||||
PackMethod::Unknown(raw) => return Err(Error::UnsupportedMethod { raw }),
|
||||
@@ -587,391 +400,6 @@ fn decode_payload(
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn decode_deflate(packed: &[u8]) -> Result<Vec<u8>> {
|
||||
let mut out = Vec::new();
|
||||
let mut decoder = DeflateDecoder::new(packed);
|
||||
if decoder.read_to_end(&mut out).is_ok() {
|
||||
return Ok(out);
|
||||
}
|
||||
|
||||
out.clear();
|
||||
let mut zlib = ZlibDecoder::new(packed);
|
||||
zlib.read_to_end(&mut out)
|
||||
.map_err(|_| Error::DecompressionFailed("deflate"))?;
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
/// XOR `data` against the keystream generated from the 16-bit seed `key16`.
///
/// The generator keeps two bytes of state (the low and high halves of the
/// seed) and advances them once per input byte. The keystream depends only
/// on the key, so applying the same key twice restores the original bytes —
/// this routine both encrypts and decrypts.
fn xor_stream(data: &[u8], key16: u16) -> Vec<u8> {
    let mut state_lo = key16 as u8;
    let mut state_hi = (key16 >> 8) as u8;

    data.iter()
        .map(|&byte| {
            // Advance the keystream first, then mask the payload byte.
            state_lo = state_hi ^ state_lo.wrapping_shl(1);
            let masked = byte ^ state_lo;
            state_hi = state_lo ^ (state_hi >> 1);
            masked
        })
        .collect()
}
|
||||
|
||||
/// Decompress a classic LZSS stream (0x1000-byte ring buffer starting at
/// offset 0xFEE, window pre-filled with 0x20) into exactly `expected_size`
/// bytes.
///
/// Control bytes are consumed LSB-first: a 1-bit means "copy one literal
/// byte", a 0-bit means "copy an (offset, length) back-reference out of the
/// ring". Truncated input breaks out of the loop early; the final length
/// check turns that into `DecompressionFailed("lzss-simple")`.
fn lzss_decompress_simple(data: &[u8], expected_size: usize) -> Result<Vec<u8>> {
    // 4 KiB sliding window, space-filled as the format requires.
    let mut ring = [0x20u8; 0x1000];
    let mut ring_pos = 0xFEEusize;
    let mut out = Vec::with_capacity(expected_size);
    let mut in_pos = 0usize;

    // Current control byte and how many of its bits remain unconsumed.
    let mut control = 0u8;
    let mut bits_left = 0u8;

    while out.len() < expected_size {
        if bits_left == 0 {
            // Fetch the next control byte; EOF here ends decoding.
            let Some(byte) = data.get(in_pos).copied() else {
                break;
            };
            control = byte;
            in_pos += 1;
            bits_left = 8;
        }

        if (control & 1) != 0 {
            // Literal: one input byte goes to both the output and the ring.
            let Some(byte) = data.get(in_pos).copied() else {
                break;
            };
            in_pos += 1;

            out.push(byte);
            ring[ring_pos] = byte;
            ring_pos = (ring_pos + 1) & 0x0FFF;
        } else {
            // Back-reference: 12-bit ring offset plus a 3..=18 byte length,
            // packed into two bytes (high nibble of `high` extends the offset).
            let (Some(low), Some(high)) =
                (data.get(in_pos).copied(), data.get(in_pos + 1).copied())
            else {
                break;
            };
            in_pos += 2;

            let offset = usize::from(low) | (usize::from(high & 0xF0) << 4);
            let length = usize::from((high & 0x0F) + 3);

            for step in 0..length {
                let byte = ring[(offset + step) & 0x0FFF];
                out.push(byte);
                ring[ring_pos] = byte;
                ring_pos = (ring_pos + 1) & 0x0FFF;
                // Stop mid-match once the declared size is reached.
                if out.len() >= expected_size {
                    break;
                }
            }
        }

        control >>= 1;
        bits_left -= 1;
    }

    // Short output means the stream was truncated or corrupt.
    if out.len() != expected_size {
        return Err(Error::DecompressionFailed("lzss-simple"));
    }

    Ok(out)
}
|
||||
|
||||
// Parameters of the LZHUF (LZSS + adaptive Huffman) format.
const LZH_N: usize = 4096; // sliding-window / ring-buffer size
const LZH_F: usize = 60; // maximum match length
const LZH_THRESHOLD: usize = 2; // matches this short are stored as literals
const LZH_N_CHAR: usize = 256 - LZH_THRESHOLD + LZH_F; // literal+length alphabet size
const LZH_T: usize = LZH_N_CHAR * 2 - 1; // node count of the Huffman tree
const LZH_R: usize = LZH_T - 1; // index of the tree root
const LZH_MAX_FREQ: u16 = 0x8000; // root-frequency ceiling that triggers a rebuild
|
||||
|
||||
fn lzss_huffman_decompress(data: &[u8], expected_size: usize) -> Result<Vec<u8>> {
|
||||
let mut decoder = LzhDecoder::new(data);
|
||||
decoder.decode(expected_size)
|
||||
}
|
||||
|
||||
/// Streaming decoder state for the LZHUF (LZSS + adaptive Huffman) format.
struct LzhDecoder<'a> {
    // MSB-first bit source over the packed input.
    bit_reader: BitReader<'a>,
    // Ring buffer of the last LZH_N output bytes (match source).
    text: [u8; LZH_N],
    // Node frequencies; index LZH_T is a sentinel pinned to u16::MAX.
    freq: [u16; LZH_T + 1],
    // Parent links; leaves are addressed at index (symbol + LZH_T).
    parent: [usize; LZH_T + LZH_N_CHAR],
    // Child links; a value >= LZH_T denotes a leaf for symbol (value - LZH_T).
    son: [usize; LZH_T],
    // Upper-bits lookup table for position decoding.
    d_code: [u8; 256],
    // Bit-length lookup table for position decoding.
    d_len: [u8; 256],
    // Current write position inside `text`.
    ring_pos: usize,
}

impl<'a> LzhDecoder<'a> {
    /// Build a decoder over `data` with the position tables and the initial
    /// balanced Huffman tree already prepared.
    fn new(data: &'a [u8]) -> Self {
        let mut decoder = Self {
            bit_reader: BitReader::new(data),
            text: [0x20u8; LZH_N], // window starts space-filled, like plain LZSS
            freq: [0u16; LZH_T + 1],
            parent: [0usize; LZH_T + LZH_N_CHAR],
            son: [0usize; LZH_T],
            d_code: [0u8; 256],
            d_len: [0u8; 256],
            ring_pos: LZH_N - LZH_F,
        };
        decoder.init_tables();
        decoder.start_huff();
        decoder
    }

    /// Decode exactly `expected_size` bytes; a short result (truncated input)
    /// is reported as `DecompressionFailed("lzss-huffman")`.
    fn decode(&mut self, expected_size: usize) -> Result<Vec<u8>> {
        let mut out = Vec::with_capacity(expected_size);

        while out.len() < expected_size {
            let c = self.decode_char();
            if c < 256 {
                // Symbol < 256 is a literal byte.
                let byte = c as u8;
                out.push(byte);
                self.text[self.ring_pos] = byte;
                self.ring_pos = (self.ring_pos + 1) & (LZH_N - 1);
            } else {
                // Symbol >= 256 encodes a match length; the distance follows.
                let mut offset = self.decode_position();
                offset = (self.ring_pos.wrapping_sub(offset).wrapping_sub(1)) & (LZH_N - 1);
                // 256 maps to length 3 (c - 253), per the LZHUF alphabet.
                let mut length = c.saturating_sub(253);

                while length > 0 && out.len() < expected_size {
                    let byte = self.text[offset];
                    out.push(byte);
                    self.text[self.ring_pos] = byte;
                    self.ring_pos = (self.ring_pos + 1) & (LZH_N - 1);
                    offset = (offset + 1) & (LZH_N - 1);
                    length -= 1;
                }
            }
        }

        // Defensive: the loop should exit exactly at expected_size.
        if out.len() != expected_size {
            return Err(Error::DecompressionFailed("lzss-huffman"));
        }
        Ok(out)
    }

    /// Fill the static `d_code`/`d_len` position-decoding tables.
    /// Each successive group doubles the code prefix and halves the run.
    fn init_tables(&mut self) {
        let d_code_group_counts = [1usize, 3, 8, 12, 24, 16];
        let d_len_group_counts = [32usize, 48, 64, 48, 48, 16];

        let mut group_index = 0u8;
        let mut idx = 0usize;
        let mut run = 32usize;
        for count in d_code_group_counts {
            for _ in 0..count {
                for _ in 0..run {
                    self.d_code[idx] = group_index;
                    idx += 1;
                }
                group_index = group_index.wrapping_add(1);
            }
            run >>= 1;
        }

        // Bit lengths grow from 3 to 8 across the six groups.
        let mut len = 3u8;
        idx = 0;
        for count in d_len_group_counts {
            for _ in 0..count {
                self.d_len[idx] = len;
                idx += 1;
            }
            len = len.saturating_add(1);
        }
    }

    /// Build the initial balanced tree: every symbol has frequency 1, leaves
    /// first, internal nodes paired bottom-up, root at LZH_R.
    fn start_huff(&mut self) {
        for i in 0..LZH_N_CHAR {
            self.freq[i] = 1;
            self.son[i] = i + LZH_T;
            self.parent[i + LZH_T] = i;
        }

        let mut i = 0usize;
        let mut j = LZH_N_CHAR;
        while j <= LZH_R {
            self.freq[j] = self.freq[i].saturating_add(self.freq[i + 1]);
            self.son[j] = i;
            self.parent[i] = j;
            self.parent[i + 1] = j;
            i += 2;
            j += 1;
        }

        // Sentinel keeps comparisons past the root from swapping upward.
        self.freq[LZH_T] = u16::MAX;
        self.parent[LZH_R] = 0;
    }

    /// Walk the tree from the root, one input bit per level, to a leaf; then
    /// bump the symbol's frequency (adaptive update).
    fn decode_char(&mut self) -> usize {
        let mut node = self.son[LZH_R];
        while node < LZH_T {
            let bit = usize::from(self.bit_reader.read_bit_or_zero());
            node = self.son[node + bit];
        }

        let c = node - LZH_T;
        self.update(c);
        c
    }

    /// Decode a match distance: 8 literal bits select the table entry, which
    /// supplies the upper 6 bits and the count of extra bits to read.
    fn decode_position(&mut self) -> usize {
        let i = self.bit_reader.read_bits_or_zero(8) as usize;
        let mut c = usize::from(self.d_code[i]) << 6;
        let mut j = usize::from(self.d_len[i]).saturating_sub(2);

        while j > 0 {
            j -= 1;
            c |= usize::from(self.bit_reader.read_bit_or_zero()) << j;
        }

        c | (i & 0x3F)
    }

    /// Increment the frequency of symbol `c` and restore the sibling
    /// property, swapping nodes upward as needed (classic LZHUF update).
    fn update(&mut self, c: usize) {
        // Halve all frequencies before the root saturates.
        if self.freq[LZH_R] == LZH_MAX_FREQ {
            self.reconstruct();
        }

        let mut current = self.parent[c + LZH_T];
        loop {
            self.freq[current] = self.freq[current].saturating_add(1);
            let freq = self.freq[current];

            // If the bumped node now outranks its right neighbors, swap it
            // with the furthest smaller one to keep the array ordered.
            if current + 1 < self.freq.len() && freq > self.freq[current + 1] {
                let mut swap_idx = current + 1;
                while swap_idx + 1 < self.freq.len() && freq > self.freq[swap_idx + 1] {
                    swap_idx += 1;
                }

                self.freq.swap(current, swap_idx);

                let left = self.son[current];
                let right = self.son[swap_idx];
                self.son[current] = right;
                self.son[swap_idx] = left;

                // Re-point parents; internal children occupy two slots.
                self.parent[left] = swap_idx;
                if left < LZH_T {
                    self.parent[left + 1] = swap_idx;
                }

                self.parent[right] = current;
                if right < LZH_T {
                    self.parent[right + 1] = current;
                }

                current = swap_idx;
            }

            current = self.parent[current];
            if current == 0 {
                break;
            }
        }
    }

    /// Rebuild the whole tree after frequency saturation: collect leaves,
    /// halve their counts, then re-pair nodes in frequency order.
    fn reconstruct(&mut self) {
        // Compact all leaves to the front, halving each frequency.
        let mut j = 0usize;
        for i in 0..LZH_T {
            if self.son[i] >= LZH_T {
                self.freq[j] = (self.freq[i].saturating_add(1)) / 2;
                self.son[j] = self.son[i];
                j += 1;
            }
        }

        // Rebuild internal nodes, inserting each where its frequency sorts.
        let mut i = 0usize;
        let mut current = LZH_N_CHAR;
        while current < LZH_T {
            let sum = self.freq[i].saturating_add(self.freq[i + 1]);
            self.freq[current] = sum;

            let mut insert_at = current;
            while insert_at > 0 && sum < self.freq[insert_at - 1] {
                insert_at -= 1;
            }

            for move_idx in (insert_at..current).rev() {
                self.freq[move_idx + 1] = self.freq[move_idx];
                self.son[move_idx + 1] = self.son[move_idx];
            }

            self.freq[insert_at] = sum;
            self.son[insert_at] = i;

            i += 2;
            current += 1;
        }

        // Recompute every parent link from the child table.
        for idx in 0..LZH_T {
            let node = self.son[idx];
            self.parent[node] = idx;
            if node < LZH_T {
                self.parent[node + 1] = idx;
            }
        }

        self.freq[LZH_T] = u16::MAX;
        self.parent[LZH_R] = 0;
    }
}
|
||||
|
||||
/// MSB-first bit cursor over a byte slice; reads past the end yield zeros
/// instead of failing.
struct BitReader<'a> {
    data: &'a [u8],
    // Index of the byte currently being consumed.
    byte_pos: usize,
    // Mask of the next bit to hand out (0x80 = most significant).
    bit_mask: u8,
}

impl<'a> BitReader<'a> {
    /// Position the reader at the most significant bit of the first byte.
    fn new(data: &'a [u8]) -> Self {
        Self {
            data,
            byte_pos: 0,
            bit_mask: 0x80,
        }
    }

    /// Read one bit, MSB first; exhausted input yields 0.
    fn read_bit_or_zero(&mut self) -> u8 {
        let byte = match self.data.get(self.byte_pos) {
            Some(&b) => b,
            None => return 0,
        };

        let bit = u8::from(byte & self.bit_mask != 0);

        // Advance, rolling over to the next byte once the mask empties.
        self.bit_mask >>= 1;
        if self.bit_mask == 0 {
            self.bit_mask = 0x80;
            self.byte_pos = self.byte_pos.saturating_add(1);
        }

        bit
    }

    /// Read `bits` bits MSB-first into the low end of a u32; missing bits
    /// come back as zeros.
    fn read_bits_or_zero(&mut self, bits: usize) -> u32 {
        (0..bits).fold(0u32, |acc, _| (acc << 1) | u32::from(self.read_bit_or_zero()))
    }
}
|
||||
|
||||
/// Interpret raw name bytes as Latin-1: each byte maps to the code point of
/// the same value.
fn decode_name(name: &[u8]) -> String {
    let mut text = String::with_capacity(name.len());
    for &byte in name {
        text.push(char::from(byte));
    }
    text
}
|
||||
|
||||
/// View a fixed 12-byte, NUL-padded name field as the bytes before the first
/// NUL (the whole field when no NUL is present).
fn c_name_bytes(raw: &[u8; 12]) -> &[u8] {
    match raw.iter().position(|&b| b == 0) {
        Some(nul) => &raw[..nul],
        None => &raw[..],
    }
}
|
||||
|
||||
/// Compare two C-string byte slices: lexicographic by unsigned byte value,
/// with a shorter slice ordering before any longer slice it prefixes.
///
/// This is exactly the ordering `Ord` defines for `[u8]` (element-wise
/// comparison, then length as the tie-break), so delegate to the standard
/// implementation instead of hand-rolling the loop.
fn cmp_c_string(a: &[u8], b: &[u8]) -> Ordering {
    a.cmp(b)
}
|
||||
|
||||
fn needs_xor_key(method: PackMethod) -> bool {
|
||||
matches!(
|
||||
method,
|
||||
|
||||
249
crates/rsli/src/parse.rs
Normal file
249
crates/rsli/src/parse.rs
Normal file
@@ -0,0 +1,249 @@
|
||||
use crate::compress::xor::xor_stream;
|
||||
use crate::error::Error;
|
||||
use crate::{EntryMeta, EntryRecord, Library, OpenOptions, PackMethod, Result};
|
||||
use std::cmp::Ordering;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Parse an in-memory "NL" v1 archive image into a `Library`.
///
/// Layout: 32-byte header, then `entry_count` XOR-encrypted 32-byte
/// directory rows, then entry payloads. An optional 6-byte "AO" trailer may
/// declare a media overlay that shifts every data offset. Every structural
/// problem maps to a specific `Error` variant; the `expect`s below cannot
/// fire because `count` is range-checked against `u32::MAX` up front.
pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
    // A file shorter than the fixed header cannot contain anything useful.
    if bytes.len() < 32 {
        return Err(Error::EntryTableOutOfBounds {
            table_offset: 32,
            table_len: 0,
            file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
        });
    }

    let mut header_raw = [0u8; 32];
    header_raw.copy_from_slice(&bytes[0..32]);

    if &bytes[0..2] != b"NL" {
        let mut got = [0u8; 2];
        got.copy_from_slice(&bytes[0..2]);
        return Err(Error::InvalidMagic { got });
    }
    if bytes[3] != 0x01 {
        return Err(Error::UnsupportedVersion { got: bytes[3] });
    }

    // The on-disk count is signed 16-bit; a negative value is corruption.
    let entry_count = i16::from_le_bytes([bytes[4], bytes[5]]);
    if entry_count < 0 {
        return Err(Error::InvalidEntryCount { got: entry_count });
    }
    let count = usize::try_from(entry_count).map_err(|_| Error::IntegerOverflow)?;

    // Validate entry_count fits in u32 (required for EntryId)
    if count > u32::MAX as usize {
        return Err(Error::TooManyEntries { got: count });
    }

    let xor_seed = u32::from_le_bytes([bytes[20], bytes[21], bytes[22], bytes[23]]);

    // Directory: `count` rows of 32 bytes immediately after the header.
    let table_len = count.checked_mul(32).ok_or(Error::IntegerOverflow)?;
    let table_offset = 32usize;
    let table_end = table_offset
        .checked_add(table_len)
        .ok_or(Error::IntegerOverflow)?;
    if table_end > bytes.len() {
        return Err(Error::EntryTableOutOfBounds {
            table_offset: u64::try_from(table_offset).map_err(|_| Error::IntegerOverflow)?,
            table_len: u64::try_from(table_len).map_err(|_| Error::IntegerOverflow)?,
            file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
        });
    }

    // The directory is XOR-encrypted with the low 16 bits of the header seed.
    let table_enc = &bytes[table_offset..table_end];
    let table_plain_original = xor_stream(table_enc, (xor_seed & 0xFFFF) as u16);
    if table_plain_original.len() != table_len {
        return Err(Error::EntryTableDecryptFailed);
    }

    let (overlay, trailer_raw) = parse_ao_trailer(&bytes, opts.allow_ao_trailer)?;
    // Outside test builds the raw trailer bytes are only needed transiently.
    #[cfg(not(test))]
    let _ = trailer_raw;

    let mut entries = Vec::with_capacity(count);
    for idx in 0..count {
        let row = &table_plain_original[idx * 32..(idx + 1) * 32];

        let mut name_raw = [0u8; 12];
        name_raw.copy_from_slice(&row[0..12]);

        let flags_signed = i16::from_le_bytes([row[16], row[17]]);
        let sort_to_original = i16::from_le_bytes([row[18], row[19]]);
        let unpacked_size = u32::from_le_bytes([row[20], row[21], row[22], row[23]]);
        let data_offset_raw = u32::from_le_bytes([row[24], row[25], row[26], row[27]]);
        let packed_size_declared = u32::from_le_bytes([row[28], row[29], row[30], row[31]]);

        // Bits masked by 0x1E0 of the flags word select the packing method.
        let method_raw = (flags_signed as u16 as u32) & 0x1E0;
        let method = parse_method(method_raw);

        // Stored offsets are relative to the media overlay (if any).
        let effective_offset_u64 = u64::from(data_offset_raw)
            .checked_add(u64::from(overlay))
            .ok_or(Error::IntegerOverflow)?;
        let effective_offset =
            usize::try_from(effective_offset_u64).map_err(|_| Error::IntegerOverflow)?;

        let packed_size_usize =
            usize::try_from(packed_size_declared).map_err(|_| Error::IntegerOverflow)?;
        let mut packed_size_available = packed_size_usize;

        let end = effective_offset_u64
            .checked_add(u64::from(packed_size_declared))
            .ok_or(Error::IntegerOverflow)?;
        let file_len_u64 = u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?;

        if end > file_len_u64 {
            // Writer quirk: deflate entries (0x100) sometimes declare exactly
            // one byte past EOF. Accept that — shrinking the readable span by
            // one byte — only when the caller opted in.
            if method_raw == 0x100 && end == file_len_u64 + 1 {
                if opts.allow_deflate_eof_plus_one {
                    packed_size_available = packed_size_available
                        .checked_sub(1)
                        .ok_or(Error::IntegerOverflow)?;
                } else {
                    return Err(Error::DeflateEofPlusOneQuirkRejected {
                        id: u32::try_from(idx).expect("entry count validated at parse"),
                    });
                }
            } else {
                return Err(Error::PackedSizePastEof {
                    id: u32::try_from(idx).expect("entry count validated at parse"),
                    offset: effective_offset_u64,
                    packed_size: packed_size_declared,
                    file_len: file_len_u64,
                });
            }
        }

        let available_end = effective_offset
            .checked_add(packed_size_available)
            .ok_or(Error::IntegerOverflow)?;
        if available_end > bytes.len() {
            return Err(Error::EntryDataOutOfBounds {
                id: u32::try_from(idx).expect("entry count validated at parse"),
                offset: effective_offset_u64,
                size: packed_size_declared,
                file_len: file_len_u64,
            });
        }

        let name = decode_name(c_name_bytes(&name_raw));

        entries.push(EntryRecord {
            meta: EntryMeta {
                name,
                flags: i32::from(flags_signed),
                method,
                data_offset: effective_offset_u64,
                packed_size: packed_size_declared,
                unpacked_size,
            },
            name_raw,
            sort_to_original,
            key16: sort_to_original as u16,
            #[cfg(test)]
            data_offset_raw,
            packed_size_declared,
            packed_size_available,
            effective_offset,
        });
    }

    // 0xABBA marks a writer-presorted directory: trust the stored permutation
    // but validate every index. Otherwise derive the order by C-string name.
    let presorted_flag = u16::from_le_bytes([bytes[14], bytes[15]]);
    if presorted_flag == 0xABBA {
        for entry in &entries {
            let idx = i32::from(entry.sort_to_original);
            if idx < 0 || usize::try_from(idx).map_err(|_| Error::IntegerOverflow)? >= count {
                return Err(Error::CorruptEntryTable(
                    "sort_to_original is not a valid permutation index",
                ));
            }
        }
    } else {
        let mut sorted: Vec<usize> = (0..count).collect();
        sorted.sort_by(|a, b| {
            cmp_c_string(
                c_name_bytes(&entries[*a].name_raw),
                c_name_bytes(&entries[*b].name_raw),
            )
        });
        for (idx, entry) in entries.iter_mut().enumerate() {
            entry.sort_to_original =
                i16::try_from(sorted[idx]).map_err(|_| Error::IntegerOverflow)?;
            entry.key16 = entry.sort_to_original as u16;
        }
    }

    #[cfg(test)]
    let source_size = bytes.len();

    Ok(Library {
        bytes,
        entries,
        #[cfg(test)]
        header_raw,
        #[cfg(test)]
        table_plain_original,
        #[cfg(test)]
        xor_seed,
        #[cfg(test)]
        source_size,
        #[cfg(test)]
        trailer_raw,
    })
}
|
||||
|
||||
/// Detect an optional 6-byte "AO" trailer at the end of the file and return
/// the media-overlay offset it declares plus the raw trailer bytes.
///
/// Returns `(0, None)` when trailer handling is disabled, the file is shorter
/// than six bytes, or the magic is absent; errors only when the declared
/// overlay exceeds the file length.
fn parse_ao_trailer(bytes: &[u8], allow: bool) -> Result<(u32, Option<[u8; 6]>)> {
    if !allow || bytes.len() < 6 {
        return Ok((0, None));
    }

    // Trailer layout: b"AO" magic followed by a little-endian u32 overlay.
    if &bytes[bytes.len() - 6..bytes.len() - 4] != b"AO" {
        return Ok((0, None));
    }

    let mut trailer = [0u8; 6];
    trailer.copy_from_slice(&bytes[bytes.len() - 6..]);
    let overlay = u32::from_le_bytes([trailer[2], trailer[3], trailer[4], trailer[5]]);

    // An overlay pointing past EOF can never yield valid entry offsets.
    if u64::from(overlay) > u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)? {
        return Err(Error::MediaOverlayOutOfBounds {
            overlay,
            file_len: u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
        });
    }

    Ok((overlay, Some(trailer)))
}
|
||||
|
||||
/// Map the masked packing-method bits (flags & 0x1E0) to a `PackMethod`.
///
/// Any value outside the seven known encodings is preserved verbatim in
/// `PackMethod::Unknown` so callers can report it.
pub fn parse_method(raw: u32) -> PackMethod {
    match raw {
        0x000 => PackMethod::None,
        0x020 => PackMethod::XorOnly,
        0x040 => PackMethod::Lzss,
        0x060 => PackMethod::XorLzss,
        0x080 => PackMethod::LzssHuffman,
        0x0A0 => PackMethod::XorLzssHuffman,
        0x100 => PackMethod::Deflate,
        other => PackMethod::Unknown(other),
    }
}
|
||||
|
||||
/// Decode raw name bytes as Latin-1: every byte becomes the code point of
/// the same value.
fn decode_name(name: &[u8]) -> String {
    name.iter().copied().map(char::from).collect()
}
|
||||
|
||||
/// Slice a NUL-padded 12-byte name field down to its C-string content: the
/// bytes before the first NUL, or the whole field when no NUL is present.
pub fn c_name_bytes(raw: &[u8; 12]) -> &[u8] {
    // The first segment of a NUL-split is exactly the C-string prefix.
    raw.split(|&b| b == 0).next().unwrap_or(&[])
}
|
||||
|
||||
/// Compare two C-string byte slices: lexicographic by unsigned byte value,
/// with a shorter slice ordering before any longer slice it prefixes.
///
/// This is exactly the ordering `Ord` defines for `[u8]` (element-wise
/// comparison, then length as the tie-break), so delegate to the standard
/// implementation instead of hand-rolling the loop.
pub fn cmp_c_string(a: &[u8], b: &[u8]) -> Ordering {
    a.cmp(b)
}
|
||||
@@ -1,4 +1,6 @@
|
||||
use super::*;
|
||||
use crate::compress::lzh::{LZH_MAX_FREQ, LZH_N_CHAR, LZH_R, LZH_T};
|
||||
use crate::compress::xor::xor_stream;
|
||||
use flate2::write::DeflateEncoder;
|
||||
use flate2::Compression;
|
||||
use std::any::Any;
|
||||
@@ -665,6 +667,44 @@ fn rsli_synthetic_all_methods_roundtrip() {
|
||||
let _ = fs::remove_file(&path);
|
||||
}
|
||||
|
||||
#[test]
fn rsli_xorlzss_huffman_on_the_fly_roundtrip() {
    // 512 bytes cycling through A..Z: compressible data that exercises the
    // XOR + LZSS/Huffman (method_raw 0x0A0) decode path end to end.
    let plain: Vec<u8> = (0..512u16).map(|i| b'A' + (i % 26) as u8).collect();
    let entries = vec![SyntheticRsliEntry {
        name: "XLZH_ONFLY".to_string(),
        method_raw: 0x0A0,
        plain: plain.clone(),
        declared_packed_size: None,
    }];

    let bytes = build_rsli_bytes(
        &entries,
        &RsliBuildOptions {
            seed: 0x0BAD_C0DE,
            presorted: true,
            overlay: 0,
            add_ao_trailer: false,
        },
    );
    let path = write_temp_file("rsli-xorlzh-onfly", &bytes);

    let library = Library::open_path(&path).expect("open synthetic XLZH archive failed");
    let id = library
        .find("XLZH_ONFLY")
        .expect("find XLZH_ONFLY entry failed");

    // Full decode must reproduce the original payload.
    let loaded = library.load(id).expect("load XLZH_ONFLY failed");
    assert_eq!(loaded, plain);

    // The two-step load_packed + unpack route must agree with the direct load.
    let packed = library
        .load_packed(id)
        .expect("load_packed XLZH_ONFLY failed");
    let unpacked = library.unpack(&packed).expect("unpack XLZH_ONFLY failed");
    assert_eq!(unpacked, loaded);

    // Best-effort cleanup; a failed removal should not fail the test.
    let _ = fs::remove_file(&path);
}
|
||||
|
||||
#[test]
|
||||
fn rsli_synthetic_overlay_and_ao_trailer() {
|
||||
let entries = vec![SyntheticRsliEntry {
|
||||
|
||||
Reference in New Issue
Block a user