feat(render-core): add default UV scale and refactor UV mapping logic
- Introduced a constant `DEFAULT_UV_SCALE` for UV scaling.
- Refactored UV mapping in `build_render_mesh` to use the new constant.
- Simplified `compute_bounds` functions by extracting common logic into `compute_bounds_impl`.

test(render-core): add tests for rendering with empty and multi-node models

- Added tests to verify behavior when building render meshes from models with no slots and multiple nodes.
- Ensured UV scaling is correctly applied in tests.

feat(render-demo): add FOV argument and improve error handling

- Added a `--fov` command-line argument to set the field of view.
- Enhanced error messages for texture resolution failures.
- Updated MVP computation to use the new FOV parameter.

fix(rsli): improve error handling in LZH decompression

- Added checks to prevent out-of-bounds access in LZH decoding logic.

refactor(texm): streamline texture parsing and decoding tests

- Created a helper function `build_texm_payload` for constructing test payloads.
- Added tests for various texture formats including RGB565, RGB556, ARGB4444, and Luminance Alpha.
- Improved error handling for invalid TEXM headers and mip bounds.
This commit is contained in:
@@ -135,7 +135,12 @@ impl<'a> LzhDecoder<'a> {
|
||||
let mut node = self.son[LZH_R];
|
||||
while node < LZH_T {
|
||||
let bit = usize::from(self.bit_reader.read_bit()?);
|
||||
node = self.son[node + bit];
|
||||
let branch = node
|
||||
.checked_add(bit)
|
||||
.ok_or(Error::DecompressionFailed("lzss-huffman tree overflow"))?;
|
||||
node = *self.son.get(branch).ok_or(Error::DecompressionFailed(
|
||||
"lzss-huffman tree out of bounds",
|
||||
))?;
|
||||
}
|
||||
|
||||
let c = node - LZH_T;
|
||||
|
||||
@@ -111,13 +111,13 @@ impl Library {
|
||||
}
|
||||
|
||||
pub fn entries(&self) -> impl Iterator<Item = EntryRef<'_>> {
|
||||
self.entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, entry)| EntryRef {
|
||||
id: EntryId(u32::try_from(idx).expect("entry count validated at parse")),
|
||||
self.entries.iter().enumerate().filter_map(|(idx, entry)| {
|
||||
let id = u32::try_from(idx).ok()?;
|
||||
Some(EntryRef {
|
||||
id: EntryId(id),
|
||||
meta: &entry.meta,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find(&self, name: &str) -> Option<EntryId> {
|
||||
@@ -161,9 +161,8 @@ impl Library {
|
||||
Ordering::Less => high = mid,
|
||||
Ordering::Greater => low = mid + 1,
|
||||
Ordering::Equal => {
|
||||
return Some(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))
|
||||
let id = u32::try_from(idx).ok()?;
|
||||
return Some(EntryId(id));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -171,9 +170,8 @@ impl Library {
|
||||
// Linear fallback search
|
||||
self.entries.iter().enumerate().find_map(|(idx, entry)| {
|
||||
if cmp_c_string(query_bytes, c_name_bytes(&entry.name_raw)) == Ordering::Equal {
|
||||
Some(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))
|
||||
let id = u32::try_from(idx).ok()?;
|
||||
Some(EntryId(id))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -251,7 +249,7 @@ impl Library {
|
||||
.get(idx)
|
||||
.ok_or_else(|| Error::EntryIdOutOfRange {
|
||||
id: id.0,
|
||||
entry_count: self.entries.len().try_into().unwrap_or(u32::MAX),
|
||||
entry_count: saturating_u32_len(self.entries.len()),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -317,18 +315,15 @@ impl Library {
|
||||
}
|
||||
|
||||
for (idx, entry) in self.entries.iter().enumerate() {
|
||||
let packed = self
|
||||
.load_packed(EntryId(
|
||||
u32::try_from(idx).expect("entry count validated at parse"),
|
||||
))?
|
||||
.packed;
|
||||
let id = u32::try_from(idx).map_err(|_| Error::IntegerOverflow)?;
|
||||
let packed = self.load_packed(EntryId(id))?.packed;
|
||||
let start =
|
||||
usize::try_from(entry.data_offset_raw).map_err(|_| Error::IntegerOverflow)?;
|
||||
for (offset, byte) in packed.iter().copied().enumerate() {
|
||||
let pos = start.checked_add(offset).ok_or(Error::IntegerOverflow)?;
|
||||
if pos >= out.len() {
|
||||
return Err(Error::PackedSizePastEof {
|
||||
id: u32::try_from(idx).expect("entry count validated at parse"),
|
||||
id,
|
||||
offset: u64::from(entry.data_offset_raw),
|
||||
packed_size: entry.packed_size_declared,
|
||||
file_len: u64::try_from(out.len()).map_err(|_| Error::IntegerOverflow)?,
|
||||
@@ -407,5 +402,9 @@ fn needs_xor_key(method: PackMethod) -> bool {
|
||||
)
|
||||
}
|
||||
|
||||
fn saturating_u32_len(len: usize) -> u32 {
|
||||
u32::try_from(len).unwrap_or(u32::MAX)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
@@ -100,12 +100,12 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
} else {
|
||||
return Err(Error::DeflateEofPlusOneQuirkRejected {
|
||||
id: u32::try_from(idx).expect("entry count validated at parse"),
|
||||
id: u32::try_from(idx).map_err(|_| Error::IntegerOverflow)?,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
return Err(Error::PackedSizePastEof {
|
||||
id: u32::try_from(idx).expect("entry count validated at parse"),
|
||||
id: u32::try_from(idx).map_err(|_| Error::IntegerOverflow)?,
|
||||
offset: effective_offset_u64,
|
||||
packed_size: packed_size_declared,
|
||||
file_len: file_len_u64,
|
||||
@@ -118,7 +118,7 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
|
||||
.ok_or(Error::IntegerOverflow)?;
|
||||
if available_end > bytes.len() {
|
||||
return Err(Error::EntryDataOutOfBounds {
|
||||
id: u32::try_from(idx).expect("entry count validated at parse"),
|
||||
id: u32::try_from(idx).map_err(|_| Error::IntegerOverflow)?,
|
||||
offset: effective_offset_u64,
|
||||
size: packed_size_declared,
|
||||
file_len: file_len_u64,
|
||||
|
||||
Reference in New Issue
Block a user