Refactor documentation structure and add new specifications
- Updated MSH documentation to reflect changes in material, wear, and texture specifications. - Introduced new `render.md` file detailing the render pipeline process. - Removed outdated sections from `runtime-pipeline.md` and redirected to `render.md`. - Added detailed specifications for `Texm` texture format and `WEAR` wear table. - Updated navigation in `mkdocs.yml` to align with new documentation structure.
This commit is contained in:
7
crates/msh-core/Cargo.toml
Normal file
7
crates/msh-core/Cargo.toml
Normal file
@@ -0,0 +1,7 @@
|
||||
[package]
name = "msh-core"
version = "0.1.0"
edition = "2021"

[dependencies]
# Archive container reader; MSH payloads are themselves nested NRes archives.
nres = { path = "../nres" }
|
||||
14
crates/msh-core/README.md
Normal file
14
crates/msh-core/README.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# msh-core
|
||||
|
||||
Парсер core-части формата `MSH`.
|
||||
|
||||
Покрывает:
|
||||
|
||||
- `Res1`, `Res2`, `Res3`, `Res6`, `Res13` (обязательные);
|
||||
- `Res4`, `Res5`, `Res10` (опциональные);
|
||||
- slot lookup по `node/lod/group`.
|
||||
|
||||
Тесты:
|
||||
|
||||
- прогон по всем `.msh` в `testdata`;
|
||||
- синтетическая минимальная модель.
|
||||
74
crates/msh-core/src/error.rs
Normal file
74
crates/msh-core/src/error.rs
Normal file
@@ -0,0 +1,74 @@
|
||||
use core::fmt;

/// Errors that can occur while parsing the core part of an `MSH` model.
#[derive(Debug)]
pub enum Error {
    /// Failure bubbled up from the underlying `nres` archive reader.
    Nres(nres::error::Error),
    /// A resource the format requires (Res1/Res2/Res3/Res6/Res13) is absent.
    MissingResource {
        kind: u32,
        label: &'static str,
    },
    /// A resource payload is not a whole multiple of its record stride.
    InvalidResourceSize {
        label: &'static str,
        size: usize,
        stride: usize,
    },
    /// The Res2 blob is smaller than its fixed 0x8C (140) byte header.
    InvalidRes2Size {
        size: usize,
    },
    /// Res1 declares a node record stride other than the supported 38 or 24.
    UnsupportedNodeStride {
        stride: usize,
    },
    /// An index read from the data points past the end of its table.
    IndexOutOfBounds {
        label: &'static str,
        index: usize,
        limit: usize,
    },
    /// An offset/length computation would overflow `usize`, or a fixed-width
    /// read ran past the end of a buffer.
    IntegerOverflow,
}
|
||||
|
||||
/// Lets `?` convert archive-level `nres` errors into this crate's error type.
impl From<nres::error::Error> for Error {
    fn from(value: nres::error::Error) -> Self {
        Self::Nres(value)
    }
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Nres(err) => write!(f, "{err}"),
|
||||
Self::MissingResource { kind, label } => {
|
||||
write!(f, "missing required resource type={kind} ({label})")
|
||||
}
|
||||
Self::InvalidResourceSize {
|
||||
label,
|
||||
size,
|
||||
stride,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"invalid {label} size={size}, expected multiple of stride={stride}"
|
||||
)
|
||||
}
|
||||
Self::InvalidRes2Size { size } => {
|
||||
write!(f, "invalid Res2 size={size}, expected >= 140")
|
||||
}
|
||||
Self::UnsupportedNodeStride { stride } => {
|
||||
write!(
|
||||
f,
|
||||
"unsupported Res1 node stride={stride}, expected 38 or 24"
|
||||
)
|
||||
}
|
||||
Self::IndexOutOfBounds {
|
||||
label,
|
||||
index,
|
||||
limit,
|
||||
} => write!(
|
||||
f,
|
||||
"{label} index out of bounds: index={index}, limit={limit}"
|
||||
),
|
||||
Self::IntegerOverflow => write!(f, "integer overflow"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Marker impl; relies on the default `source()` (no explicit chaining).
impl std::error::Error for Error {}
|
||||
392
crates/msh-core/src/lib.rs
Normal file
392
crates/msh-core/src/lib.rs
Normal file
@@ -0,0 +1,392 @@
|
||||
pub mod error;

use crate::error::Error;
use std::sync::Arc;

/// Crate-wide result alias.
pub type Result<T> = core::result::Result<T, Error>;

// NRes resource-type ids that make up the MSH container.
pub const RES1_NODE_TABLE: u32 = 1; // node records, stride 38 or 24
pub const RES2_SLOTS: u32 = 2; // 0x8C-byte header + 68-byte slot records
pub const RES3_POSITIONS: u32 = 3; // [f32; 3] vertex positions
pub const RES4_NORMALS: u32 = 4; // optional [i8; 4] records
pub const RES5_UV0: u32 = 5; // optional [i16; 2] records
pub const RES6_INDICES: u32 = 6; // packed u16 index buffer
pub const RES10_NAMES: u32 = 10; // optional per-node name strings
pub const RES13_BATCHES: u32 = 13; // 20-byte draw-batch records
|
||||
|
||||
/// One renderable slot decoded from a 68-byte Res2 record.
#[derive(Clone, Debug)]
pub struct Slot {
    // presumably a triangle range; not consumed by this crate — TODO confirm
    pub tri_start: u16,
    pub tri_count: u16,
    /// Index of the first record in `Model::batches` owned by this slot.
    pub batch_start: u16,
    /// Number of consecutive batches owned by this slot.
    pub batch_count: u16,
    pub aabb_min: [f32; 3],
    pub aabb_max: [f32; 3],
    pub sphere_center: [f32; 3],
    pub sphere_radius: f32,
    /// Five trailing u32 words of unknown meaning, preserved verbatim.
    pub opaque: [u32; 5],
}
|
||||
|
||||
/// One draw batch decoded from a 20-byte Res13 record.
#[derive(Clone, Debug)]
pub struct Batch {
    pub batch_flags: u16,
    pub material_index: u16,
    // `opaqueN` fields hold record bytes at offset N whose meaning is unknown.
    pub opaque4: u16,
    pub opaque6: u16,
    /// Number of entries in `Model::indices` consumed by this batch.
    pub index_count: u16,
    /// Offset of the first entry within `Model::indices`.
    pub index_start: u32,
    pub opaque14: u16,
    pub base_vertex: u32,
}
|
||||
|
||||
/// Fully parsed core MSH model.
#[derive(Clone, Debug)]
pub struct Model {
    /// Size of one node record in `nodes_raw` (38 or 24 bytes).
    pub node_stride: usize,
    pub node_count: usize,
    /// Raw Res1 node table; decoded on demand by `slot_index`.
    pub nodes_raw: Vec<u8>,
    pub slots: Vec<Slot>,
    pub positions: Vec<[f32; 3]>,
    /// `Some` only when the optional Res4 resource exists.
    pub normals: Option<Vec<[i8; 4]>>,
    /// `Some` only when the optional Res5 resource exists.
    pub uv0: Option<Vec<[i16; 2]>>,
    pub indices: Vec<u16>,
    pub batches: Vec<Batch>,
    /// `Some` only when the optional Res10 resource exists.
    pub node_names: Option<Vec<Option<String>>>,
}
|
||||
|
||||
impl Model {
|
||||
pub fn slot_index(&self, node_index: usize, lod: usize, group: usize) -> Option<usize> {
|
||||
if node_index >= self.node_count || lod >= 3 || group >= 5 {
|
||||
return None;
|
||||
}
|
||||
if self.node_stride != 38 {
|
||||
return None;
|
||||
}
|
||||
let node_off = node_index.checked_mul(self.node_stride)?;
|
||||
let matrix_off = node_off.checked_add(8)?;
|
||||
let word_off = matrix_off.checked_add((lod * 5 + group) * 2)?;
|
||||
let raw = read_u16(&self.nodes_raw, word_off).ok()?;
|
||||
if raw == u16::MAX {
|
||||
return None;
|
||||
}
|
||||
let idx = usize::from(raw);
|
||||
if idx >= self.slots.len() {
|
||||
return None;
|
||||
}
|
||||
Some(idx)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a nested-NRes MSH payload into a [`Model`].
///
/// Required resources: Res1 (nodes), Res2 (slots), Res3 (positions),
/// Res6 (indices), Res13 (batches); Res4/Res5/Res10 are optional.
///
/// # Errors
/// Returns [`Error`] on archive failures, missing required resources,
/// malformed resource sizes, or an unsupported node stride.
pub fn parse_model_payload(payload: &[u8]) -> Result<Model> {
    // The model payload is itself an NRes archive; open it from memory.
    let archive = nres::Archive::open_bytes(
        Arc::from(payload.to_vec().into_boxed_slice()),
        nres::OpenOptions::default(),
    )?;

    let res1 = read_required(&archive, RES1_NODE_TABLE, "Res1")?;
    let res2 = read_required(&archive, RES2_SLOTS, "Res2")?;
    let res3 = read_required(&archive, RES3_POSITIONS, "Res3")?;
    let res6 = read_required(&archive, RES6_INDICES, "Res6")?;
    let res13 = read_required(&archive, RES13_BATCHES, "Res13")?;

    let res4 = read_optional(&archive, RES4_NORMALS)?;
    let res5 = read_optional(&archive, RES5_UV0)?;
    let res10 = read_optional(&archive, RES10_NAMES)?;

    // Res1's attr3 carries the node record stride; only 38 and 24 are known.
    let node_stride = usize::try_from(res1.meta.attr3).map_err(|_| Error::IntegerOverflow)?;
    if node_stride != 38 && node_stride != 24 {
        return Err(Error::UnsupportedNodeStride {
            stride: node_stride,
        });
    }
    if res1.bytes.len() % node_stride != 0 {
        return Err(Error::InvalidResourceSize {
            label: "Res1",
            size: res1.bytes.len(),
            stride: node_stride,
        });
    }
    let node_count = res1.bytes.len() / node_stride;

    // Res2 = fixed 0x8C-byte header followed by 68-byte slot records.
    if res2.bytes.len() < 0x8C {
        return Err(Error::InvalidRes2Size {
            size: res2.bytes.len(),
        });
    }
    let slot_blob = res2
        .bytes
        .len()
        .checked_sub(0x8C)
        .ok_or(Error::IntegerOverflow)?;
    if slot_blob % 68 != 0 {
        return Err(Error::InvalidResourceSize {
            label: "Res2.slots",
            size: slot_blob,
            stride: 68,
        });
    }
    let slot_count = slot_blob / 68;
    let mut slots = Vec::with_capacity(slot_count);
    for i in 0..slot_count {
        // Offset of slot record `i`, computed with checked arithmetic.
        let off = 0x8Cusize
            .checked_add(i.checked_mul(68).ok_or(Error::IntegerOverflow)?)
            .ok_or(Error::IntegerOverflow)?;
        slots.push(Slot {
            tri_start: read_u16(&res2.bytes, off)?,
            tri_count: read_u16(&res2.bytes, off + 2)?,
            batch_start: read_u16(&res2.bytes, off + 4)?,
            batch_count: read_u16(&res2.bytes, off + 6)?,
            aabb_min: [
                read_f32(&res2.bytes, off + 8)?,
                read_f32(&res2.bytes, off + 12)?,
                read_f32(&res2.bytes, off + 16)?,
            ],
            aabb_max: [
                read_f32(&res2.bytes, off + 20)?,
                read_f32(&res2.bytes, off + 24)?,
                read_f32(&res2.bytes, off + 28)?,
            ],
            sphere_center: [
                read_f32(&res2.bytes, off + 32)?,
                read_f32(&res2.bytes, off + 36)?,
                read_f32(&res2.bytes, off + 40)?,
            ],
            sphere_radius: read_f32(&res2.bytes, off + 44)?,
            opaque: [
                read_u32(&res2.bytes, off + 48)?,
                read_u32(&res2.bytes, off + 52)?,
                read_u32(&res2.bytes, off + 56)?,
                read_u32(&res2.bytes, off + 60)?,
                read_u32(&res2.bytes, off + 64)?,
            ],
        });
    }

    let positions = parse_positions(&res3.bytes)?;
    let indices = parse_u16_array(&res6.bytes, "Res6")?;
    let batches = parse_batches(&res13.bytes)?;

    // Optional resources decode only when present; `None` stays `None`.
    let normals = match res4 {
        Some(raw) => Some(parse_i8x4_array(&raw.bytes, "Res4")?),
        None => None,
    };
    let uv0 = match res5 {
        Some(raw) => Some(parse_i16x2_array(&raw.bytes, "Res5")?),
        None => None,
    };
    let node_names = match res10 {
        Some(raw) => Some(parse_res10_names(&raw.bytes, node_count)?),
        None => None,
    };

    Ok(Model {
        node_stride,
        node_count,
        nodes_raw: res1.bytes,
        slots,
        positions,
        normals,
        uv0,
        indices,
        batches,
        node_names,
    })
}
|
||||
|
||||
fn parse_positions(data: &[u8]) -> Result<Vec<[f32; 3]>> {
|
||||
if !data.len().is_multiple_of(12) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label: "Res3",
|
||||
size: data.len(),
|
||||
stride: 12,
|
||||
});
|
||||
}
|
||||
let count = data.len() / 12;
|
||||
let mut out = Vec::with_capacity(count);
|
||||
for i in 0..count {
|
||||
let off = i * 12;
|
||||
out.push([
|
||||
read_f32(data, off)?,
|
||||
read_f32(data, off + 4)?,
|
||||
read_f32(data, off + 8)?,
|
||||
]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_batches(data: &[u8]) -> Result<Vec<Batch>> {
|
||||
if !data.len().is_multiple_of(20) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label: "Res13",
|
||||
size: data.len(),
|
||||
stride: 20,
|
||||
});
|
||||
}
|
||||
let count = data.len() / 20;
|
||||
let mut out = Vec::with_capacity(count);
|
||||
for i in 0..count {
|
||||
let off = i * 20;
|
||||
out.push(Batch {
|
||||
batch_flags: read_u16(data, off)?,
|
||||
material_index: read_u16(data, off + 2)?,
|
||||
opaque4: read_u16(data, off + 4)?,
|
||||
opaque6: read_u16(data, off + 6)?,
|
||||
index_count: read_u16(data, off + 8)?,
|
||||
index_start: read_u32(data, off + 10)?,
|
||||
opaque14: read_u16(data, off + 14)?,
|
||||
base_vertex: read_u32(data, off + 16)?,
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_u16_array(data: &[u8], label: &'static str) -> Result<Vec<u16>> {
|
||||
if !data.len().is_multiple_of(2) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 2,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 2);
|
||||
for i in (0..data.len()).step_by(2) {
|
||||
out.push(read_u16(data, i)?);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_i8x4_array(data: &[u8], label: &'static str) -> Result<Vec<[i8; 4]>> {
|
||||
if !data.len().is_multiple_of(4) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 4,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 4);
|
||||
for i in (0..data.len()).step_by(4) {
|
||||
out.push([
|
||||
read_i8(data, i)?,
|
||||
read_i8(data, i + 1)?,
|
||||
read_i8(data, i + 2)?,
|
||||
read_i8(data, i + 3)?,
|
||||
]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_i16x2_array(data: &[u8], label: &'static str) -> Result<Vec<[i16; 2]>> {
|
||||
if !data.len().is_multiple_of(4) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 4,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 4);
|
||||
for i in (0..data.len()).step_by(4) {
|
||||
out.push([read_i16(data, i)?, read_i16(data, i + 2)?]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
/// Decodes Res10: one length-prefixed name per node.
///
/// Record layout: a u32 byte length, then `len + 1` bytes where the extra
/// byte is expected to be a NUL terminator. A zero length means "no name".
fn parse_res10_names(data: &[u8], node_count: usize) -> Result<Vec<Option<String>>> {
    let mut out = Vec::with_capacity(node_count);
    let mut off = 0usize;
    for _ in 0..node_count {
        let len = usize::try_from(read_u32(data, off)?).map_err(|_| Error::IntegerOverflow)?;
        off = off.checked_add(4).ok_or(Error::IntegerOverflow)?;
        if len == 0 {
            out.push(None);
            continue;
        }
        // Account for the trailing NUL byte when bounding the slice.
        let need = len.checked_add(1).ok_or(Error::IntegerOverflow)?;
        let end = off.checked_add(need).ok_or(Error::IntegerOverflow)?;
        let slice = data.get(off..end).ok_or(Error::InvalidResourceSize {
            label: "Res10",
            size: data.len(),
            stride: 1,
        })?;
        // Strip the NUL terminator if present; tolerate its absence.
        let text = if slice.last().copied() == Some(0) {
            &slice[..slice.len().saturating_sub(1)]
        } else {
            slice
        };
        // Names may not be valid UTF-8; replace bad sequences rather than fail.
        let decoded = String::from_utf8_lossy(text).to_string();
        out.push(Some(decoded));
        off = end;
    }
    Ok(out)
}
|
||||
|
||||
/// A resource pulled out of the nested archive: its metadata plus an owned
/// copy of its payload bytes.
struct RawResource {
    meta: nres::EntryMeta,
    bytes: Vec<u8>,
}
|
||||
|
||||
/// Reads the first resource of type `kind`, failing with
/// [`Error::MissingResource`] when the archive has none.
fn read_required(archive: &nres::Archive, kind: u32, label: &'static str) -> Result<RawResource> {
    let id = archive
        .entries()
        .find(|entry| entry.meta.kind == kind)
        .map(|entry| entry.id)
        .ok_or(Error::MissingResource { kind, label })?;
    // NOTE(review): `get` presumably always succeeds for an id yielded by
    // `entries()`; this guards against that assumption being wrong.
    let entry = archive.get(id).ok_or(Error::IndexOutOfBounds {
        label,
        index: usize::try_from(id.0).map_err(|_| Error::IntegerOverflow)?,
        limit: archive.entry_count(),
    })?;
    let data = archive.read(id)?.into_owned();
    Ok(RawResource {
        meta: entry.meta.clone(),
        bytes: data,
    })
}
|
||||
|
||||
fn read_optional(archive: &nres::Archive, kind: u32) -> Result<Option<RawResource>> {
|
||||
let Some(id) = archive
|
||||
.entries()
|
||||
.find(|entry| entry.meta.kind == kind)
|
||||
.map(|entry| entry.id)
|
||||
else {
|
||||
return Ok(None);
|
||||
};
|
||||
let entry = archive.get(id).ok_or(Error::IndexOutOfBounds {
|
||||
label: "optional",
|
||||
index: usize::try_from(id.0).map_err(|_| Error::IntegerOverflow)?,
|
||||
limit: archive.entry_count(),
|
||||
})?;
|
||||
let data = archive.read(id)?.into_owned();
|
||||
Ok(Some(RawResource {
|
||||
meta: entry.meta.clone(),
|
||||
bytes: data,
|
||||
}))
|
||||
}
|
||||
|
||||
fn read_u16(data: &[u8], offset: usize) -> Result<u16> {
|
||||
let bytes = data.get(offset..offset + 2).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 2] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(u16::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_i16(data: &[u8], offset: usize) -> Result<i16> {
|
||||
let bytes = data.get(offset..offset + 2).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 2] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(i16::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_i8(data: &[u8], offset: usize) -> Result<i8> {
|
||||
let byte = data.get(offset).copied().ok_or(Error::IntegerOverflow)?;
|
||||
Ok(i8::from_le_bytes([byte]))
|
||||
}
|
||||
|
||||
fn read_u32(data: &[u8], offset: usize) -> Result<u32> {
|
||||
let bytes = data.get(offset..offset + 4).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 4] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(u32::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_f32(data: &[u8], offset: usize) -> Result<f32> {
|
||||
Ok(f32::from_bits(read_u32(data, offset)?))
|
||||
}
|
||||
|
||||
// Test module lives in `tests.rs`; compiled only under `cargo test`.
#[cfg(test)]
mod tests;
|
||||
296
crates/msh-core/src/tests.rs
Normal file
296
crates/msh-core/src/tests.rs
Normal file
@@ -0,0 +1,296 @@
|
||||
use super::*;
|
||||
use nres::Archive;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Depth-first walk of `root`, appending every regular file to `out`.
/// Unreadable directories are silently skipped.
fn collect_files_recursive(root: &Path, out: &mut Vec<PathBuf>) {
    if let Ok(entries) = fs::read_dir(root) {
        for path in entries.flatten().map(|entry| entry.path()) {
            if path.is_dir() {
                collect_files_recursive(&path, out);
            } else if path.is_file() {
                out.push(path);
            }
        }
    }
}
|
||||
|
||||
/// Returns every file under the workspace-level `testdata/` directory whose
/// first four bytes are the `NRes` magic, sorted for deterministic ordering.
fn nres_test_files() -> Vec<PathBuf> {
    // testdata lives two levels above this crate, at the workspace root.
    let root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("..")
        .join("..")
        .join("testdata");
    let mut files = Vec::new();
    collect_files_recursive(&root, &mut files);
    files.sort();
    files
        .into_iter()
        .filter(|path| {
            // Keep only NRes archives; unreadable files are silently dropped.
            fs::read(path)
                .map(|bytes| bytes.get(0..4) == Some(b"NRes"))
                .unwrap_or(false)
        })
        .collect()
}
|
||||
|
||||
/// Case-insensitive check for a `.msh` file extension.
///
/// Compares the last four bytes in place instead of lowercasing a copy of
/// the whole name, avoiding a per-entry allocation. The `is_char_boundary`
/// guard keeps slicing safe for multi-byte UTF-8 names (which can never
/// match `.msh` anyway).
fn is_msh_name(name: &str) -> bool {
    let len = name.len();
    len >= 4 && name.is_char_boundary(len - 4) && name[len - 4..].eq_ignore_ascii_case(".msh")
}
|
||||
|
||||
/// Smoke test over real game data: every `.msh` entry inside every NRes
/// archive in `testdata` must parse, every slot lookup must stay in bounds,
/// and at least one model must expose a renderable lod0/group0 batch.
#[test]
fn parse_all_game_msh_models() {
    let archives = nres_test_files();
    if archives.is_empty() {
        // Game data is not committed; treat absence as a skip, not a failure.
        eprintln!("skipping parse_all_game_msh_models: no NRes files in testdata");
        return;
    }

    let mut model_count = 0usize;
    let mut renderable_count = 0usize;
    let mut legacy_stride24_count = 0usize;

    for archive_path in archives {
        let archive = Archive::open_path(&archive_path)
            .unwrap_or_else(|err| panic!("failed to open {}: {err}", archive_path.display()));

        for entry in archive.entries() {
            if !is_msh_name(&entry.meta.name) {
                continue;
            }
            model_count += 1;
            let payload = archive.read(entry.id).unwrap_or_else(|err| {
                panic!(
                    "failed to read model '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            let model = parse_model_payload(payload.as_slice()).unwrap_or_else(|err| {
                panic!(
                    "failed to parse model '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });

            if model.node_stride == 24 {
                legacy_stride24_count += 1;
            }

            // Exhaustively probe every (node, lod, group) combination.
            for node_index in 0..model.node_count {
                for lod in 0..3 {
                    for group in 0..5 {
                        if let Some(slot_idx) = model.slot_index(node_index, lod, group) {
                            assert!(
                                slot_idx < model.slots.len(),
                                "slot index out of bounds in '{}' ({})",
                                entry.meta.name,
                                archive_path.display()
                            );
                        }
                    }
                }
            }

            // A model counts as renderable when some lod0/group0 slot owns a
            // batch with >= 1 whole triangle inside the index buffer.
            let mut has_renderable_batch = false;
            for node_index in 0..model.node_count {
                let Some(slot_idx) = model.slot_index(node_index, 0, 0) else {
                    continue;
                };
                let slot = &model.slots[slot_idx];
                let batch_end =
                    usize::from(slot.batch_start).saturating_add(usize::from(slot.batch_count));
                if batch_end > model.batches.len() {
                    continue;
                }
                for batch in &model.batches[usize::from(slot.batch_start)..batch_end] {
                    let index_start = usize::try_from(batch.index_start).unwrap_or(usize::MAX);
                    let index_count = usize::from(batch.index_count);
                    let end = index_start.saturating_add(index_count);
                    if end <= model.indices.len() && index_count >= 3 {
                        has_renderable_batch = true;
                        break;
                    }
                }
                if has_renderable_batch {
                    break;
                }
            }
            if has_renderable_batch {
                renderable_count += 1;
            }
        }
    }

    assert!(model_count > 0, "no .msh entries found");
    assert!(
        renderable_count > 0,
        "no renderable models (lod0/group0) were detected"
    );
    assert!(
        legacy_stride24_count <= model_count,
        "internal test accounting error"
    );
}
|
||||
|
||||
/// Builds the smallest valid nested-NRes payload (required resources only:
/// one node, one slot, one triangle, one batch) and checks that it parses
/// and that the slot lookup resolves.
#[test]
fn parse_minimal_synthetic_model() {
    // Nested NRes with required resources only.
    let mut payload = Vec::new();
    payload.extend_from_slice(b"NRes");
    payload.extend_from_slice(&0x100u32.to_le_bytes());
    payload.extend_from_slice(&5u32.to_le_bytes()); // entry_count
    payload.extend_from_slice(&0u32.to_le_bytes()); // total_size placeholder

    // Parallel arrays feeding the 64-byte-per-entry directory written below.
    let mut resource_offsets = Vec::new();
    let mut resource_sizes = Vec::new();
    let mut resource_types = Vec::new();
    let mut resource_attr3 = Vec::new();
    let mut resource_names = Vec::new();

    // Appends one resource body (padded to 8-byte alignment) and records
    // the directory fields describing it.
    let add_resource = |payload: &mut Vec<u8>,
                        offsets: &mut Vec<u32>,
                        sizes: &mut Vec<u32>,
                        types: &mut Vec<u32>,
                        attr3: &mut Vec<u32>,
                        names: &mut Vec<String>,
                        kind: u32,
                        name: &str,
                        data: &[u8],
                        attr3_val: u32| {
        offsets.push(u32::try_from(payload.len()).expect("offset overflow"));
        payload.extend_from_slice(data);
        while !payload.len().is_multiple_of(8) {
            payload.push(0);
        }
        sizes.push(u32::try_from(data.len()).expect("size overflow"));
        types.push(kind);
        attr3.push(attr3_val);
        names.push(name.to_string());
    };

    // One 38-byte node: slot matrix maps (lod 0, group 0) -> slot 0 and
    // marks the remaining 14 cells empty (0xFFFF).
    let node = {
        let mut b = vec![0u8; 38];
        // slot[0][0] = 0
        b[8..10].copy_from_slice(&0u16.to_le_bytes());
        for i in 1..15 {
            let off = 8 + i * 2;
            b[off..off + 2].copy_from_slice(&u16::MAX.to_le_bytes());
        }
        b
    };
    // Res2: zeroed 0x8C-byte header plus one slot record owning one batch.
    let mut res2 = vec![0u8; 0x8C + 68];
    res2[0x8C..0x8C + 2].copy_from_slice(&0u16.to_le_bytes()); // tri_start
    res2[0x8C + 2..0x8C + 4].copy_from_slice(&0u16.to_le_bytes()); // tri_count
    res2[0x8C + 4..0x8C + 6].copy_from_slice(&0u16.to_le_bytes()); // batch_start
    res2[0x8C + 6..0x8C + 8].copy_from_slice(&1u16.to_le_bytes()); // batch_count
    // One triangle's worth of vertex positions.
    let positions = [0f32, 0f32, 0f32, 1f32, 0f32, 0f32, 0f32, 1f32, 0f32]
        .iter()
        .flat_map(|v| v.to_le_bytes())
        .collect::<Vec<_>>();
    let indices = [0u16, 1, 2]
        .iter()
        .flat_map(|v| v.to_le_bytes())
        .collect::<Vec<_>>();
    // One 20-byte batch covering all three indices.
    let batch = {
        let mut b = vec![0u8; 20];
        b[0..2].copy_from_slice(&0u16.to_le_bytes());
        b[2..4].copy_from_slice(&0u16.to_le_bytes());
        b[8..10].copy_from_slice(&3u16.to_le_bytes()); // index_count
        b[10..14].copy_from_slice(&0u32.to_le_bytes()); // index_start
        b[16..20].copy_from_slice(&0u32.to_le_bytes()); // base_vertex
        b
    };

    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES1_NODE_TABLE,
        "Res1",
        &node,
        38,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES2_SLOTS,
        "Res2",
        &res2,
        68,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES3_POSITIONS,
        "Res3",
        &positions,
        12,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES6_INDICES,
        "Res6",
        &indices,
        2,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES13_BATCHES,
        "Res13",
        &batch,
        20,
    );

    // Append the 64-byte-per-entry directory, then patch the total size
    // back into the header placeholder.
    let directory_offset = payload.len();
    for i in 0..resource_types.len() {
        payload.extend_from_slice(&resource_types[i].to_le_bytes());
        payload.extend_from_slice(&1u32.to_le_bytes()); // attr1
        payload.extend_from_slice(&0u32.to_le_bytes()); // attr2
        payload.extend_from_slice(&resource_sizes[i].to_le_bytes());
        payload.extend_from_slice(&resource_attr3[i].to_le_bytes());
        let mut name_raw = [0u8; 36];
        let bytes = resource_names[i].as_bytes();
        name_raw[..bytes.len()].copy_from_slice(bytes);
        payload.extend_from_slice(&name_raw);
        payload.extend_from_slice(&resource_offsets[i].to_le_bytes());
        payload.extend_from_slice(&(i as u32).to_le_bytes()); // sort index
    }
    let total_size = u32::try_from(payload.len()).expect("size overflow");
    payload[12..16].copy_from_slice(&total_size.to_le_bytes());
    assert_eq!(
        directory_offset + resource_types.len() * 64,
        payload.len(),
        "synthetic nested NRes layout invalid"
    );

    let model = parse_model_payload(&payload).expect("failed to parse synthetic model");
    assert_eq!(model.node_count, 1);
    assert_eq!(model.positions.len(), 3);
    assert_eq!(model.indices.len(), 3);
    assert_eq!(model.batches.len(), 1);
    assert_eq!(model.slot_index(0, 0, 0), Some(0));
}
|
||||
Reference in New Issue
Block a user