Refactor documentation structure and add new specifications
- Updated MSH documentation to reflect changes in material, wear, and texture specifications. - Introduced new `render.md` file detailing the render pipeline process. - Removed outdated sections from `runtime-pipeline.md` and redirected to `render.md`. - Added detailed specifications for `Texm` texture format and `WEAR` wear table. - Updated navigation in `mkdocs.yml` to align with new documentation structure.
This commit is contained in:
7
crates/msh-core/Cargo.toml
Normal file
7
crates/msh-core/Cargo.toml
Normal file
@@ -0,0 +1,7 @@
|
||||
[package]
|
||||
name = "msh-core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
nres = { path = "../nres" }
|
||||
14
crates/msh-core/README.md
Normal file
14
crates/msh-core/README.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# msh-core
|
||||
|
||||
Парсер core-части формата `MSH`.
|
||||
|
||||
Покрывает:
|
||||
|
||||
- `Res1`, `Res2`, `Res3`, `Res6`, `Res13` (обязательные);
|
||||
- `Res4`, `Res5`, `Res10` (опциональные);
|
||||
- slot lookup по `node/lod/group`.
|
||||
|
||||
Тесты:
|
||||
|
||||
- прогон по всем `.msh` в `testdata`;
|
||||
- синтетическая минимальная модель.
|
||||
74
crates/msh-core/src/error.rs
Normal file
74
crates/msh-core/src/error.rs
Normal file
@@ -0,0 +1,74 @@
|
||||
use core::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
Nres(nres::error::Error),
|
||||
MissingResource {
|
||||
kind: u32,
|
||||
label: &'static str,
|
||||
},
|
||||
InvalidResourceSize {
|
||||
label: &'static str,
|
||||
size: usize,
|
||||
stride: usize,
|
||||
},
|
||||
InvalidRes2Size {
|
||||
size: usize,
|
||||
},
|
||||
UnsupportedNodeStride {
|
||||
stride: usize,
|
||||
},
|
||||
IndexOutOfBounds {
|
||||
label: &'static str,
|
||||
index: usize,
|
||||
limit: usize,
|
||||
},
|
||||
IntegerOverflow,
|
||||
}
|
||||
|
||||
impl From<nres::error::Error> for Error {
|
||||
fn from(value: nres::error::Error) -> Self {
|
||||
Self::Nres(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Nres(err) => write!(f, "{err}"),
|
||||
Self::MissingResource { kind, label } => {
|
||||
write!(f, "missing required resource type={kind} ({label})")
|
||||
}
|
||||
Self::InvalidResourceSize {
|
||||
label,
|
||||
size,
|
||||
stride,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"invalid {label} size={size}, expected multiple of stride={stride}"
|
||||
)
|
||||
}
|
||||
Self::InvalidRes2Size { size } => {
|
||||
write!(f, "invalid Res2 size={size}, expected >= 140")
|
||||
}
|
||||
Self::UnsupportedNodeStride { stride } => {
|
||||
write!(
|
||||
f,
|
||||
"unsupported Res1 node stride={stride}, expected 38 or 24"
|
||||
)
|
||||
}
|
||||
Self::IndexOutOfBounds {
|
||||
label,
|
||||
index,
|
||||
limit,
|
||||
} => write!(
|
||||
f,
|
||||
"{label} index out of bounds: index={index}, limit={limit}"
|
||||
),
|
||||
Self::IntegerOverflow => write!(f, "integer overflow"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
392
crates/msh-core/src/lib.rs
Normal file
392
crates/msh-core/src/lib.rs
Normal file
@@ -0,0 +1,392 @@
|
||||
pub mod error;

use crate::error::Error;
use std::sync::Arc;

/// Crate-wide result alias over [`error::Error`].
pub type Result<T> = core::result::Result<T, Error>;

// Resource type ids inside the nested NRes archive that makes up one model.
/// Node table; per-node stride is 38 or 24 bytes (carried in entry attr3).
pub const RES1_NODE_TABLE: u32 = 1;
/// Slot table: 0x8C-byte header followed by 68-byte slot records.
pub const RES2_SLOTS: u32 = 2;
/// Vertex positions, three little-endian f32 per vertex.
pub const RES3_POSITIONS: u32 = 3;
/// Optional packed normals, four i8 per vertex.
pub const RES4_NORMALS: u32 = 4;
/// Optional UV channel 0, two i16 per vertex.
pub const RES5_UV0: u32 = 5;
/// Triangle indices, little-endian u16.
pub const RES6_INDICES: u32 = 6;
/// Optional length-prefixed node name strings.
pub const RES10_NAMES: u32 = 10;
/// Draw batches, 20 bytes per record.
pub const RES13_BATCHES: u32 = 13;
|
||||
|
||||
/// One render slot referenced from the node table (68 bytes on disk).
#[derive(Clone, Debug)]
pub struct Slot {
    pub tri_start: u16,
    pub tri_count: u16,
    /// Index of this slot's first batch in `Model::batches`.
    pub batch_start: u16,
    pub batch_count: u16,
    /// Axis-aligned bounding box of the slot's geometry.
    pub aabb_min: [f32; 3],
    pub aabb_max: [f32; 3],
    /// Bounding sphere of the slot's geometry.
    pub sphere_center: [f32; 3],
    pub sphere_radius: f32,
    // Trailing 20 bytes with unknown meaning; preserved verbatim.
    pub opaque: [u32; 5],
}
|
||||
|
||||
/// One draw batch (20-byte little-endian record in Res13).
#[derive(Clone, Debug)]
pub struct Batch {
    pub batch_flags: u16,
    pub material_index: u16,
    // Unknown fields at byte offsets 4 and 6; preserved as-is.
    pub opaque4: u16,
    pub opaque6: u16,
    /// Number of indices this batch draws.
    pub index_count: u16,
    /// Starting offset into `Model::indices`.
    pub index_start: u32,
    // Unknown field at byte offset 14.
    pub opaque14: u16,
    /// Added to each index value before looking up a vertex position.
    pub base_vertex: u32,
}
|
||||
|
||||
/// Fully parsed core `MSH` model.
#[derive(Clone, Debug)]
pub struct Model {
    /// Bytes per node record: 38 (with the slot matrix) or 24 (legacy).
    pub node_stride: usize,
    pub node_count: usize,
    /// Raw Res1 node table; only partially decoded (see `slot_index`).
    pub nodes_raw: Vec<u8>,
    pub slots: Vec<Slot>,
    pub positions: Vec<[f32; 3]>,
    /// Packed per-vertex normals from Res4, when present.
    pub normals: Option<Vec<[i8; 4]>>,
    /// UV channel 0 from Res5, when present.
    pub uv0: Option<Vec<[i16; 2]>>,
    pub indices: Vec<u16>,
    pub batches: Vec<Batch>,
    /// Per-node names from Res10, when present (`None` entries are unnamed).
    pub node_names: Option<Vec<Option<String>>>,
}
|
||||
|
||||
impl Model {
    /// Resolves the slot used by `node_index` at the given LOD (0..3) and
    /// group (0..5), or `None` when nothing should be drawn there.
    ///
    /// Only the 38-byte node layout carries the 3x5 slot matrix; 24-byte
    /// (legacy) nodes always yield `None`. A stored word of `0xFFFF` marks
    /// an empty cell.
    pub fn slot_index(&self, node_index: usize, lod: usize, group: usize) -> Option<usize> {
        if node_index >= self.node_count || lod >= 3 || group >= 5 {
            return None;
        }
        if self.node_stride != 38 {
            return None;
        }
        // The slot matrix starts 8 bytes into the node record: one u16 per
        // cell, laid out row-major as [lod][group].
        let node_off = node_index.checked_mul(self.node_stride)?;
        let matrix_off = node_off.checked_add(8)?;
        let word_off = matrix_off.checked_add((lod * 5 + group) * 2)?;
        let raw = read_u16(&self.nodes_raw, word_off).ok()?;
        if raw == u16::MAX {
            return None;
        }
        let idx = usize::from(raw);
        // Guard against dangling slot references in the node table.
        if idx >= self.slots.len() {
            return None;
        }
        Some(idx)
    }
}
|
||||
|
||||
/// Parses one `MSH` model payload.
///
/// The payload is itself a nested `NRes` archive whose entries are keyed by
/// resource type: Res1/Res2/Res3/Res6/Res13 are required; Res4/Res5/Res10
/// are optional.
///
/// # Errors
/// Fails when a required resource is missing, a resource's size is not a
/// multiple of its record stride, or the node stride is not 38/24.
pub fn parse_model_payload(payload: &[u8]) -> Result<Model> {
    // Hand the payload to nres as owned bytes.
    let archive = nres::Archive::open_bytes(
        Arc::from(payload.to_vec().into_boxed_slice()),
        nres::OpenOptions::default(),
    )?;

    let res1 = read_required(&archive, RES1_NODE_TABLE, "Res1")?;
    let res2 = read_required(&archive, RES2_SLOTS, "Res2")?;
    let res3 = read_required(&archive, RES3_POSITIONS, "Res3")?;
    let res6 = read_required(&archive, RES6_INDICES, "Res6")?;
    let res13 = read_required(&archive, RES13_BATCHES, "Res13")?;

    let res4 = read_optional(&archive, RES4_NORMALS)?;
    let res5 = read_optional(&archive, RES5_UV0)?;
    let res10 = read_optional(&archive, RES10_NAMES)?;

    // Res1: the per-node stride is stored in the entry's attr3 field; only
    // 38 (with slot matrix) and 24 (legacy) layouts are known.
    let node_stride = usize::try_from(res1.meta.attr3).map_err(|_| Error::IntegerOverflow)?;
    if node_stride != 38 && node_stride != 24 {
        return Err(Error::UnsupportedNodeStride {
            stride: node_stride,
        });
    }
    if res1.bytes.len() % node_stride != 0 {
        return Err(Error::InvalidResourceSize {
            label: "Res1",
            size: res1.bytes.len(),
            stride: node_stride,
        });
    }
    let node_count = res1.bytes.len() / node_stride;

    // Res2: a 0x8C-byte header followed by 68-byte slot records.
    if res2.bytes.len() < 0x8C {
        return Err(Error::InvalidRes2Size {
            size: res2.bytes.len(),
        });
    }
    let slot_blob = res2
        .bytes
        .len()
        .checked_sub(0x8C)
        .ok_or(Error::IntegerOverflow)?;
    if slot_blob % 68 != 0 {
        return Err(Error::InvalidResourceSize {
            label: "Res2.slots",
            size: slot_blob,
            stride: 68,
        });
    }
    let slot_count = slot_blob / 68;
    let mut slots = Vec::with_capacity(slot_count);
    for i in 0..slot_count {
        // Offsets below follow the on-disk slot layout documented in `Slot`.
        let off = 0x8Cusize
            .checked_add(i.checked_mul(68).ok_or(Error::IntegerOverflow)?)
            .ok_or(Error::IntegerOverflow)?;
        slots.push(Slot {
            tri_start: read_u16(&res2.bytes, off)?,
            tri_count: read_u16(&res2.bytes, off + 2)?,
            batch_start: read_u16(&res2.bytes, off + 4)?,
            batch_count: read_u16(&res2.bytes, off + 6)?,
            aabb_min: [
                read_f32(&res2.bytes, off + 8)?,
                read_f32(&res2.bytes, off + 12)?,
                read_f32(&res2.bytes, off + 16)?,
            ],
            aabb_max: [
                read_f32(&res2.bytes, off + 20)?,
                read_f32(&res2.bytes, off + 24)?,
                read_f32(&res2.bytes, off + 28)?,
            ],
            sphere_center: [
                read_f32(&res2.bytes, off + 32)?,
                read_f32(&res2.bytes, off + 36)?,
                read_f32(&res2.bytes, off + 40)?,
            ],
            sphere_radius: read_f32(&res2.bytes, off + 44)?,
            opaque: [
                read_u32(&res2.bytes, off + 48)?,
                read_u32(&res2.bytes, off + 52)?,
                read_u32(&res2.bytes, off + 56)?,
                read_u32(&res2.bytes, off + 60)?,
                read_u32(&res2.bytes, off + 64)?,
            ],
        });
    }

    let positions = parse_positions(&res3.bytes)?;
    let indices = parse_u16_array(&res6.bytes, "Res6")?;
    let batches = parse_batches(&res13.bytes)?;

    // Optional per-vertex attributes and node names.
    let normals = match res4 {
        Some(raw) => Some(parse_i8x4_array(&raw.bytes, "Res4")?),
        None => None,
    };
    let uv0 = match res5 {
        Some(raw) => Some(parse_i16x2_array(&raw.bytes, "Res5")?),
        None => None,
    };
    let node_names = match res10 {
        Some(raw) => Some(parse_res10_names(&raw.bytes, node_count)?),
        None => None,
    };

    Ok(Model {
        node_stride,
        node_count,
        nodes_raw: res1.bytes,
        slots,
        positions,
        normals,
        uv0,
        indices,
        batches,
        node_names,
    })
}
|
||||
|
||||
fn parse_positions(data: &[u8]) -> Result<Vec<[f32; 3]>> {
|
||||
if !data.len().is_multiple_of(12) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label: "Res3",
|
||||
size: data.len(),
|
||||
stride: 12,
|
||||
});
|
||||
}
|
||||
let count = data.len() / 12;
|
||||
let mut out = Vec::with_capacity(count);
|
||||
for i in 0..count {
|
||||
let off = i * 12;
|
||||
out.push([
|
||||
read_f32(data, off)?,
|
||||
read_f32(data, off + 4)?,
|
||||
read_f32(data, off + 8)?,
|
||||
]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_batches(data: &[u8]) -> Result<Vec<Batch>> {
|
||||
if !data.len().is_multiple_of(20) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label: "Res13",
|
||||
size: data.len(),
|
||||
stride: 20,
|
||||
});
|
||||
}
|
||||
let count = data.len() / 20;
|
||||
let mut out = Vec::with_capacity(count);
|
||||
for i in 0..count {
|
||||
let off = i * 20;
|
||||
out.push(Batch {
|
||||
batch_flags: read_u16(data, off)?,
|
||||
material_index: read_u16(data, off + 2)?,
|
||||
opaque4: read_u16(data, off + 4)?,
|
||||
opaque6: read_u16(data, off + 6)?,
|
||||
index_count: read_u16(data, off + 8)?,
|
||||
index_start: read_u32(data, off + 10)?,
|
||||
opaque14: read_u16(data, off + 14)?,
|
||||
base_vertex: read_u32(data, off + 16)?,
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_u16_array(data: &[u8], label: &'static str) -> Result<Vec<u16>> {
|
||||
if !data.len().is_multiple_of(2) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 2,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 2);
|
||||
for i in (0..data.len()).step_by(2) {
|
||||
out.push(read_u16(data, i)?);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_i8x4_array(data: &[u8], label: &'static str) -> Result<Vec<[i8; 4]>> {
|
||||
if !data.len().is_multiple_of(4) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 4,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 4);
|
||||
for i in (0..data.len()).step_by(4) {
|
||||
out.push([
|
||||
read_i8(data, i)?,
|
||||
read_i8(data, i + 1)?,
|
||||
read_i8(data, i + 2)?,
|
||||
read_i8(data, i + 3)?,
|
||||
]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn parse_i16x2_array(data: &[u8], label: &'static str) -> Result<Vec<[i16; 2]>> {
|
||||
if !data.len().is_multiple_of(4) {
|
||||
return Err(Error::InvalidResourceSize {
|
||||
label,
|
||||
size: data.len(),
|
||||
stride: 4,
|
||||
});
|
||||
}
|
||||
let mut out = Vec::with_capacity(data.len() / 4);
|
||||
for i in (0..data.len()).step_by(4) {
|
||||
out.push([read_i16(data, i)?, read_i16(data, i + 2)?]);
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
/// Decodes Res10: one length-prefixed display name per node.
///
/// Record layout (little-endian): a `u32` byte length, then `len + 1` bytes
/// of text — the extra byte is presumably a NUL terminator (it is stripped
/// when it equals 0). A zero length yields `None` for that node.
fn parse_res10_names(data: &[u8], node_count: usize) -> Result<Vec<Option<String>>> {
    let mut out = Vec::with_capacity(node_count);
    let mut off = 0usize;
    for _ in 0..node_count {
        let len = usize::try_from(read_u32(data, off)?).map_err(|_| Error::IntegerOverflow)?;
        off = off.checked_add(4).ok_or(Error::IntegerOverflow)?;
        if len == 0 {
            // Unnamed node: no text bytes follow a zero length.
            out.push(None);
            continue;
        }
        // `len` text bytes plus one trailing (terminator) byte.
        let need = len.checked_add(1).ok_or(Error::IntegerOverflow)?;
        let end = off.checked_add(need).ok_or(Error::IntegerOverflow)?;
        let slice = data.get(off..end).ok_or(Error::InvalidResourceSize {
            label: "Res10",
            size: data.len(),
            stride: 1,
        })?;
        // Strip the trailing NUL when present; tolerate records without one.
        let text = if slice.last().copied() == Some(0) {
            &slice[..slice.len().saturating_sub(1)]
        } else {
            slice
        };
        // Names are not guaranteed valid UTF-8, so decode lossily.
        let decoded = String::from_utf8_lossy(text).to_string();
        out.push(Some(decoded));
        off = end;
    }
    Ok(out)
}
|
||||
|
||||
/// A resource pulled out of the nested archive: entry metadata plus the
/// resource's payload bytes.
struct RawResource {
    meta: nres::EntryMeta,
    bytes: Vec<u8>,
}

/// Finds the first entry of the given resource `kind` and reads its bytes.
///
/// Returns `Error::MissingResource` when no entry of that kind exists.
fn read_required(archive: &nres::Archive, kind: u32, label: &'static str) -> Result<RawResource> {
    let id = archive
        .entries()
        .find(|entry| entry.meta.kind == kind)
        .map(|entry| entry.id)
        .ok_or(Error::MissingResource { kind, label })?;
    // Defensive: an id produced by `entries()` is expected to resolve.
    let entry = archive.get(id).ok_or(Error::IndexOutOfBounds {
        label,
        index: usize::try_from(id.0).map_err(|_| Error::IntegerOverflow)?,
        limit: archive.entry_count(),
    })?;
    let data = archive.read(id)?.into_owned();
    Ok(RawResource {
        meta: entry.meta.clone(),
        bytes: data,
    })
}
|
||||
|
||||
/// Like `read_required`, but returns `Ok(None)` when no entry of the given
/// `kind` exists in the archive.
fn read_optional(archive: &nres::Archive, kind: u32) -> Result<Option<RawResource>> {
    let Some(id) = archive
        .entries()
        .find(|entry| entry.meta.kind == kind)
        .map(|entry| entry.id)
    else {
        return Ok(None);
    };
    // Defensive: an id produced by `entries()` is expected to resolve.
    let entry = archive.get(id).ok_or(Error::IndexOutOfBounds {
        label: "optional",
        index: usize::try_from(id.0).map_err(|_| Error::IntegerOverflow)?,
        limit: archive.entry_count(),
    })?;
    let data = archive.read(id)?.into_owned();
    Ok(Some(RawResource {
        meta: entry.meta.clone(),
        bytes: data,
    }))
}
|
||||
|
||||
fn read_u16(data: &[u8], offset: usize) -> Result<u16> {
|
||||
let bytes = data.get(offset..offset + 2).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 2] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(u16::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_i16(data: &[u8], offset: usize) -> Result<i16> {
|
||||
let bytes = data.get(offset..offset + 2).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 2] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(i16::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_i8(data: &[u8], offset: usize) -> Result<i8> {
|
||||
let byte = data.get(offset).copied().ok_or(Error::IntegerOverflow)?;
|
||||
Ok(i8::from_le_bytes([byte]))
|
||||
}
|
||||
|
||||
fn read_u32(data: &[u8], offset: usize) -> Result<u32> {
|
||||
let bytes = data.get(offset..offset + 4).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 4] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(u32::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_f32(data: &[u8], offset: usize) -> Result<f32> {
|
||||
Ok(f32::from_bits(read_u32(data, offset)?))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
296
crates/msh-core/src/tests.rs
Normal file
296
crates/msh-core/src/tests.rs
Normal file
@@ -0,0 +1,296 @@
|
||||
use super::*;
|
||||
use nres::Archive;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Depth-first walk of `root`, appending every regular file to `out`.
/// Unreadable directories are skipped silently.
fn collect_files_recursive(root: &Path, out: &mut Vec<PathBuf>) {
    let entries = match fs::read_dir(root) {
        Ok(entries) => entries,
        Err(_) => return,
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            collect_files_recursive(&path, out);
        } else if path.is_file() {
            out.push(path);
        }
    }
}
|
||||
|
||||
/// Collects every file under the workspace `testdata/` directory that starts
/// with the `NRes` magic, sorted by path for deterministic test runs.
fn nres_test_files() -> Vec<PathBuf> {
    // CARGO_MANIFEST_DIR is crates/msh-core, so testdata sits two levels up.
    let root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("..")
        .join("..")
        .join("testdata");
    let mut files = Vec::new();
    collect_files_recursive(&root, &mut files);
    files.sort();
    files
        .into_iter()
        .filter(|path| {
            // Keep only files whose first four bytes are the NRes magic;
            // unreadable files are dropped.
            fs::read(path)
                .map(|bytes| bytes.get(0..4) == Some(b"NRes"))
                .unwrap_or(false)
        })
        .collect()
}
|
||||
|
||||
/// Case-insensitive check for a `.msh` file extension.
///
/// Compares only the last four bytes instead of allocating a lowercase copy
/// of the whole name on every call (the previous `to_ascii_lowercase`
/// version allocated per entry while scanning archives).
fn is_msh_name(name: &str) -> bool {
    // `get` returns None when len-4 is not a char boundary; such a suffix
    // contains non-ASCII bytes and therefore cannot equal ".msh" anyway.
    name.len() >= 4
        && name
            .get(name.len() - 4..)
            .is_some_and(|tail| tail.eq_ignore_ascii_case(".msh"))
}
|
||||
|
||||
/// Smoke-parses every `.msh` entry in the NRes archives under `testdata`,
/// checking that all slot lookups stay in bounds and that at least one
/// model carries a drawable lod0/group0 batch.
#[test]
fn parse_all_game_msh_models() {
    let archives = nres_test_files();
    if archives.is_empty() {
        // Test assets are not committed; treat their absence as a skip.
        eprintln!("skipping parse_all_game_msh_models: no NRes files in testdata");
        return;
    }

    let mut model_count = 0usize;
    let mut renderable_count = 0usize;
    let mut legacy_stride24_count = 0usize;

    for archive_path in archives {
        let archive = Archive::open_path(&archive_path)
            .unwrap_or_else(|err| panic!("failed to open {}: {err}", archive_path.display()));

        for entry in archive.entries() {
            if !is_msh_name(&entry.meta.name) {
                continue;
            }
            model_count += 1;
            let payload = archive.read(entry.id).unwrap_or_else(|err| {
                panic!(
                    "failed to read model '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            let model = parse_model_payload(payload.as_slice()).unwrap_or_else(|err| {
                panic!(
                    "failed to parse model '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });

            if model.node_stride == 24 {
                legacy_stride24_count += 1;
            }

            // Every resolvable slot reference must point into `model.slots`.
            for node_index in 0..model.node_count {
                for lod in 0..3 {
                    for group in 0..5 {
                        if let Some(slot_idx) = model.slot_index(node_index, lod, group) {
                            assert!(
                                slot_idx < model.slots.len(),
                                "slot index out of bounds in '{}' ({})",
                                entry.meta.name,
                                archive_path.display()
                            );
                        }
                    }
                }
            }

            // A model counts as renderable when some lod0/group0 slot owns a
            // batch whose index range is in bounds and spans >= 1 triangle.
            let mut has_renderable_batch = false;
            for node_index in 0..model.node_count {
                let Some(slot_idx) = model.slot_index(node_index, 0, 0) else {
                    continue;
                };
                let slot = &model.slots[slot_idx];
                let batch_end =
                    usize::from(slot.batch_start).saturating_add(usize::from(slot.batch_count));
                if batch_end > model.batches.len() {
                    continue;
                }
                for batch in &model.batches[usize::from(slot.batch_start)..batch_end] {
                    let index_start = usize::try_from(batch.index_start).unwrap_or(usize::MAX);
                    let index_count = usize::from(batch.index_count);
                    let end = index_start.saturating_add(index_count);
                    if end <= model.indices.len() && index_count >= 3 {
                        has_renderable_batch = true;
                        break;
                    }
                }
                if has_renderable_batch {
                    break;
                }
            }
            if has_renderable_batch {
                renderable_count += 1;
            }
        }
    }

    assert!(model_count > 0, "no .msh entries found");
    assert!(
        renderable_count > 0,
        "no renderable models (lod0/group0) were detected"
    );
    // Sanity check on the counters themselves.
    assert!(
        legacy_stride24_count <= model_count,
        "internal test accounting error"
    );
}
|
||||
|
||||
/// Builds a minimal hand-rolled nested NRes payload (required resources
/// only: Res1/Res2/Res3/Res6/Res13) and checks it parses into the expected
/// one-node, one-slot, one-batch, one-triangle model.
#[test]
fn parse_minimal_synthetic_model() {
    // Nested NRes with required resources only.
    let mut payload = Vec::new();
    payload.extend_from_slice(b"NRes");
    payload.extend_from_slice(&0x100u32.to_le_bytes());
    payload.extend_from_slice(&5u32.to_le_bytes()); // entry_count
    payload.extend_from_slice(&0u32.to_le_bytes()); // total_size placeholder

    // Directory bookkeeping collected while resource payloads are appended.
    let mut resource_offsets = Vec::new();
    let mut resource_sizes = Vec::new();
    let mut resource_types = Vec::new();
    let mut resource_attr3 = Vec::new();
    let mut resource_names = Vec::new();

    // Appends one resource payload (8-byte aligned) and records its
    // directory fields for the trailing directory pass.
    let add_resource = |payload: &mut Vec<u8>,
                        offsets: &mut Vec<u32>,
                        sizes: &mut Vec<u32>,
                        types: &mut Vec<u32>,
                        attr3: &mut Vec<u32>,
                        names: &mut Vec<String>,
                        kind: u32,
                        name: &str,
                        data: &[u8],
                        attr3_val: u32| {
        offsets.push(u32::try_from(payload.len()).expect("offset overflow"));
        payload.extend_from_slice(data);
        while !payload.len().is_multiple_of(8) {
            payload.push(0);
        }
        sizes.push(u32::try_from(data.len()).expect("size overflow"));
        types.push(kind);
        attr3.push(attr3_val);
        names.push(name.to_string());
    };

    // Res1: one 38-byte node whose slot matrix maps [0][0] to slot 0 and
    // leaves the other 14 cells empty (0xFFFF).
    let node = {
        let mut b = vec![0u8; 38];
        // slot[0][0] = 0
        b[8..10].copy_from_slice(&0u16.to_le_bytes());
        for i in 1..15 {
            let off = 8 + i * 2;
            b[off..off + 2].copy_from_slice(&u16::MAX.to_le_bytes());
        }
        b
    };
    // Res2: 0x8C-byte header plus a single 68-byte slot record.
    let mut res2 = vec![0u8; 0x8C + 68];
    res2[0x8C..0x8C + 2].copy_from_slice(&0u16.to_le_bytes()); // tri_start
    res2[0x8C + 2..0x8C + 4].copy_from_slice(&0u16.to_le_bytes()); // tri_count
    res2[0x8C + 4..0x8C + 6].copy_from_slice(&0u16.to_le_bytes()); // batch_start
    res2[0x8C + 6..0x8C + 8].copy_from_slice(&1u16.to_le_bytes()); // batch_count
    // Res3: three vertices forming one triangle in the XY plane.
    let positions = [0f32, 0f32, 0f32, 1f32, 0f32, 0f32, 0f32, 1f32, 0f32]
        .iter()
        .flat_map(|v| v.to_le_bytes())
        .collect::<Vec<_>>();
    // Res6: the triangle's three indices.
    let indices = [0u16, 1, 2]
        .iter()
        .flat_map(|v| v.to_le_bytes())
        .collect::<Vec<_>>();
    // Res13: one 20-byte batch drawing those three indices.
    let batch = {
        let mut b = vec![0u8; 20];
        b[0..2].copy_from_slice(&0u16.to_le_bytes());
        b[2..4].copy_from_slice(&0u16.to_le_bytes());
        b[8..10].copy_from_slice(&3u16.to_le_bytes()); // index_count
        b[10..14].copy_from_slice(&0u32.to_le_bytes()); // index_start
        b[16..20].copy_from_slice(&0u32.to_le_bytes()); // base_vertex
        b
    };

    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES1_NODE_TABLE,
        "Res1",
        &node,
        38,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES2_SLOTS,
        "Res2",
        &res2,
        68,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES3_POSITIONS,
        "Res3",
        &positions,
        12,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES6_INDICES,
        "Res6",
        &indices,
        2,
    );
    add_resource(
        &mut payload,
        &mut resource_offsets,
        &mut resource_sizes,
        &mut resource_types,
        &mut resource_attr3,
        &mut resource_names,
        RES13_BATCHES,
        "Res13",
        &batch,
        20,
    );

    // Trailing directory: one 64-byte record per resource.
    let directory_offset = payload.len();
    for i in 0..resource_types.len() {
        payload.extend_from_slice(&resource_types[i].to_le_bytes());
        payload.extend_from_slice(&1u32.to_le_bytes()); // attr1
        payload.extend_from_slice(&0u32.to_le_bytes()); // attr2
        payload.extend_from_slice(&resource_sizes[i].to_le_bytes());
        payload.extend_from_slice(&resource_attr3[i].to_le_bytes());
        let mut name_raw = [0u8; 36];
        let bytes = resource_names[i].as_bytes();
        name_raw[..bytes.len()].copy_from_slice(bytes);
        payload.extend_from_slice(&name_raw);
        payload.extend_from_slice(&resource_offsets[i].to_le_bytes());
        payload.extend_from_slice(&(i as u32).to_le_bytes()); // sort index
    }
    // Patch the total-size placeholder written in the header.
    let total_size = u32::try_from(payload.len()).expect("size overflow");
    payload[12..16].copy_from_slice(&total_size.to_le_bytes());
    assert_eq!(
        directory_offset + resource_types.len() * 64,
        payload.len(),
        "synthetic nested NRes layout invalid"
    );

    let model = parse_model_payload(&payload).expect("failed to parse synthetic model");
    assert_eq!(model.node_count, 1);
    assert_eq!(model.positions.len(), 3);
    assert_eq!(model.indices.len(), 3);
    assert_eq!(model.batches.len(), 1);
    assert_eq!(model.slot_index(0, 0, 0), Some(0));
}
|
||||
8
crates/render-core/Cargo.toml
Normal file
8
crates/render-core/Cargo.toml
Normal file
@@ -0,0 +1,8 @@
|
||||
[package]
|
||||
name = "render-core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
msh-core = { path = "../msh-core" }
|
||||
nres = { path = "../nres" }
|
||||
14
crates/render-core/README.md
Normal file
14
crates/render-core/README.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# render-core
|
||||
|
||||
CPU-подготовка draw-данных для моделей `MSH`.
|
||||
|
||||
Покрывает:
|
||||
|
||||
- обход `node -> slot -> batch`;
|
||||
- раскрытие индексов в triangle-list (`Vec<[f32;3]>`);
|
||||
- расчёт bounds по вершинам.
|
||||
|
||||
Тесты:
|
||||
|
||||
- построение рендер-сеток на реальных `.msh` из `testdata`;
|
||||
- unit-test bounds.
|
||||
84
crates/render-core/src/lib.rs
Normal file
84
crates/render-core/src/lib.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
use msh_core::Model;
|
||||
|
||||
/// Expanded triangle-list geometry ready for `glDrawArrays`-style drawing.
#[derive(Clone, Debug)]
pub struct RenderMesh {
    /// Flat vertex positions; each consecutive three entries form a triangle.
    pub vertices: Vec<[f32; 3]>,
    /// Number of source batches that contributed geometry.
    pub batch_count: usize,
}

impl RenderMesh {
    /// Number of complete triangles in `vertices`.
    pub fn triangle_count(&self) -> usize {
        self.vertices.len() / 3
    }
}
|
||||
|
||||
/// Builds an expanded triangle list for a specific LOD/group pair.
|
||||
///
|
||||
/// The output is suitable for simple `glDrawArrays(GL_TRIANGLES, ...)` paths.
|
||||
pub fn build_render_mesh(model: &Model, lod: usize, group: usize) -> RenderMesh {
|
||||
let mut vertices = Vec::new();
|
||||
let mut batch_count = 0usize;
|
||||
|
||||
for node_index in 0..model.node_count {
|
||||
let Some(slot_idx) = model.slot_index(node_index, lod, group) else {
|
||||
continue;
|
||||
};
|
||||
let Some(slot) = model.slots.get(slot_idx) else {
|
||||
continue;
|
||||
};
|
||||
let batch_start = usize::from(slot.batch_start);
|
||||
let batch_end = batch_start.saturating_add(usize::from(slot.batch_count));
|
||||
if batch_end > model.batches.len() {
|
||||
continue;
|
||||
}
|
||||
|
||||
for batch in &model.batches[batch_start..batch_end] {
|
||||
let index_start = usize::try_from(batch.index_start).unwrap_or(usize::MAX);
|
||||
let index_count = usize::from(batch.index_count);
|
||||
let index_end = index_start.saturating_add(index_count);
|
||||
if index_end > model.indices.len() || index_count < 3 {
|
||||
continue;
|
||||
}
|
||||
|
||||
for &idx in &model.indices[index_start..index_end] {
|
||||
let final_idx_u64 = u64::from(batch.base_vertex).saturating_add(u64::from(idx));
|
||||
let Ok(final_idx) = usize::try_from(final_idx_u64) else {
|
||||
continue;
|
||||
};
|
||||
let Some(pos) = model.positions.get(final_idx) else {
|
||||
continue;
|
||||
};
|
||||
vertices.push(*pos);
|
||||
}
|
||||
batch_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
RenderMesh {
|
||||
vertices,
|
||||
batch_count,
|
||||
}
|
||||
}
|
||||
|
||||
/// Component-wise min/max over `vertices`; `None` for an empty slice.
///
/// NaN components never replace an established bound (plain `<`/`>`
/// comparisons are false for NaN).
pub fn compute_bounds(vertices: &[[f32; 3]]) -> Option<([f32; 3], [f32; 3])> {
    let (head, tail) = vertices.split_first()?;
    let mut lo = *head;
    let mut hi = *head;

    for vertex in tail {
        for (axis, &value) in vertex.iter().enumerate() {
            if value < lo[axis] {
                lo[axis] = value;
            }
            if value > hi[axis] {
                hi[axis] = value;
            }
        }
    }

    Some((lo, hi))
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
101
crates/render-core/src/tests.rs
Normal file
101
crates/render-core/src/tests.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
use super::*;
|
||||
use msh_core::parse_model_payload;
|
||||
use nres::Archive;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
// NOTE(review): duplicated from msh-core's test support — consider a shared
// test-util crate if a third copy appears.
/// Depth-first walk of `root`, appending every regular file to `out`;
/// unreadable directories are skipped silently.
fn collect_files_recursive(root: &Path, out: &mut Vec<PathBuf>) {
    let Ok(entries) = fs::read_dir(root) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            collect_files_recursive(&path, out);
        } else if path.is_file() {
            out.push(path);
        }
    }
}
|
||||
|
||||
/// Collects every file under the workspace `testdata/` directory that starts
/// with the `NRes` magic, sorted by path for deterministic test runs.
fn nres_test_files() -> Vec<PathBuf> {
    // CARGO_MANIFEST_DIR is crates/render-core, so testdata sits two levels up.
    let root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("..")
        .join("..")
        .join("testdata");
    let mut files = Vec::new();
    collect_files_recursive(&root, &mut files);
    files.sort();
    files
        .into_iter()
        .filter(|path| {
            // Keep only files whose first four bytes are the NRes magic;
            // unreadable files are dropped.
            fs::read(path)
                .map(|bytes| bytes.get(0..4) == Some(b"NRes"))
                .unwrap_or(false)
        })
        .collect()
}
|
||||
|
||||
/// Builds lod0/group0 render meshes for every `.msh` model under `testdata`
/// and checks that non-empty meshes always have computable bounds.
#[test]
fn build_render_mesh_for_real_models() {
    let archives = nres_test_files();
    if archives.is_empty() {
        // Test assets are not committed; treat their absence as a skip.
        eprintln!("skipping build_render_mesh_for_real_models: no NRes files in testdata");
        return;
    }

    let mut models_checked = 0usize;
    let mut meshes_non_empty = 0usize;
    let mut bounds_non_empty = 0usize;

    for archive_path in archives {
        let archive = Archive::open_path(&archive_path)
            .unwrap_or_else(|err| panic!("failed to open {}: {err}", archive_path.display()));
        for entry in archive.entries() {
            if !entry.meta.name.to_ascii_lowercase().ends_with(".msh") {
                continue;
            }
            models_checked += 1;
            let payload = archive.read(entry.id).unwrap_or_else(|err| {
                panic!(
                    "failed to read model '{}' from {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            let model = parse_model_payload(payload.as_slice()).unwrap_or_else(|err| {
                panic!(
                    "failed to parse model '{}' from {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            let mesh = build_render_mesh(&model, 0, 0);
            if !mesh.vertices.is_empty() {
                meshes_non_empty += 1;
            }
            if compute_bounds(&mesh.vertices).is_some() {
                bounds_non_empty += 1;
            }
        }
    }

    assert!(models_checked > 0, "no MSH models found");
    assert!(
        meshes_non_empty > 0,
        "all generated render meshes are empty"
    );
    // compute_bounds returns Some iff the vertex list is non-empty, so the
    // two counters must agree.
    assert_eq!(
        meshes_non_empty, bounds_non_empty,
        "bounds must be available for every non-empty mesh"
    );
}
|
||||
|
||||
/// Unit test: bounds of an empty slice are `None`; a small fixture produces
/// the expected component-wise min/max.
#[test]
fn compute_bounds_handles_empty_and_non_empty() {
    assert!(compute_bounds(&[]).is_none());
    let bounds = compute_bounds(&[[1.0, 2.0, 3.0], [-2.0, 5.0, 0.5], [0.0, -1.0, 9.0]])
        .expect("bounds expected");
    assert_eq!(bounds.0, [-2.0, -1.0, 0.5]);
    assert_eq!(bounds.1, [1.0, 5.0, 9.0]);
}
|
||||
20
crates/render-demo/Cargo.toml
Normal file
20
crates/render-demo/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "render-demo"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
demo = ["dep:sdl2", "dep:glow"]
|
||||
|
||||
[dependencies]
|
||||
msh-core = { path = "../msh-core" }
|
||||
nres = { path = "../nres" }
|
||||
render-core = { path = "../render-core" }
|
||||
sdl2 = { version = "0.37", optional = true, default-features = false, features = ["bundled", "static-link"] }
|
||||
glow = { version = "0.16", optional = true }
|
||||
|
||||
[[bin]]
|
||||
name = "parkan-render-demo"
|
||||
path = "src/main.rs"
|
||||
required-features = ["demo"]
|
||||
30
crates/render-demo/README.md
Normal file
30
crates/render-demo/README.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# render-demo
|
||||
|
||||
Тестовый рендерер Parkan-моделей на Rust (`SDL2 + OpenGL ES 2.0`).
|
||||
|
||||
## Назначение
|
||||
|
||||
- Проверить, что `nres + msh-core + render-core` дают рабочий draw-path на реальных ассетах.
|
||||
- Служить минимальным reference-приложением.
|
||||
|
||||
## Запуск
|
||||
|
||||
```bash
|
||||
cargo run -p render-demo --features demo -- \
|
||||
--archive "testdata/Parkan - Iron Strategy/animals.rlb" \
|
||||
--model "A_L_01.msh" \
|
||||
--lod 0 \
|
||||
--group 0
|
||||
```
|
||||
|
||||
Параметры:
|
||||
|
||||
- `--archive` (обязательный): NRes-архив с `.msh` entry.
|
||||
- `--model` (опционально): имя модели; если не задано, берётся первая `.msh`.
|
||||
- `--lod` (опционально, default `0`).
|
||||
- `--group` (опционально, default `0`).
|
||||
|
||||
## Ограничения
|
||||
|
||||
- Рендер только геометрии (без материалов/текстур/FX).
|
||||
- Вывод через `glDrawArrays(GL_TRIANGLES)` из расширенного triangle-list.
|
||||
4
crates/render-demo/build.rs
Normal file
4
crates/render-demo/build.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
/// Build script: on Windows, additionally link `advapi32`.
/// NOTE(review): presumably required by the statically linked, bundled SDL2
/// (registry/crypto APIs) — confirm against the SDL2 build configuration.
fn main() {
    // The attribute makes the println! a no-op on non-Windows targets.
    #[cfg(windows)]
    println!("cargo:rustc-link-lib=advapi32");
}
|
||||
113
crates/render-demo/src/lib.rs
Normal file
113
crates/render-demo/src/lib.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
use msh_core::{parse_model_payload, Model};
|
||||
use nres::Archive;
|
||||
use std::path::Path;
|
||||
|
||||
/// Errors produced while locating and parsing a model inside an NRes archive.
#[derive(Debug)]
pub enum Error {
    /// Failure bubbled up from the `nres` archive layer.
    Nres(nres::error::Error),
    /// Failure bubbled up from the `msh-core` parser.
    Msh(msh_core::error::Error),
    /// The archive contains no `.msh` entries at all.
    NoMshEntries,
    /// A specific model name was requested but no entry matched it.
    ModelNotFound(String),
}

impl From<nres::error::Error> for Error {
    fn from(value: nres::error::Error) -> Self {
        Self::Nres(value)
    }
}

impl From<msh_core::error::Error> for Error {
    fn from(value: msh_core::error::Error) -> Self {
        Self::Msh(value)
    }
}

/// Convenience alias used throughout this crate.
pub type Result<T> = core::result::Result<T, Error>;
|
||||
|
||||
pub fn load_model_from_archive(path: &Path, model_name: Option<&str>) -> Result<Model> {
|
||||
let archive = Archive::open_path(path)?;
|
||||
let mut msh_entries = Vec::new();
|
||||
for entry in archive.entries() {
|
||||
if entry.meta.name.to_ascii_lowercase().ends_with(".msh") {
|
||||
msh_entries.push((entry.id, entry.meta.name.clone()));
|
||||
}
|
||||
}
|
||||
if msh_entries.is_empty() {
|
||||
return Err(Error::NoMshEntries);
|
||||
}
|
||||
|
||||
let target_id = if let Some(name) = model_name {
|
||||
msh_entries
|
||||
.iter()
|
||||
.find(|(_, n)| n.eq_ignore_ascii_case(name))
|
||||
.map(|(id, _)| *id)
|
||||
.ok_or_else(|| Error::ModelNotFound(name.to_string()))?
|
||||
} else {
|
||||
msh_entries[0].0
|
||||
};
|
||||
|
||||
let payload = archive.read(target_id)?;
|
||||
Ok(parse_model_payload(payload.as_slice())?)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::{Path, PathBuf};

    /// Recursively appends every regular file under `root` to `out`.
    /// Directories that cannot be read are skipped silently (best-effort).
    fn collect_files_recursive(root: &Path, out: &mut Vec<PathBuf>) {
        let Ok(entries) = fs::read_dir(root) else {
            return;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                collect_files_recursive(&path, out);
            } else if path.is_file() {
                out.push(path);
            }
        }
    }

    /// Finds the first NRes archive under the workspace `testdata` directory
    /// that contains at least one `.msh` entry, or `None` when no suitable
    /// test asset is available.
    fn archive_with_msh() -> Option<PathBuf> {
        let root = Path::new(env!("CARGO_MANIFEST_DIR"))
            .join("..")
            .join("..")
            .join("testdata");
        let mut files = Vec::new();
        collect_files_recursive(&root, &mut files);
        // Sort so the picked archive is deterministic across filesystems.
        files.sort();
        for path in files {
            let Ok(bytes) = fs::read(&path) else {
                continue;
            };
            // Cheap 4-byte magic check before paying for a full archive open.
            if bytes.get(0..4) != Some(b"NRes") {
                continue;
            }
            let Ok(archive) = Archive::open_path(&path) else {
                continue;
            };
            if archive
                .entries()
                .any(|entry| entry.meta.name.to_ascii_lowercase().ends_with(".msh"))
            {
                return Some(path);
            }
        }
        None
    }

    /// Smoke test: load the first model from a real archive and check that
    /// the parsed geometry is non-trivial. Skips when no assets are present.
    #[test]
    fn load_model_from_real_archive() {
        let Some(path) = archive_with_msh() else {
            eprintln!("skipping load_model_from_real_archive: no .msh archives in testdata");
            return;
        };
        let model = load_model_from_archive(&path, None)
            .unwrap_or_else(|err| panic!("failed to load model from {}: {err:?}", path.display()));
        assert!(model.node_count > 0);
        assert!(!model.positions.is_empty());
        assert!(!model.indices.is_empty());
    }
}
|
||||
357
crates/render-demo/src/main.rs
Normal file
357
crates/render-demo/src/main.rs
Normal file
@@ -0,0 +1,357 @@
|
||||
use glow::HasContext as _;
|
||||
use render_core::{build_render_mesh, compute_bounds};
|
||||
use render_demo::load_model_from_archive;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Instant;
|
||||
|
||||
/// Parsed command-line options for the demo.
struct Args {
    /// Path to the NRes archive containing `.msh` entries (required).
    archive: PathBuf,
    /// Optional model name; the first `.msh` entry is used when `None`.
    model: Option<String>,
    /// LOD index to render (default 0).
    lod: usize,
    /// Group index to render (default 0).
    group: usize,
}
|
||||
|
||||
fn parse_args() -> Result<Args, String> {
|
||||
let mut archive = None;
|
||||
let mut model = None;
|
||||
let mut lod = 0usize;
|
||||
let mut group = 0usize;
|
||||
|
||||
let mut it = std::env::args().skip(1);
|
||||
while let Some(arg) = it.next() {
|
||||
match arg.as_str() {
|
||||
"--archive" => {
|
||||
let value = it
|
||||
.next()
|
||||
.ok_or_else(|| String::from("missing value for --archive"))?;
|
||||
archive = Some(PathBuf::from(value));
|
||||
}
|
||||
"--model" => {
|
||||
let value = it
|
||||
.next()
|
||||
.ok_or_else(|| String::from("missing value for --model"))?;
|
||||
model = Some(value);
|
||||
}
|
||||
"--lod" => {
|
||||
let value = it
|
||||
.next()
|
||||
.ok_or_else(|| String::from("missing value for --lod"))?;
|
||||
lod = value
|
||||
.parse::<usize>()
|
||||
.map_err(|_| String::from("invalid --lod value"))?;
|
||||
}
|
||||
"--group" => {
|
||||
let value = it
|
||||
.next()
|
||||
.ok_or_else(|| String::from("missing value for --group"))?;
|
||||
group = value
|
||||
.parse::<usize>()
|
||||
.map_err(|_| String::from("invalid --group value"))?;
|
||||
}
|
||||
"--help" | "-h" => {
|
||||
print_help();
|
||||
std::process::exit(0);
|
||||
}
|
||||
other => {
|
||||
return Err(format!("unknown argument: {other}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let archive = archive.ok_or_else(|| String::from("missing required --archive"))?;
|
||||
Ok(Args {
|
||||
archive,
|
||||
model,
|
||||
lod,
|
||||
group,
|
||||
})
|
||||
}
|
||||
|
||||
/// Prints a one-line usage synopsis to stderr.
fn print_help() {
    eprintln!("parkan-render-demo --archive <path> [--model <name.msh>] [--lod N] [--group N]");
}
|
||||
|
||||
/// Demo entry point: loads a model from an NRes archive, uploads its
/// triangle list to a VBO, and spins it in a window until Quit/Escape.
/// Exit codes: 2 for CLI errors, 1 for load/render-setup errors.
fn main() {
    let args = match parse_args() {
        Ok(v) => v,
        Err(err) => {
            eprintln!("{err}");
            print_help();
            std::process::exit(2);
        }
    };

    let model = match load_model_from_archive(&args.archive, args.model.as_deref()) {
        Ok(v) => v,
        Err(err) => {
            eprintln!("failed to load model: {err:?}");
            std::process::exit(1);
        }
    };

    // Expand the model into a flat triangle list for the chosen lod/group;
    // an empty mesh is reported as a user-facing error.
    let mesh = build_render_mesh(&model, args.lod, args.group);
    if mesh.vertices.is_empty() {
        eprintln!(
            "model has no renderable triangles for lod={} group={}",
            args.lod, args.group
        );
        std::process::exit(1);
    }
    let Some((bounds_min, bounds_max)) = compute_bounds(&mesh.vertices) else {
        eprintln!("failed to compute mesh bounds");
        std::process::exit(1);
    };

    // Frame the model: camera distance derives from half the AABB diagonal,
    // clamped so tiny models are not rendered from inside.
    let center = [
        0.5 * (bounds_min[0] + bounds_max[0]),
        0.5 * (bounds_min[1] + bounds_max[1]),
        0.5 * (bounds_min[2] + bounds_max[2]),
    ];
    let extent = [
        bounds_max[0] - bounds_min[0],
        bounds_max[1] - bounds_min[1],
        bounds_max[2] - bounds_min[2],
    ];
    let radius =
        (extent[0] * extent[0] + extent[1] * extent[1] + extent[2] * extent[2]).sqrt() * 0.5;
    let camera_distance = (radius * 2.5).max(2.0);

    let sdl = sdl2::init().expect("failed to init SDL2");
    let video = sdl.video().expect("failed to init SDL2 video");

    // GL attributes must be configured before the window is created.
    {
        let gl_attr = video.gl_attr();
        gl_attr.set_context_profile(sdl2::video::GLProfile::GLES);
        gl_attr.set_context_version(2, 0);
        gl_attr.set_depth_size(24);
        gl_attr.set_double_buffer(true);
    }

    let window = video
        .window("Parkan Render Demo (SDL2 + OpenGL ES 2.0)", 1280, 720)
        .opengl()
        .resizable()
        .build()
        .expect("failed to create window");

    let gl_ctx = window
        .gl_create_context()
        .expect("failed to create OpenGL context");
    window
        .gl_make_current(&gl_ctx)
        .expect("failed to make GL context current");
    // Best-effort vsync; failure is non-fatal.
    let _ = video.gl_set_swap_interval(1);

    // Flatten [[x, y, z]; N] into one contiguous f32 buffer for glBufferData.
    let mut vertices_flat = Vec::with_capacity(mesh.vertices.len() * 3);
    for pos in &mesh.vertices {
        vertices_flat.extend_from_slice(pos);
    }

    // The loader closure borrows `video`, which outlives `gl` in this scope.
    let gl = unsafe {
        glow::Context::from_loader_function(|name| video.gl_get_proc_address(name) as *const _)
    };

    let program = unsafe { create_program(&gl).expect("failed to create shader program") };
    let u_mvp = unsafe { gl.get_uniform_location(program, "u_mvp") };
    let a_pos = unsafe { gl.get_attrib_location(program, "a_pos") };
    let a_pos = a_pos.expect("shader attribute a_pos is missing");

    // Upload static vertex data once; it does not change per frame.
    let vbo = unsafe { gl.create_buffer().expect("failed to create VBO") };
    unsafe {
        gl.bind_buffer(glow::ARRAY_BUFFER, Some(vbo));
        gl.buffer_data_u8_slice(
            glow::ARRAY_BUFFER,
            cast_slice_u8(&vertices_flat),
            glow::STATIC_DRAW,
        );
        gl.bind_buffer(glow::ARRAY_BUFFER, None);
    }

    let mut events = sdl.event_pump().expect("failed to get SDL event pump");
    let start = Instant::now();

    'main_loop: loop {
        // Drain pending events; window close or Escape ends the loop.
        for event in events.poll_iter() {
            match event {
                sdl2::event::Event::Quit { .. } => break 'main_loop,
                sdl2::event::Event::KeyDown {
                    keycode: Some(sdl2::keyboard::Keycode::Escape),
                    ..
                } => break 'main_loop,
                _ => {}
            }
        }

        let elapsed = start.elapsed().as_secs_f32();
        let (w, h) = window.size();
        // Clamp so a degenerate window never yields a zero/negative aspect.
        let aspect = (w as f32 / (h.max(1) as f32)).max(0.01);

        // mvp = proj * view * rotation * center-shift (column-major chain).
        let proj = mat4_perspective(60.0_f32.to_radians(), aspect, 0.01, camera_distance * 10.0);
        let view = mat4_translation(0.0, 0.0, -camera_distance);
        let center_shift = mat4_translation(-center[0], -center[1], -center[2]);
        let rot = mat4_rotation_y(elapsed * 0.35);
        let model_m = mat4_mul(&rot, &center_shift);
        let vp = mat4_mul(&view, &model_m);
        let mvp = mat4_mul(&proj, &vp);

        unsafe {
            gl.viewport(0, 0, w as i32, h as i32);
            gl.enable(glow::DEPTH_TEST);
            gl.clear_color(0.06, 0.08, 0.12, 1.0);
            gl.clear(glow::COLOR_BUFFER_BIT | glow::DEPTH_BUFFER_BIT);

            gl.use_program(Some(program));
            gl.uniform_matrix_4_f32_slice(u_mvp.as_ref(), false, &mvp);

            // 3 floats per vertex, tightly packed (stride = 12 bytes).
            gl.bind_buffer(glow::ARRAY_BUFFER, Some(vbo));
            gl.enable_vertex_attrib_array(a_pos);
            gl.vertex_attrib_pointer_f32(a_pos, 3, glow::FLOAT, false, 12, 0);
            gl.draw_arrays(
                glow::TRIANGLES,
                0,
                i32::try_from(mesh.vertices.len()).unwrap_or(i32::MAX),
            );
            gl.disable_vertex_attrib_array(a_pos);
            gl.bind_buffer(glow::ARRAY_BUFFER, None);
            gl.use_program(None);
        }

        window.gl_swap_window();
    }

    // Explicit GL resource cleanup before the context is dropped.
    unsafe {
        gl.delete_buffer(vbo);
        gl.delete_program(program);
    }
}
|
||||
|
||||
/// Compiles the fixed vertex/fragment shader pair and links them into a GL
/// program. On every failure path the shader objects and the program are
/// deleted, so no GL objects leak; on success the shaders are detached and
/// deleted, leaving only the linked program alive.
///
/// # Safety
///
/// A current OpenGL (ES 2.0) context must be bound on the calling thread for
/// the whole call.
unsafe fn create_program(gl: &glow::Context) -> Result<glow::NativeProgram, String> {
    // Minimal GLES2 pipeline: transform by u_mvp, constant light-blue color.
    let vs_src = r#"
        attribute vec3 a_pos;
        uniform mat4 u_mvp;
        void main() {
            gl_Position = u_mvp * vec4(a_pos, 1.0);
        }
    "#;

    let fs_src = r#"
        precision mediump float;
        void main() {
            gl_FragColor = vec4(0.85, 0.90, 1.00, 1.0);
        }
    "#;

    let program = gl.create_program().map_err(|e| e.to_string())?;
    let vs = gl
        .create_shader(glow::VERTEX_SHADER)
        .map_err(|e| e.to_string())?;
    let fs = gl
        .create_shader(glow::FRAGMENT_SHADER)
        .map_err(|e| e.to_string())?;

    gl.shader_source(vs, vs_src);
    gl.compile_shader(vs);
    if !gl.get_shader_compile_status(vs) {
        let log = gl.get_shader_info_log(vs);
        // Free both shaders and the program before bailing out.
        gl.delete_shader(vs);
        gl.delete_shader(fs);
        gl.delete_program(program);
        return Err(format!("vertex shader compile failed: {log}"));
    }

    gl.shader_source(fs, fs_src);
    gl.compile_shader(fs);
    if !gl.get_shader_compile_status(fs) {
        let log = gl.get_shader_info_log(fs);
        gl.delete_shader(vs);
        gl.delete_shader(fs);
        gl.delete_program(program);
        return Err(format!("fragment shader compile failed: {log}"));
    }

    gl.attach_shader(program, vs);
    gl.attach_shader(program, fs);
    gl.link_program(program);

    // Shader objects are no longer needed once the program is linked.
    gl.detach_shader(program, vs);
    gl.detach_shader(program, fs);
    gl.delete_shader(vs);
    gl.delete_shader(fs);

    if !gl.get_program_link_status(program) {
        let log = gl.get_program_info_log(program);
        gl.delete_program(program);
        return Err(format!("program link failed: {log}"));
    }

    Ok(program)
}
|
||||
|
||||
/// Reinterprets any slice as raw bytes (used to feed `glBufferData`).
///
/// NOTE(review): only sound for `T` without padding bytes; here it is called
/// with `f32` data, which is fine — confirm before reusing with arbitrary `T`.
fn cast_slice_u8<T>(slice: &[T]) -> &[u8] {
    // SAFETY: pointer and byte length come from a valid slice, `u8` has
    // alignment 1, and the returned lifetime is tied to the input borrow.
    unsafe { std::slice::from_raw_parts(slice.as_ptr() as *const u8, std::mem::size_of_val(slice)) }
}
|
||||
|
||||
/// Returns the 4x4 identity matrix (column-major, like the other mat4 helpers).
fn mat4_identity() -> [f32; 16] {
    let mut m = [0.0f32; 16];
    for d in 0..4 {
        m[d * 4 + d] = 1.0;
    }
    m
}
|
||||
|
||||
/// Builds a column-major translation matrix for the offset (x, y, z);
/// the translation lives in the fourth column (indices 12..15).
fn mat4_translation(x: f32, y: f32, z: f32) -> [f32; 16] {
    [
        1.0, 0.0, 0.0, 0.0, //
        0.0, 1.0, 0.0, 0.0, //
        0.0, 0.0, 1.0, 0.0, //
        x, y, z, 1.0, //
    ]
}
|
||||
|
||||
/// Column-major rotation about the Y axis by `rad` radians.
fn mat4_rotation_y(rad: f32) -> [f32; 16] {
    let cos_t = rad.cos();
    let sin_t = rad.sin();
    // Start from the identity and overwrite the four X/Z-plane cells.
    let mut m = [
        1.0, 0.0, 0.0, 0.0, //
        0.0, 1.0, 0.0, 0.0, //
        0.0, 0.0, 1.0, 0.0, //
        0.0, 0.0, 0.0, 1.0, //
    ];
    m[0] = cos_t;
    m[2] = -sin_t;
    m[8] = sin_t;
    m[10] = cos_t;
    m
}
|
||||
|
||||
/// Right-handed perspective projection (column-major), with the same cell
/// layout as `gluPerspective`: `fovy` is the vertical field of view in
/// radians, `near`/`far` the clip-plane distances.
fn mat4_perspective(fovy: f32, aspect: f32, near: f32, far: f32) -> [f32; 16] {
    let f = 1.0 / (0.5 * fovy).tan();
    let nf = 1.0 / (near - far);
    // All other cells are zero; only five entries are populated.
    let mut m = [0.0f32; 16];
    m[0] = f / aspect;
    m[5] = f;
    m[10] = (far + near) * nf;
    m[11] = -1.0;
    m[14] = (2.0 * far * near) * nf;
    m
}
|
||||
|
||||
/// Multiplies two column-major 4x4 matrices, returning `a * b`.
fn mat4_mul(a: &[f32; 16], b: &[f32; 16]) -> [f32; 16] {
    let mut product = [0.0f32; 16];
    for (idx, cell) in product.iter_mut().enumerate() {
        let (col, row) = (idx / 4, idx % 4);
        // Dot the `row`-th row of `a` with the `col`-th column of `b`.
        *cell = (0..4).map(|k| a[k * 4 + row] * b[col * 4 + k]).sum();
    }
    product
}
|
||||
7
crates/texm/Cargo.toml
Normal file
7
crates/texm/Cargo.toml
Normal file
@@ -0,0 +1,7 @@
|
||||
[package]
|
||||
name = "texm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
nres = { path = "../nres" }
|
||||
15
crates/texm/README.md
Normal file
15
crates/texm/README.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# texm
|
||||
|
||||
Парсер формата текстур `Texm`.
|
||||
|
||||
Покрывает:
|
||||
|
||||
- header (`width/height/mipCount/flags/format`);
|
||||
- core size расчёт;
|
||||
- optional `Page` chunk;
|
||||
- строгую валидацию layout.
|
||||
|
||||
Тесты:
|
||||
|
||||
- прогон по реальным `Texm` из `testdata`;
|
||||
- синтетические edge-cases (indexed + page, minimal rgba).
|
||||
61
crates/texm/src/error.rs
Normal file
61
crates/texm/src/error.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
use core::fmt;
|
||||
|
||||
/// Validation and decoding errors for the `Texm` texture format.
#[derive(Debug)]
pub enum Error {
    /// Payload is shorter than the fixed 32-byte header.
    HeaderTooSmall {
        size: usize,
    },
    /// First dword is not the `Texm` magic.
    InvalidMagic {
        got: u32,
    },
    /// Width or height is zero.
    InvalidDimensions {
        width: u32,
        height: u32,
    },
    /// A mip count of zero is not allowed.
    InvalidMipCount {
        mip_count: u32,
    },
    /// Format code outside the known set (0/565/556/4444/88/888/8888).
    UnknownFormat {
        format: u32,
    },
    /// An offset/size computation overflowed (also used for short reads).
    IntegerOverflow,
    /// Declared core pixel data extends past the end of the payload.
    CoreDataOutOfBounds {
        expected_end: usize,
        actual_size: usize,
    },
    /// Bytes remain after the core data but do not start with `Page`.
    InvalidPageMagic,
    /// `Page` chunk length does not match its declared rect count.
    InvalidPageSize {
        expected: usize,
        actual: usize,
    },
}
|
||||
|
||||
/// Human-readable, single-line rendering of each error variant.
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::HeaderTooSmall { size } => {
                write!(f, "Texm payload too small for header: {size}")
            }
            Self::InvalidMagic { got } => write!(f, "invalid Texm magic: 0x{got:08X}"),
            Self::InvalidDimensions { width, height } => {
                write!(f, "invalid Texm dimensions: {width}x{height}")
            }
            Self::InvalidMipCount { mip_count } => write!(f, "invalid Texm mip_count={mip_count}"),
            Self::UnknownFormat { format } => write!(f, "unknown Texm format={format}"),
            Self::IntegerOverflow => write!(f, "integer overflow"),
            Self::CoreDataOutOfBounds {
                expected_end,
                actual_size,
            } => write!(
                f,
                "Texm core data out of bounds: expected_end={expected_end}, actual_size={actual_size}"
            ),
            Self::InvalidPageMagic => write!(f, "Texm tail exists but Page magic is missing"),
            Self::InvalidPageSize { expected, actual } => {
                write!(f, "invalid Page chunk size: expected={expected}, actual={actual}")
            }
        }
    }
}

impl std::error::Error for Error {}
|
||||
258
crates/texm/src/lib.rs
Normal file
258
crates/texm/src/lib.rs
Normal file
@@ -0,0 +1,258 @@
|
||||
pub mod error;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
pub type Result<T> = core::result::Result<T, Error>;
|
||||
|
||||
/// Little-endian ASCII "Texm" — magic dword at offset 0 of a texture payload.
pub const TEXM_MAGIC: u32 = 0x6D78_6554;
/// Little-endian ASCII "Page" — magic of the optional trailing atlas chunk.
pub const PAGE_MAGIC: u32 = 0x6567_6150;
|
||||
|
||||
/// Pixel layouts supported by `Texm` textures.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PixelFormat {
    Indexed8,
    Rgb565,
    Rgb556,
    Argb4444,
    LuminanceAlpha88,
    Rgb888,
    Argb8888,
}

impl PixelFormat {
    /// Maps the raw on-disk format code to a variant; `None` for unknown codes.
    pub fn from_raw(raw: u32) -> Option<Self> {
        let format = match raw {
            0 => Self::Indexed8,
            565 => Self::Rgb565,
            556 => Self::Rgb556,
            4444 => Self::Argb4444,
            88 => Self::LuminanceAlpha88,
            888 => Self::Rgb888,
            8888 => Self::Argb8888,
            _ => return None,
        };
        Some(format)
    }

    /// Storage size of one pixel in bytes, as stored in the mip data
    /// (palette index = 1, packed 16-bit formats = 2, 32-bit formats = 4).
    pub fn bytes_per_pixel(self) -> usize {
        match self {
            Self::Indexed8 => 1,
            Self::Rgb888 | Self::Argb8888 => 4,
            Self::Rgb565 | Self::Rgb556 | Self::Argb4444 | Self::LuminanceAlpha88 => 2,
        }
    }
}
|
||||
|
||||
/// Parsed 32-byte `Texm` header (all fields stored little-endian on disk).
#[derive(Clone, Debug)]
pub struct Header {
    /// Top-level mip width in pixels (non-zero).
    pub width: u32,
    /// Top-level mip height in pixels (non-zero).
    pub height: u32,
    /// Number of mip levels stored after the header/palette (non-zero).
    pub mip_count: u32,
    // Dwords 4 and 5 — semantics not established by this parser; kept raw.
    pub flags4: u32,
    pub flags5: u32,
    // Dword 6 — meaning unknown; preserved verbatim.
    pub unk6: u32,
    /// Raw format code exactly as read from disk.
    pub format_raw: u32,
    /// Decoded counterpart of `format_raw`.
    pub format: PixelFormat,
}

/// Location of one mip level inside the original payload.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct MipLevel {
    /// Mip width in pixels.
    pub width: u32,
    /// Mip height in pixels.
    pub height: u32,
    /// Byte offset of this level's pixel data within the payload.
    pub offset: usize,
    /// Byte length of this level's pixel data.
    pub size: usize,
}

/// One rectangle from the optional `Page` chunk.
/// Field order mirrors the on-disk layout: x, w, y, h (each an i16).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PageRect {
    pub x: i16,
    pub w: i16,
    pub y: i16,
    pub h: i16,
}

/// Fully parsed `Texm` texture.
#[derive(Clone, Debug)]
pub struct Texture {
    pub header: Header,
    /// 256-entry, 4-bytes-per-entry palette; present only for `Indexed8`.
    pub palette: Option<[u8; 1024]>,
    /// One entry per mip, in storage order (largest first).
    pub mip_levels: Vec<MipLevel>,
    /// Rects from the trailing `Page` chunk; empty when the chunk is absent.
    pub page_rects: Vec<PageRect>,
}
|
||||
|
||||
impl Texture {
|
||||
pub fn core_size(&self) -> usize {
|
||||
let mut size = 32usize;
|
||||
if self.palette.is_some() {
|
||||
size += 1024;
|
||||
}
|
||||
for level in &self.mip_levels {
|
||||
size += level.size;
|
||||
}
|
||||
size
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a complete `Texm` payload into a [`Texture`].
///
/// Layout: 32-byte header, then (for `Indexed8` only) a 1024-byte palette,
/// then `mip_count` mip levels of tightly packed pixels, then an optional
/// `Page` chunk. Every offset/size computation is checked, so a malformed
/// payload yields an [`Error`] rather than a panic or an out-of-bounds read.
///
/// # Errors
///
/// All [`Error`] variants except the `Page`-specific ones can be produced
/// here; tail validation is delegated to `parse_page_tail`.
pub fn parse_texm(payload: &[u8]) -> Result<Texture> {
    if payload.len() < 32 {
        return Err(Error::HeaderTooSmall {
            size: payload.len(),
        });
    }

    let magic = read_u32(payload, 0)?;
    if magic != TEXM_MAGIC {
        return Err(Error::InvalidMagic { got: magic });
    }

    // Eight little-endian header dwords at fixed offsets 0..32.
    let width = read_u32(payload, 4)?;
    let height = read_u32(payload, 8)?;
    let mip_count = read_u32(payload, 12)?;
    let flags4 = read_u32(payload, 16)?;
    let flags5 = read_u32(payload, 20)?;
    let unk6 = read_u32(payload, 24)?;
    let format_raw = read_u32(payload, 28)?;

    if width == 0 || height == 0 {
        return Err(Error::InvalidDimensions { width, height });
    }
    if mip_count == 0 {
        return Err(Error::InvalidMipCount { mip_count });
    }

    let format =
        PixelFormat::from_raw(format_raw).ok_or(Error::UnknownFormat { format: format_raw })?;
    let bytes_per_pixel = format.bytes_per_pixel();

    // `offset` walks forward through the payload as sections are consumed.
    let mut offset = 32usize;
    let palette = if format == PixelFormat::Indexed8 {
        // Indexed textures carry a fixed 1024-byte palette right after the header.
        let end = offset.checked_add(1024).ok_or(Error::IntegerOverflow)?;
        if end > payload.len() {
            return Err(Error::CoreDataOutOfBounds {
                expected_end: end,
                actual_size: payload.len(),
            });
        }
        let mut pal = [0u8; 1024];
        pal.copy_from_slice(&payload[offset..end]);
        offset = end;
        Some(pal)
    } else {
        None
    };

    let mut mip_levels =
        Vec::with_capacity(usize::try_from(mip_count).map_err(|_| Error::IntegerOverflow)?);
    let mut w = width;
    let mut h = height;
    for _ in 0..mip_count {
        // Compute the level size in u64 first so w*h*bpp cannot silently wrap.
        let pixel_count_u64 = u64::from(w)
            .checked_mul(u64::from(h))
            .ok_or(Error::IntegerOverflow)?;
        let level_size_u64 = pixel_count_u64
            .checked_mul(u64::try_from(bytes_per_pixel).map_err(|_| Error::IntegerOverflow)?)
            .ok_or(Error::IntegerOverflow)?;
        let level_size = usize::try_from(level_size_u64).map_err(|_| Error::IntegerOverflow)?;
        let level_offset = offset;
        offset = offset
            .checked_add(level_size)
            .ok_or(Error::IntegerOverflow)?;
        if offset > payload.len() {
            return Err(Error::CoreDataOutOfBounds {
                expected_end: offset,
                actual_size: payload.len(),
            });
        }
        mip_levels.push(MipLevel {
            width: w,
            height: h,
            offset: level_offset,
            size: level_size,
        });
        // Halve each dimension per mip, clamping at 1 so every level is non-empty.
        w = w.max(1) >> 1;
        h = h.max(1) >> 1;
        if w == 0 {
            w = 1;
        }
        if h == 0 {
            h = 1;
        }
    }

    // Whatever remains after the last mip must be a well-formed Page chunk.
    let page_rects = parse_page_tail(payload, offset)?;

    Ok(Texture {
        header: Header {
            width,
            height,
            mip_count,
            flags4,
            flags5,
            unk6,
            format_raw,
            format,
        },
        palette,
        mip_levels,
        page_rects,
    })
}
|
||||
|
||||
/// Parses the optional `Page` chunk that may follow the core texture data.
///
/// Tail layout: `Page` magic (u32), rect count (u32), then `count` rects of
/// four i16 values each, in on-disk order x/w/y/h. The tail must consume the
/// payload exactly; an empty tail yields an empty vector.
fn parse_page_tail(payload: &[u8], core_end: usize) -> Result<Vec<PageRect>> {
    // No tail at all is valid: the texture simply has no page rects.
    if core_end == payload.len() {
        return Ok(Vec::new());
    }
    // A present tail must at least hold the 8-byte magic + count header.
    if payload.len().saturating_sub(core_end) < 8 {
        return Err(Error::InvalidPageSize {
            expected: 8,
            actual: payload.len().saturating_sub(core_end),
        });
    }
    let magic = read_u32(payload, core_end)?;
    if magic != PAGE_MAGIC {
        return Err(Error::InvalidPageMagic);
    }
    let rect_count = read_u32(payload, core_end + 4)?;
    let rect_count_usize = usize::try_from(rect_count).map_err(|_| Error::IntegerOverflow)?;
    // The tail must be exactly header + count * 8 bytes — no slack allowed.
    let expected_size = 8usize
        .checked_add(
            rect_count_usize
                .checked_mul(8)
                .ok_or(Error::IntegerOverflow)?,
        )
        .ok_or(Error::IntegerOverflow)?;
    let actual = payload.len().saturating_sub(core_end);
    if expected_size != actual {
        return Err(Error::InvalidPageSize {
            expected: expected_size,
            actual,
        });
    }

    let mut rects = Vec::with_capacity(rect_count_usize);
    for i in 0..rect_count_usize {
        let off = core_end
            .checked_add(8)
            .and_then(|v| v.checked_add(i * 8))
            .ok_or(Error::IntegerOverflow)?;
        rects.push(PageRect {
            x: read_i16(payload, off)?,
            w: read_i16(payload, off + 2)?,
            y: read_i16(payload, off + 4)?,
            h: read_i16(payload, off + 6)?,
        });
    }
    Ok(rects)
}
|
||||
|
||||
fn read_u32(data: &[u8], offset: usize) -> Result<u32> {
|
||||
let bytes = data.get(offset..offset + 4).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 4] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(u32::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
fn read_i16(data: &[u8], offset: usize) -> Result<i16> {
|
||||
let bytes = data.get(offset..offset + 2).ok_or(Error::IntegerOverflow)?;
|
||||
let arr: [u8; 2] = bytes.try_into().map_err(|_| Error::IntegerOverflow)?;
|
||||
Ok(i16::from_le_bytes(arr))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
150
crates/texm/src/tests.rs
Normal file
150
crates/texm/src/tests.rs
Normal file
@@ -0,0 +1,150 @@
|
||||
use super::*;
|
||||
use nres::Archive;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Depth-first walk that appends every regular file below `root` to `out`;
/// directories that cannot be read are skipped silently (best-effort).
fn collect_files_recursive(root: &Path, out: &mut Vec<PathBuf>) {
    let Ok(dir) = fs::read_dir(root) else {
        return;
    };
    for item in dir.flatten() {
        let child = item.path();
        if child.is_dir() {
            collect_files_recursive(&child, out);
        } else if child.is_file() {
            out.push(child);
        }
    }
}
|
||||
|
||||
/// Returns every file under the workspace `testdata` directory whose first
/// four bytes are the `NRes` archive magic, sorted for determinism.
fn nres_test_files() -> Vec<PathBuf> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("..")
        .join("..")
        .join("testdata");
    let mut files = Vec::new();
    collect_files_recursive(&root, &mut files);
    files.sort();
    files
        .into_iter()
        .filter(|path| {
            // Unreadable files are treated as non-archives rather than errors.
            fs::read(path)
                .map(|bytes| bytes.get(0..4) == Some(b"NRes"))
                .unwrap_or(false)
        })
        .collect()
}
|
||||
|
||||
/// Parses every `Texm` entry found in the real game archives under
/// `testdata`, asserting the layout invariants hold for each one. Skips
/// (with a notice) when no NRes archives are available.
#[test]
fn texm_parse_all_game_textures() {
    let archives = nres_test_files();
    if archives.is_empty() {
        eprintln!("skipping texm_parse_all_game_textures: no NRes files in testdata");
        return;
    }

    let mut texm_total = 0usize;
    let mut texm_with_page = 0usize;
    for archive_path in archives {
        let archive = Archive::open_path(&archive_path)
            .unwrap_or_else(|err| panic!("failed to open {}: {err}", archive_path.display()));

        for entry in archive.entries() {
            // Only entries whose NRes kind equals the Texm magic are textures.
            if entry.meta.kind != TEXM_MAGIC {
                continue;
            }
            texm_total += 1;
            let payload = archive.read(entry.id).unwrap_or_else(|err| {
                panic!(
                    "failed to read Texm entry '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            let texture = parse_texm(payload.as_slice()).unwrap_or_else(|err| {
                panic!(
                    "failed to parse Texm '{}' in {}: {err}",
                    entry.meta.name,
                    archive_path.display()
                )
            });
            if !texture.page_rects.is_empty() {
                texm_with_page += 1;
            }

            // The computed core section must fit inside the raw payload.
            assert!(
                texture.core_size() <= payload.as_slice().len(),
                "core size must be within payload for '{}' in {}",
                entry.meta.name,
                archive_path.display()
            );
            // The parser must emit exactly one MipLevel per declared mip.
            assert_eq!(
                usize::try_from(texture.header.mip_count).ok(),
                Some(texture.mip_levels.len()),
                "mip count mismatch for '{}' in {}",
                entry.meta.name,
                archive_path.display()
            );
        }
    }

    assert!(texm_total > 0, "no Texm textures found");
    assert!(
        texm_with_page > 0,
        "expected at least one Texm texture with Page chunk"
    );
}
|
||||
|
||||
/// Synthetic edge case: the smallest valid non-indexed texture — a 32-byte
/// header plus a single ARGB8888 pixel and no `Page` tail.
#[test]
fn texm_parse_minimal_argb8888_no_page() {
    let mut payload = Vec::new();
    payload.extend_from_slice(&TEXM_MAGIC.to_le_bytes());
    payload.extend_from_slice(&1u32.to_le_bytes()); // width
    payload.extend_from_slice(&1u32.to_le_bytes()); // height
    payload.extend_from_slice(&1u32.to_le_bytes()); // mip_count
    payload.extend_from_slice(&0u32.to_le_bytes()); // flags4
    payload.extend_from_slice(&0u32.to_le_bytes()); // flags5
    payload.extend_from_slice(&0u32.to_le_bytes()); // unk6
    payload.extend_from_slice(&8888u32.to_le_bytes()); // format
    payload.extend_from_slice(&[1, 2, 3, 4]); // one pixel

    let parsed = parse_texm(&payload).expect("failed to parse minimal texm");
    assert_eq!(parsed.header.width, 1);
    assert_eq!(parsed.header.height, 1);
    assert_eq!(parsed.mip_levels.len(), 1);
    assert!(parsed.page_rects.is_empty());
}
|
||||
|
||||
/// Synthetic edge case: a 2x2 indexed (palettized) texture followed by a
/// `Page` chunk holding one rect; verifies both the palette presence and
/// the rect values (on-disk field order x, w, y, h) round-trip intact.
#[test]
fn texm_parse_indexed_with_page_chunk() {
    let mut payload = Vec::new();
    payload.extend_from_slice(&TEXM_MAGIC.to_le_bytes());
    payload.extend_from_slice(&2u32.to_le_bytes()); // width
    payload.extend_from_slice(&2u32.to_le_bytes()); // height
    payload.extend_from_slice(&1u32.to_le_bytes()); // mip_count
    payload.extend_from_slice(&0u32.to_le_bytes()); // flags4
    payload.extend_from_slice(&0u32.to_le_bytes()); // flags5
    payload.extend_from_slice(&0u32.to_le_bytes()); // unk6
    payload.extend_from_slice(&0u32.to_le_bytes()); // format indexed8
    payload.extend_from_slice(&[0u8; 1024]); // palette
    payload.extend_from_slice(&[1, 2, 3, 4]); // pixels
    payload.extend_from_slice(&PAGE_MAGIC.to_le_bytes());
    payload.extend_from_slice(&1u32.to_le_bytes()); // rect_count
    payload.extend_from_slice(&0i16.to_le_bytes()); // x
    payload.extend_from_slice(&2i16.to_le_bytes()); // w
    payload.extend_from_slice(&0i16.to_le_bytes()); // y
    payload.extend_from_slice(&2i16.to_le_bytes()); // h

    let parsed = parse_texm(&payload).expect("failed to parse indexed texm");
    assert!(parsed.palette.is_some());
    assert_eq!(parsed.page_rects.len(), 1);
    assert_eq!(
        parsed.page_rects[0],
        PageRect {
            x: 0,
            w: 2,
            y: 0,
            h: 2
        }
    );
}
|
||||
Reference in New Issue
Block a user