feat(resource-viewer): добавить новый ресурсный просмотрщик с базовой функциональностью
Some checks failed
Test / Lint (push) Failing after 2m30s
Test / Test (push) Has been skipped
Test / Render parity (push) Has been skipped

feat(nres): улучшить структуру архива с добавлением заголовка и информации о записях
feat(rsli): добавить поддержку заголовка библиотеки и улучшить обработку записей
This commit is contained in:
2026-02-19 10:51:54 +00:00
parent cb0ca2f2f0
commit 598137ed13
6 changed files with 701 additions and 35 deletions

View File

@@ -26,10 +26,28 @@ pub enum OpenMode {
ReadWrite,
}
#[derive(Clone, Debug)]
/// Parsed header of an NRes archive file.
///
/// Only present for archives parsed from the NRes on-disk format; raw-mode
/// opens have no header (see `ArchiveInfo::header`).
pub struct ArchiveHeader {
/// Magic bytes; `b"NRes"` for a parsed (non-raw) archive.
pub magic: [u8; 4],
/// Format version read from the file header.
pub version: u32,
/// Number of directory entries recorded in the header.
pub entry_count: u32,
/// Total size as recorded in the header. NOTE(review): presumably the
/// declared file size — confirm against the parser's validation.
pub total_size: u32,
/// Byte offset of the entry directory within the file.
pub directory_offset: u64,
/// Size in bytes of the entry directory.
pub directory_size: u64,
}
#[derive(Clone, Debug)]
/// Summary information about an opened archive, captured at open time.
pub struct ArchiveInfo {
/// True when the archive was opened in raw mode, i.e. the entire byte
/// buffer is exposed as a single synthetic entry.
pub raw_mode: bool,
/// Size of the source byte buffer in bytes.
pub file_size: u64,
/// Parsed NRes header; `None` when the archive was opened in raw mode.
pub header: Option<ArchiveHeader>,
}
#[derive(Debug)]
/// An NRes archive held entirely in memory; entry reads borrow from `bytes`.
pub struct Archive {
/// Full file contents; entry data slices borrow from this buffer.
bytes: Arc<[u8]>,
/// Parsed directory records, indexed by `EntryId`.
entries: Vec<EntryRecord>,
/// Archive-level metadata captured when the archive was opened.
info: ArchiveInfo,
/// NOTE(review): duplicates `info.raw_mode` — both are assigned from
/// `opts.raw_mode` in `open_bytes`; consider keeping only one.
raw_mode: bool,
}
@@ -54,6 +72,13 @@ pub struct EntryRef<'a> {
pub meta: &'a EntryMeta,
}
#[derive(Copy, Clone, Debug)]
/// Read-only view of one archive entry that also exposes the raw,
/// unparsed 36-byte on-disk name field.
pub struct EntryInspect<'a> {
/// Id usable with the archive's id-based accessors (e.g. `read`, `inspect`).
pub id: EntryId,
/// Parsed entry metadata.
pub meta: &'a EntryMeta,
/// Raw on-disk name bytes, exactly as stored in the directory.
pub name_raw: &'a [u8; 36],
}
#[derive(Clone, Debug)]
struct EntryRecord {
meta: EntryMeta,
@@ -76,17 +101,27 @@ impl Archive {
}
pub fn open_bytes(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Self> {
let (entries, _) = parse_archive(&bytes, opts.raw_mode)?;
let file_size = u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?;
let (entries, header) = parse_archive(&bytes, opts.raw_mode)?;
if opts.prefetch_pages {
prefetch_pages(&bytes);
}
Ok(Self {
bytes,
entries,
info: ArchiveInfo {
raw_mode: opts.raw_mode,
file_size,
header,
},
raw_mode: opts.raw_mode,
})
}
/// Archive-level metadata (raw-mode flag, file size, optional header)
/// captured when the archive was opened.
pub fn info(&self) -> &ArchiveInfo {
&self.info
}
/// Number of entries in the archive directory.
pub fn entry_count(&self) -> usize {
self.entries.len()
}
@@ -101,6 +136,17 @@ impl Archive {
})
}
/// Iterates over all entries, exposing the raw on-disk name of each one.
///
/// Entries whose index does not fit in a `u32` id are silently skipped,
/// matching the id construction used by the other iterators on this type.
pub fn entries_inspect(&self) -> impl Iterator<Item = EntryInspect<'_>> {
    self.entries
        .iter()
        .enumerate()
        .filter_map(|(position, record)| match u32::try_from(position) {
            Ok(id_value) => Some(EntryInspect {
                id: EntryId(id_value),
                meta: &record.meta,
                name_raw: &record.name_raw,
            }),
            Err(_) => None,
        })
}
pub fn find(&self, name: &str) -> Option<EntryId> {
if self.entries.is_empty() {
return None;
@@ -153,6 +199,16 @@ impl Archive {
})
}
/// Looks up a single entry by id, exposing its raw on-disk name field.
///
/// Returns `None` when `id` does not correspond to a stored entry.
pub fn inspect(&self, id: EntryId) -> Option<EntryInspect<'_>> {
    let index = usize::try_from(id.0).ok()?;
    self.entries.get(index).map(|record| EntryInspect {
        id,
        meta: &record.meta,
        name_raw: &record.name_raw,
    })
}
pub fn read(&self, id: EntryId) -> Result<ResourceData<'_>> {
let range = self.entry_range(id)?;
Ok(ResourceData::Borrowed(&self.bytes[range]))
@@ -377,7 +433,10 @@ impl Editor {
}
}
fn parse_archive(bytes: &[u8], raw_mode: bool) -> Result<(Vec<EntryRecord>, u64)> {
fn parse_archive(
bytes: &[u8],
raw_mode: bool,
) -> Result<(Vec<EntryRecord>, Option<ArchiveHeader>)> {
if raw_mode {
let data_size = u32::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?;
let entry = EntryRecord {
@@ -398,10 +457,7 @@ fn parse_archive(bytes: &[u8], raw_mode: bool) -> Result<(Vec<EntryRecord>, u64)
name
},
};
return Ok((
vec![entry],
u64::try_from(bytes.len()).map_err(|_| Error::IntegerOverflow)?,
));
return Ok((vec![entry], None));
}
if bytes.len() < 16 {
@@ -526,7 +582,17 @@ fn parse_archive(bytes: &[u8], raw_mode: bool) -> Result<(Vec<EntryRecord>, u64)
});
}
Ok((entries, directory_offset))
Ok((
entries,
Some(ArchiveHeader {
magic: *b"NRes",
version,
entry_count: u32::try_from(entry_count).map_err(|_| Error::IntegerOverflow)?,
total_size,
directory_offset,
directory_size: directory_len,
}),
))
}
fn checked_range(offset: u64, size: u32, bytes_len: usize) -> Result<Range<usize>> {

View File

@@ -30,20 +30,33 @@ impl Default for OpenOptions {
}
}
#[derive(Clone, Debug)]
/// Parsed fields of the 32-byte NL library header (bytes 0..32 of the file).
pub struct LibraryHeader {
/// Verbatim copy of the 32 header bytes as read from the source file.
pub raw: [u8; 32],
/// Magic bytes at offset 0; expected to be `b"NL"`.
pub magic: [u8; 2],
/// Byte at offset 2; not interpreted by the parser.
pub reserved: u8,
/// Format version at offset 3; only version 0x01 is accepted.
pub version: u8,
/// Entry count (little-endian i16 at offset 4).
pub entry_count: i16,
/// Flag at offset 14; the value 0xABBA marks a presorted entry table.
pub presorted_flag: u16,
/// XOR seed at offset 20; its low 16 bits key the table cipher.
pub xor_seed: u32,
}
#[derive(Clone, Debug)]
/// Optional 6-byte "AO" trailer found at the end of some library files.
pub struct AoTrailer {
/// Verbatim trailer bytes; re-emitted as-is when rebuilding the file.
pub raw: [u8; 6],
/// Overlay value decoded from the trailer. NOTE(review): exact semantics
/// are established by `parse_ao_trailer` — confirm against its definition.
pub overlay: u32,
}
#[derive(Debug)]
/// An NL resource library held entirely in memory.
///
/// NOTE(review): this listing appears to interleave pre- and post-refactor
/// fields (`header_raw`/`xor_seed`/`trailer_raw` vs the newer `header` and
/// `ao_trailer`); verify against the value constructed in `parse_library`.
pub struct Library {
/// Full file contents; packed entry data borrows from this buffer.
bytes: Arc<[u8]>,
/// Parsed entry-table records, indexed by `EntryId`.
entries: Vec<EntryRecord>,
#[cfg(test)]
pub(crate) header_raw: [u8; 32],
/// Parsed 32-byte library header.
header: LibraryHeader,
/// Optional 6-byte AO trailer from the end of the file.
ao_trailer: Option<AoTrailer>,
/// Decrypted entry table, retained so tests can rebuild the file
/// byte-for-byte (re-encrypted with the header's XOR seed).
#[cfg(test)]
pub(crate) table_plain_original: Vec<u8>,
#[cfg(test)]
pub(crate) xor_seed: u32,
/// Original source file size in bytes (test-only rebuild bookkeeping).
#[cfg(test)]
pub(crate) source_size: usize,
#[cfg(test)]
pub(crate) trailer_raw: Option<[u8; 6]>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -77,6 +90,16 @@ pub struct EntryRef<'a> {
pub meta: &'a EntryMeta,
}
#[derive(Copy, Clone, Debug)]
/// Read-only view of one library entry exposing raw on-disk table fields.
pub struct EntryInspect<'a> {
/// Id usable with the library's id-based accessors (e.g. `load`, `inspect`).
pub id: EntryId,
/// Parsed entry metadata.
pub meta: &'a EntryMeta,
/// Raw 12-byte name field from the entry-table row.
pub name_raw: &'a [u8; 12],
/// Bytes 12..16 of the table row; not interpreted by the parser.
pub service_tail: &'a [u8; 4],
/// Sorted-to-original index mapping (little-endian i16 at row offset 18).
pub sort_to_original: i16,
/// Data offset exactly as stored in the table row.
pub data_offset_raw: u32,
}
pub struct PackedResource {
pub meta: EntryMeta,
pub packed: Vec<u8>,
@@ -86,9 +109,9 @@ pub struct PackedResource {
pub(crate) struct EntryRecord {
pub(crate) meta: EntryMeta,
pub(crate) name_raw: [u8; 12],
pub(crate) service_tail: [u8; 4],
pub(crate) sort_to_original: i16,
pub(crate) key16: u16,
#[cfg(test)]
pub(crate) data_offset_raw: u32,
pub(crate) packed_size_declared: u32,
pub(crate) packed_size_available: usize,
@@ -106,6 +129,14 @@ impl Library {
parse_library(arc, opts)
}
/// The parsed 32-byte library header.
pub fn header(&self) -> &LibraryHeader {
&self.header
}
/// The optional 6-byte AO trailer, when one was present in the source file.
pub fn ao_trailer(&self) -> Option<&AoTrailer> {
self.ao_trailer.as_ref()
}
/// Number of entries in the library's entry table.
pub fn entry_count(&self) -> usize {
self.entries.len()
}
@@ -120,6 +151,20 @@ impl Library {
})
}
/// Iterates over all entries, exposing the raw on-disk table fields of each.
///
/// Entries whose index does not fit in a `u32` id are silently skipped,
/// matching the id construction used by the other iterators on this type.
pub fn entries_inspect(&self) -> impl Iterator<Item = EntryInspect<'_>> {
    self.entries
        .iter()
        .enumerate()
        .filter_map(|(position, record)| match u32::try_from(position) {
            Ok(id_value) => Some(EntryInspect {
                id: EntryId(id_value),
                meta: &record.meta,
                name_raw: &record.name_raw,
                service_tail: &record.service_tail,
                sort_to_original: record.sort_to_original,
                data_offset_raw: record.data_offset_raw,
            }),
            Err(_) => None,
        })
}
pub fn find(&self, name: &str) -> Option<EntryId> {
if self.entries.is_empty() {
return None;
@@ -187,6 +232,19 @@ impl Library {
})
}
/// Looks up a single entry by id, exposing its raw on-disk table fields.
///
/// Returns `None` when `id` does not correspond to a stored entry.
pub fn inspect(&self, id: EntryId) -> Option<EntryInspect<'_>> {
    let index = usize::try_from(id.0).ok()?;
    self.entries.get(index).map(|record| EntryInspect {
        id,
        meta: &record.meta,
        name_raw: &record.name_raw,
        service_tail: &record.service_tail,
        sort_to_original: record.sort_to_original,
        data_offset_raw: record.data_offset_raw,
    })
}
pub fn load(&self, id: EntryId) -> Result<Vec<u8>> {
let entry = self.entry_by_id(id)?;
let packed = self.packed_slice(id, entry)?;
@@ -284,7 +342,7 @@ impl Library {
#[cfg(test)]
pub(crate) fn rebuild_from_parsed_metadata(&self) -> Result<Vec<u8>> {
let trailer_len = usize::from(self.trailer_raw.is_some()) * 6;
let trailer_len = usize::from(self.ao_trailer.is_some()) * 6;
let pre_trailer_size = self
.source_size
.checked_sub(trailer_len)
@@ -304,9 +362,11 @@ impl Library {
}
let mut out = vec![0u8; pre_trailer_size];
out[0..32].copy_from_slice(&self.header_raw);
let encrypted_table =
xor_stream(&self.table_plain_original, (self.xor_seed & 0xFFFF) as u16);
out[0..32].copy_from_slice(&self.header.raw);
let encrypted_table = xor_stream(
&self.table_plain_original,
(self.header.xor_seed & 0xFFFF) as u16,
);
out[32..table_end].copy_from_slice(&encrypted_table);
let mut occupied = vec![false; pre_trailer_size];
@@ -337,8 +397,8 @@ impl Library {
}
}
if let Some(trailer) = self.trailer_raw {
out.extend_from_slice(&trailer);
if let Some(trailer) = &self.ao_trailer {
out.extend_from_slice(&trailer.raw);
}
Ok(out)
}

View File

@@ -1,6 +1,8 @@
use crate::compress::xor::xor_stream;
use crate::error::Error;
use crate::{EntryMeta, EntryRecord, Library, OpenOptions, PackMethod, Result};
use crate::{
AoTrailer, EntryMeta, EntryRecord, Library, LibraryHeader, OpenOptions, PackMethod, Result,
};
use std::cmp::Ordering;
use std::sync::Arc;
@@ -16,13 +18,17 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
let mut header_raw = [0u8; 32];
header_raw.copy_from_slice(&bytes[0..32]);
if &bytes[0..2] != b"NL" {
let mut magic = [0u8; 2];
magic.copy_from_slice(&bytes[0..2]);
if &magic != b"NL" {
let mut got = [0u8; 2];
got.copy_from_slice(&bytes[0..2]);
return Err(Error::InvalidMagic { got });
}
if bytes[3] != 0x01 {
return Err(Error::UnsupportedVersion { got: bytes[3] });
let reserved = bytes[2];
let version = bytes[3];
if version != 0x01 {
return Err(Error::UnsupportedVersion { got: version });
}
let entry_count = i16::from_le_bytes([bytes[4], bytes[5]]);
@@ -36,7 +42,17 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
return Err(Error::TooManyEntries { got: count });
}
let presorted_flag = u16::from_le_bytes([bytes[14], bytes[15]]);
let xor_seed = u32::from_le_bytes([bytes[20], bytes[21], bytes[22], bytes[23]]);
let header = LibraryHeader {
raw: header_raw,
magic,
reserved,
version,
entry_count,
presorted_flag,
xor_seed,
};
let table_len = count.checked_mul(32).ok_or(Error::IntegerOverflow)?;
let table_offset = 32usize;
@@ -58,8 +74,6 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
}
let (overlay, trailer_raw) = parse_ao_trailer(&bytes, opts.allow_ao_trailer)?;
#[cfg(not(test))]
let _ = trailer_raw;
let mut entries = Vec::with_capacity(count);
for idx in 0..count {
@@ -67,6 +81,8 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
let mut name_raw = [0u8; 12];
name_raw.copy_from_slice(&row[0..12]);
let mut service_tail = [0u8; 4];
service_tail.copy_from_slice(&row[12..16]);
let flags_signed = i16::from_le_bytes([row[16], row[17]]);
let sort_to_original = i16::from_le_bytes([row[18], row[19]]);
@@ -137,9 +153,9 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
unpacked_size,
},
name_raw,
service_tail,
sort_to_original,
key16: sort_to_original as u16,
#[cfg(test)]
data_offset_raw,
packed_size_declared,
packed_size_available,
@@ -147,7 +163,6 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
});
}
let presorted_flag = u16::from_le_bytes([bytes[14], bytes[15]]);
if presorted_flag == 0xABBA {
let mut seen = vec![false; count];
for entry in &entries {
@@ -196,16 +211,12 @@ pub fn parse_library(bytes: Arc<[u8]>, opts: OpenOptions) -> Result<Library> {
Ok(Library {
bytes,
entries,
#[cfg(test)]
header_raw,
header,
ao_trailer: trailer_raw.map(|raw| AoTrailer { raw, overlay }),
#[cfg(test)]
table_plain_original,
#[cfg(test)]
xor_seed,
#[cfg(test)]
source_size,
#[cfg(test)]
trailer_raw,
})
}