Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
This commit is contained in:
1328
vendor/object/src/read/any.rs
vendored
Normal file
1328
vendor/object/src/read/any.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
759
vendor/object/src/read/archive.rs
vendored
Normal file
759
vendor/object/src/read/archive.rs
vendored
Normal file
@@ -0,0 +1,759 @@
|
||||
//! Support for archive files.
|
||||
//!
|
||||
//! ## Example
|
||||
//! ```no_run
|
||||
//! use object::{Object, ObjectSection};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads an archive and displays the name of each member.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let file = object::read::archive::ArchiveFile::parse(&*data)?;
|
||||
//! for member in file.members() {
|
||||
//! let member = member?;
|
||||
//! println!("{}", String::from_utf8_lossy(member.name()));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
|
||||
use core::convert::TryInto;
|
||||
|
||||
use crate::archive;
|
||||
use crate::read::{self, Bytes, Error, ReadError, ReadRef};
|
||||
|
||||
/// The kind of archive format.
///
/// Determined by `ArchiveFile::parse` from the archive's special leading
/// members (symbol table, names table), not from the magic alone.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum ArchiveKind {
    /// There are no special files that indicate the archive format.
    Unknown,
    /// The GNU (or System V) archive format.
    Gnu,
    /// The GNU (or System V) archive format with 64-bit symbol table.
    Gnu64,
    /// The BSD archive format.
    Bsd,
    /// The BSD archive format with 64-bit symbol table.
    ///
    /// This is used for Darwin.
    Bsd64,
    /// The Windows COFF archive format.
    Coff,
    /// The AIX big archive format.
    AixBig,
}
|
||||
|
||||
/// The list of members in the archive.
///
/// Internal iteration state shared by `ArchiveFile` and
/// `ArchiveMemberIterator` (the iterator copies this value).
#[derive(Debug, Clone, Copy)]
enum Members<'data> {
    /// Members are parsed sequentially from a contiguous byte range.
    Common {
        /// Offset of the first (or next) member header.
        offset: u64,
        /// Offset one past the last member.
        end_offset: u64,
    },
    /// AIX big archive members are reached through the member index table,
    /// because the on-disk members form a linked list that could loop.
    AixBig {
        /// Remaining entries of the member index table.
        index: &'data [archive::AixMemberOffset],
    },
}
|
||||
|
||||
/// A partially parsed archive file.
///
/// Only the special leading members are parsed eagerly; regular members are
/// parsed lazily by the iterator returned from [`ArchiveFile::members`].
#[derive(Debug, Clone, Copy)]
pub struct ArchiveFile<'data, R: ReadRef<'data> = &'data [u8]> {
    /// The underlying archive data.
    data: R,
    /// The detected archive flavor.
    kind: ArchiveKind,
    /// How to find and iterate the non-special members.
    members: Members<'data>,
    /// File range (offset, size) of the symbol table member; (0, 0) if none.
    symbols: (u64, u64),
    /// Contents of the extended-names ("//") table; empty if none.
    names: &'data [u8],
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> ArchiveFile<'data, R> {
    /// Parse the archive header and special members.
    ///
    /// Recognizes the common `!<arch>` format and dispatches AIX big
    /// archives (`<bigaf>` magic) to `parse_aixbig`. Regular members are
    /// not parsed here; only the leading special members are examined to
    /// determine `kind`, the symbol table range, and the names table.
    pub fn parse(data: R) -> read::Result<Self> {
        let len = data.len().read_error("Unknown archive length")?;
        let mut tail = 0;
        let magic = data
            .read_bytes(&mut tail, archive::MAGIC.len() as u64)
            .read_error("Invalid archive size")?;

        if magic == archive::AIX_BIG_MAGIC {
            return Self::parse_aixbig(data);
        } else if magic != archive::MAGIC {
            return Err(Error("Unsupported archive identifier"));
        }

        // Regular members start right after the magic unless special
        // members are found below.
        let mut members_offset = tail;
        let members_end_offset = len;

        let mut file = ArchiveFile {
            data,
            kind: ArchiveKind::Unknown,
            members: Members::Common {
                offset: 0,
                end_offset: 0,
            },
            symbols: (0, 0),
            names: &[],
        };

        // The first few members may be special, so parse them.
        // GNU has:
        // - "/" or "/SYM64/": symbol table (optional)
        // - "//": names table (optional)
        // COFF has:
        // - "/": first linker member
        // - "/": second linker member
        // - "//": names table
        // BSD has:
        // - "__.SYMDEF" or "__.SYMDEF SORTED": symbol table (optional)
        // BSD 64-bit has:
        // - "__.SYMDEF_64" or "__.SYMDEF_64 SORTED": symbol table (optional)
        // BSD may use the extended name for the symbol table. This is handled
        // by `ArchiveMember::parse`.
        if tail < len {
            let member = ArchiveMember::parse(data, &mut tail, &[])?;
            if member.name == b"/" {
                // GNU symbol table (unless we later determine this is COFF).
                file.kind = ArchiveKind::Gnu;
                file.symbols = member.file_range();
                members_offset = tail;

                if tail < len {
                    let member = ArchiveMember::parse(data, &mut tail, &[])?;
                    if member.name == b"/" {
                        // COFF linker member.
                        // A second "/" member distinguishes COFF from GNU.
                        file.kind = ArchiveKind::Coff;
                        file.symbols = member.file_range();
                        members_offset = tail;

                        if tail < len {
                            let member = ArchiveMember::parse(data, &mut tail, &[])?;
                            if member.name == b"//" {
                                // COFF names table.
                                file.names = member.data(data)?;
                                members_offset = tail;
                            }
                        }
                    } else if member.name == b"//" {
                        // GNU names table.
                        file.names = member.data(data)?;
                        members_offset = tail;
                    }
                }
            } else if member.name == b"/SYM64/" {
                // GNU 64-bit symbol table.
                file.kind = ArchiveKind::Gnu64;
                file.symbols = member.file_range();
                members_offset = tail;

                if tail < len {
                    let member = ArchiveMember::parse(data, &mut tail, &[])?;
                    if member.name == b"//" {
                        // GNU names table.
                        file.names = member.data(data)?;
                        members_offset = tail;
                    }
                }
            } else if member.name == b"//" {
                // GNU names table.
                file.kind = ArchiveKind::Gnu;
                file.names = member.data(data)?;
                members_offset = tail;
            } else if member.name == b"__.SYMDEF" || member.name == b"__.SYMDEF SORTED" {
                // BSD symbol table.
                file.kind = ArchiveKind::Bsd;
                file.symbols = member.file_range();
                members_offset = tail;
            } else if member.name == b"__.SYMDEF_64" || member.name == b"__.SYMDEF_64 SORTED" {
                // BSD 64-bit symbol table.
                file.kind = ArchiveKind::Bsd64;
                file.symbols = member.file_range();
                members_offset = tail;
            } else {
                // TODO: This could still be a BSD file. We leave this as unknown for now.
            }
        }
        file.members = Members::Common {
            offset: members_offset,
            end_offset: members_end_offset,
        };
        Ok(file)
    }

    /// Parse an AIX big archive (`<bigaf>` magic).
    ///
    /// Reads the fixed file header, locates the (32- or 64-bit) symbol
    /// table, and loads the member index table used for iteration.
    fn parse_aixbig(data: R) -> read::Result<Self> {
        let mut tail = 0;

        let file_header = data
            .read::<archive::AixFileHeader>(&mut tail)
            .read_error("Invalid AIX big archive file header")?;
        // Caller already validated this.
        debug_assert_eq!(file_header.magic, archive::AIX_BIG_MAGIC);

        let mut file = ArchiveFile {
            data,
            kind: ArchiveKind::AixBig,
            members: Members::AixBig { index: &[] },
            symbols: (0, 0),
            names: &[],
        };

        // Read the span of symbol table.
        // The 64-bit global symbol table takes precedence when present.
        let symtbl64 = parse_u64_digits(&file_header.gst64off, 10)
            .read_error("Invalid offset to 64-bit symbol table in AIX big archive")?;
        if symtbl64 > 0 {
            // The symbol table is also a file with header.
            let member = ArchiveMember::parse_aixbig(data, symtbl64)?;
            file.symbols = member.file_range();
        } else {
            let symtbl = parse_u64_digits(&file_header.gstoff, 10)
                .read_error("Invalid offset to symbol table in AIX big archive")?;
            if symtbl > 0 {
                // The symbol table is also a file with header.
                let member = ArchiveMember::parse_aixbig(data, symtbl)?;
                file.symbols = member.file_range();
            }
        }

        // Big archive member index table lists file entries with offsets and names.
        // To avoid potential infinite loop (members are double-linked list), the
        // iterator goes through the index instead of real members.
        let member_table_offset = parse_u64_digits(&file_header.memoff, 10)
            .read_error("Invalid offset for member table of AIX big archive")?;
        if member_table_offset == 0 {
            // The offset would be zero if archive contains no file.
            return Ok(file);
        }

        // The member index table is also a file with header.
        let member = ArchiveMember::parse_aixbig(data, member_table_offset)?;
        let mut member_data = Bytes(member.data(data)?);

        // Structure of member index table:
        // Number of entries (20 bytes)
        // Offsets of each entry (20*N bytes)
        // Names string table (the rest of bytes to fill size defined in header)
        let members_count_bytes = member_data
            .read_slice::<u8>(20)
            .read_error("Missing member count in AIX big archive")?;
        let members_count = parse_u64_digits(members_count_bytes, 10)
            .and_then(|size| size.try_into().ok())
            .read_error("Invalid member count in AIX big archive")?;
        let index = member_data
            .read_slice::<archive::AixMemberOffset>(members_count)
            .read_error("Member count overflow in AIX big archive")?;
        file.members = Members::AixBig { index };

        Ok(file)
    }

    /// Return the archive format.
    #[inline]
    pub fn kind(&self) -> ArchiveKind {
        self.kind
    }

    /// Iterate over the members of the archive.
    ///
    /// This does not return special members.
    #[inline]
    pub fn members(&self) -> ArchiveMemberIterator<'data, R> {
        ArchiveMemberIterator {
            data: self.data,
            members: self.members,
            names: self.names,
        }
    }
}
|
||||
|
||||
/// An iterator over the members of an archive.
///
/// Created by [`ArchiveFile::members`]; yields `Result`s so a corrupt
/// member is reported once, after which iteration ends.
#[derive(Debug)]
pub struct ArchiveMemberIterator<'data, R: ReadRef<'data> = &'data [u8]> {
    /// The underlying archive data.
    data: R,
    /// Remaining iteration state (byte range or AIX index entries).
    members: Members<'data>,
    /// The extended-names table, used to resolve `/<offset>` names.
    names: &'data [u8],
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> Iterator for ArchiveMemberIterator<'data, R> {
    type Item = read::Result<ArchiveMember<'data>>;

    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.members {
            Members::Common {
                ref mut offset,
                ref mut end_offset,
            } => {
                if *offset >= *end_offset {
                    return None;
                }
                let member = ArchiveMember::parse(self.data, offset, self.names);
                if member.is_err() {
                    // Fuse the iterator on error: exhaust the range so the
                    // error is yielded once and subsequent calls return None.
                    *offset = *end_offset;
                }
                Some(member)
            }
            Members::AixBig { ref mut index } => match **index {
                [] => None,
                [ref first, ref rest @ ..] => {
                    // Pop the first index entry and parse the member it references.
                    *index = rest;
                    let member = ArchiveMember::parse_aixbig_index(self.data, first);
                    if member.is_err() {
                        // Fuse on error, as above.
                        *index = &[];
                    }
                    Some(member)
                }
            },
        }
    }
}
|
||||
|
||||
/// An archive member header.
#[derive(Debug, Clone, Copy)]
enum MemberHeader<'data> {
    /// Common header used by many formats.
    Common(&'data archive::Header),
    /// AIX big archive header.
    AixBig(&'data archive::AixHeader),
}
|
||||
|
||||
/// A partially parsed archive member.
#[derive(Debug)]
pub struct ArchiveMember<'data> {
    /// The raw member header.
    header: MemberHeader<'data>,
    /// The member name, with any extended name already resolved.
    name: &'data [u8],
    /// Offset of the member's file data within the archive.
    offset: u64,
    /// Size in bytes of the member's file data (excludes any inline BSD name).
    size: u64,
}
|
||||
|
||||
impl<'data> ArchiveMember<'data> {
    /// Parse the member header, name, and file data in an archive with the common format.
    ///
    /// This reads the extended name (if any) and adjusts the file size.
    /// On success, `offset` is advanced past the member (including the
    /// even-byte padding), so it points at the next member header.
    fn parse<R: ReadRef<'data>>(
        data: R,
        offset: &mut u64,
        names: &'data [u8],
    ) -> read::Result<Self> {
        let header = data
            .read::<archive::Header>(offset)
            .read_error("Invalid archive member header")?;
        if header.terminator != archive::TERMINATOR {
            return Err(Error("Invalid archive terminator"));
        }

        let mut file_offset = *offset;
        let mut file_size =
            parse_u64_digits(&header.size, 10).read_error("Invalid archive member size")?;
        *offset = offset
            .checked_add(file_size)
            .read_error("Archive member size is too large")?;
        // Entries are padded to an even number of bytes.
        if (file_size & 1) != 0 {
            *offset = offset.saturating_add(1);
        }

        // Decode the 16-byte name field. Four cases:
        // "/<digits>"  -> SysV/GNU extended name: offset into the names table.
        // "#1/<digits>" -> BSD extended name: length prefix; name is stored at
        //                  the start of the file data (file_offset/file_size
        //                  are adjusted to skip it).
        // "/..."       -> special name ("/", "//", "/SYM64/"): ends at a space.
        // otherwise    -> plain name: ends at '/' or space, whichever first.
        let name = if header.name[0] == b'/' && (header.name[1] as char).is_ascii_digit() {
            // Read file name from the names table.
            parse_sysv_extended_name(&header.name[1..], names)
                .read_error("Invalid archive extended name offset")?
        } else if &header.name[..3] == b"#1/" && (header.name[3] as char).is_ascii_digit() {
            // Read file name from the start of the file data.
            parse_bsd_extended_name(&header.name[3..], data, &mut file_offset, &mut file_size)
                .read_error("Invalid archive extended name length")?
        } else if header.name[0] == b'/' {
            let name_len = memchr::memchr(b' ', &header.name).unwrap_or(header.name.len());
            &header.name[..name_len]
        } else {
            let name_len = memchr::memchr(b'/', &header.name)
                .or_else(|| memchr::memchr(b' ', &header.name))
                .unwrap_or(header.name.len());
            &header.name[..name_len]
        };

        Ok(ArchiveMember {
            header: MemberHeader::Common(header),
            name,
            offset: file_offset,
            size: file_size,
        })
    }

    /// Parse a member index entry in an AIX big archive,
    /// and then parse the member header, name, and file data.
    fn parse_aixbig_index<R: ReadRef<'data>>(
        data: R,
        index: &archive::AixMemberOffset,
    ) -> read::Result<Self> {
        // The index entry is a decimal ASCII byte offset of the member header.
        let offset = parse_u64_digits(&index.0, 10)
            .read_error("Invalid AIX big archive file member offset")?;
        Self::parse_aixbig(data, offset)
    }

    /// Parse the member header, name, and file data in an AIX big archive.
    fn parse_aixbig<R: ReadRef<'data>>(data: R, mut offset: u64) -> read::Result<Self> {
        // The format was described at
        // https://www.ibm.com/docs/en/aix/7.3?topic=formats-ar-file-format-big
        let header = data
            .read::<archive::AixHeader>(&mut offset)
            .read_error("Invalid AIX big archive member header")?;
        let name_length = parse_u64_digits(&header.namlen, 10)
            .read_error("Invalid AIX big archive member name length")?;
        let name = data
            .read_bytes(&mut offset, name_length)
            .read_error("Invalid AIX big archive member name")?;

        // The actual data for a file member begins at the first even-byte boundary beyond the
        // member header and continues for the number of bytes specified by the ar_size field. The
        // ar command inserts null bytes for padding where necessary.
        if offset & 1 != 0 {
            offset = offset.saturating_add(1);
        }
        // Because of the even-byte boundary, we have to read and check terminator after header.
        let terminator = data
            .read_bytes(&mut offset, 2)
            .read_error("Invalid AIX big archive terminator")?;
        if terminator != archive::TERMINATOR {
            return Err(Error("Invalid AIX big archive terminator"));
        }

        let size = parse_u64_digits(&header.size, 10)
            .read_error("Invalid archive member size in AIX big archive")?;
        Ok(ArchiveMember {
            header: MemberHeader::AixBig(header),
            name,
            offset,
            size,
        })
    }

    /// Return the raw header that is common to many archive formats.
    ///
    /// Returns `None` if this archive does not use the common header format.
    #[inline]
    pub fn header(&self) -> Option<&'data archive::Header> {
        match self.header {
            MemberHeader::Common(header) => Some(header),
            _ => None,
        }
    }

    /// Return the raw header for AIX big archives.
    ///
    /// Returns `None` if this is not an AIX big archive.
    #[inline]
    pub fn aix_header(&self) -> Option<&'data archive::AixHeader> {
        match self.header {
            MemberHeader::AixBig(header) => Some(header),
            _ => None,
        }
    }

    /// Return the parsed file name.
    ///
    /// This may be an extended file name.
    #[inline]
    pub fn name(&self) -> &'data [u8] {
        self.name
    }

    /// Parse the file modification timestamp from the header.
    ///
    /// Returns `None` if the field is blank or not a valid decimal number.
    #[inline]
    pub fn date(&self) -> Option<u64> {
        match &self.header {
            MemberHeader::Common(header) => parse_u64_digits(&header.date, 10),
            MemberHeader::AixBig(header) => parse_u64_digits(&header.date, 10),
        }
    }

    /// Parse the user ID from the header.
    ///
    /// Returns `None` if the field is blank or not a valid decimal number.
    #[inline]
    pub fn uid(&self) -> Option<u64> {
        match &self.header {
            MemberHeader::Common(header) => parse_u64_digits(&header.uid, 10),
            MemberHeader::AixBig(header) => parse_u64_digits(&header.uid, 10),
        }
    }

    /// Parse the group ID from the header.
    ///
    /// Returns `None` if the field is blank or not a valid decimal number.
    #[inline]
    pub fn gid(&self) -> Option<u64> {
        match &self.header {
            MemberHeader::Common(header) => parse_u64_digits(&header.gid, 10),
            MemberHeader::AixBig(header) => parse_u64_digits(&header.gid, 10),
        }
    }

    /// Parse the file mode from the header.
    ///
    /// The mode field is octal, unlike the other numeric fields.
    #[inline]
    pub fn mode(&self) -> Option<u64> {
        match &self.header {
            MemberHeader::Common(header) => parse_u64_digits(&header.mode, 8),
            MemberHeader::AixBig(header) => parse_u64_digits(&header.mode, 8),
        }
    }

    /// Return the offset and size of the file data.
    pub fn file_range(&self) -> (u64, u64) {
        (self.offset, self.size)
    }

    /// Return the file data.
    #[inline]
    pub fn data<R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [u8]> {
        data.read_bytes_at(self.offset, self.size)
            .read_error("Archive member size is too large")
    }
}
|
||||
|
||||
// Parse a space-terminated run of ASCII digits in the given radix.
// Ignores bytes starting from the first space; a *leading* space means the
// field is blank and yields `None`. Overflow or a non-digit yields `None`.
// An empty field parses as 0.
fn parse_u64_digits(digits: &[u8], radix: u32) -> Option<u64> {
    // An all-blank (space-padded) field starts with a space and is invalid.
    if digits.first() == Some(&b' ') {
        return None;
    }
    let mut value: u64 = 0;
    for &byte in digits {
        // Fields are right-padded with spaces; stop at the first one.
        if byte == b' ' {
            break;
        }
        let digit = u64::from((byte as char).to_digit(radix)?);
        value = value.checked_mul(u64::from(radix))?.checked_add(digit)?;
    }
    Some(value)
}
|
||||
|
||||
fn parse_sysv_extended_name<'data>(digits: &[u8], names: &'data [u8]) -> Result<&'data [u8], ()> {
|
||||
let offset = parse_u64_digits(digits, 10).ok_or(())?;
|
||||
let offset = offset.try_into().map_err(|_| ())?;
|
||||
let name_data = names.get(offset..).ok_or(())?;
|
||||
let name = match memchr::memchr2(b'/', b'\0', name_data) {
|
||||
Some(len) => &name_data[..len],
|
||||
None => name_data,
|
||||
};
|
||||
Ok(name)
|
||||
}
|
||||
|
||||
/// Modifies `data` to start after the extended name.
|
||||
fn parse_bsd_extended_name<'data, R: ReadRef<'data>>(
|
||||
digits: &[u8],
|
||||
data: R,
|
||||
offset: &mut u64,
|
||||
size: &mut u64,
|
||||
) -> Result<&'data [u8], ()> {
|
||||
let len = parse_u64_digits(digits, 10).ok_or(())?;
|
||||
*size = size.checked_sub(len).ok_or(())?;
|
||||
let name_data = data.read_bytes(offset, len)?;
|
||||
let name = match memchr::memchr(b'\0', name_data) {
|
||||
Some(len) => &name_data[..len],
|
||||
None => name_data,
|
||||
};
|
||||
Ok(name)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn kind() {
|
||||
let data = b"!<arch>\n";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Unknown);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/SYM64/ 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/SYM64/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
__.SYMDEF 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/9 13 `\n\
|
||||
__.SYMDEF0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/16 20 `\n\
|
||||
__.SYMDEF SORTED0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
__.SYMDEF_64 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/12 16 `\n\
|
||||
__.SYMDEF_640000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/19 23 `\n\
|
||||
__.SYMDEF_64 SORTED0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Coff);
|
||||
|
||||
let data = b"\
|
||||
<bigaf>\n\
|
||||
0 0 \
|
||||
0 0 \
|
||||
0 128 \
|
||||
6 0 \
|
||||
0 \0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::AixBig);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn gnu_names() {
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
// 18 `\n\
|
||||
0123456789abcdef/\n\
|
||||
s p a c e/ 0 0 0 644 4 `\n\
|
||||
0000\
|
||||
0123456789abcde/0 0 0 644 3 `\n\
|
||||
odd\n\
|
||||
/0 0 0 0 644 4 `\n\
|
||||
even";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"s p a c e");
|
||||
assert_eq!(member.data(data).unwrap(), &b"0000"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcde");
|
||||
assert_eq!(member.data(data).unwrap(), &b"odd"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"even"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bsd_names() {
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
0123456789abcde 0 0 0 644 3 `\n\
|
||||
odd\n\
|
||||
#1/16 0 0 0 644 20 `\n\
|
||||
0123456789abcdefeven";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Unknown);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcde");
|
||||
assert_eq!(member.data(data).unwrap(), &b"odd"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"even"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aix_names() {
|
||||
let data = b"\
|
||||
<bigaf>\n\
|
||||
396 0 0 \
|
||||
128 262 0 \
|
||||
4 262 0 \
|
||||
1662610370 223 1 644 16 \
|
||||
0123456789abcdef`\nord\n\
|
||||
4 396 128 \
|
||||
1662610374 223 1 644 16 \
|
||||
fedcba9876543210`\nrev\n\
|
||||
94 0 262 \
|
||||
0 0 0 0 0 \
|
||||
`\n2 128 \
|
||||
262 0123456789abcdef\0fedcba9876543210\0";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::AixBig);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"ord\n"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"fedcba9876543210");
|
||||
assert_eq!(member.data(data).unwrap(), &b"rev\n"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
}
|
||||
211
vendor/object/src/read/coff/comdat.rs
vendored
Normal file
211
vendor/object/src/read/coff/comdat.rs
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
use core::str;
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::{
|
||||
self, ComdatKind, ObjectComdat, ReadError, ReadRef, Result, SectionIndex, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader, ImageSymbol};
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`CoffBigFile`](super::CoffBigFile).
pub type CoffBigComdatIterator<'data, 'file, R = &'data [u8]> =
    CoffComdatIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;

/// An iterator for the COMDAT section groups in a [`CoffFile`].
///
/// Scans the symbol table for section symbols whose auxiliary record
/// carries a non-associative COMDAT selection.
#[derive(Debug)]
pub struct CoffComdatIterator<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    /// The file whose symbol table is scanned.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    /// Index of the next symbol table entry to examine.
    pub(super) index: usize,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
    for CoffComdatIterator<'data, 'file, R, Coff>
{
    type Item = CoffComdat<'data, 'file, R, Coff>;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let index = self.index;
            // Any lookup failure (e.g. index past the table) ends iteration.
            let symbol = self.file.common.symbols.symbol(index).ok()?;
            // Advance past this symbol and its auxiliary records.
            self.index += 1 + symbol.number_of_aux_symbols() as usize;
            // Yield only symbols that begin a COMDAT group; skip the rest.
            if let Some(comdat) = CoffComdat::parse(self.file, symbol, index) {
                return Some(comdat);
            }
        }
    }
}
|
||||
|
||||
/// A COMDAT section group in a [`CoffBigFile`](super::CoffBigFile).
///
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
pub type CoffBigComdat<'data, 'file, R = &'data [u8]> =
    CoffComdat<'data, 'file, R, pe::AnonObjectHeaderBigobj>;

/// A COMDAT section group in a [`CoffFile`].
///
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
#[derive(Debug)]
pub struct CoffComdat<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    /// The file containing the group.
    file: &'file CoffFile<'data, R, Coff>,
    /// Index of the COMDAT symbol (the first symbol in the group's section
    /// after the section symbol itself; see `CoffComdat::parse`).
    symbol_index: SymbolIndex,
    /// The COMDAT symbol, used for the group's name.
    symbol: &'data Coff::ImageSymbol,
    /// Raw `IMAGE_COMDAT_SELECT_*` value from the auxiliary section record.
    selection: u8,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffComdat<'data, 'file, R, Coff> {
    /// Try to interpret the symbol at `index` as the start of a COMDAT
    /// section group; returns `None` if it is not one.
    fn parse(
        file: &'file CoffFile<'data, R, Coff>,
        section_symbol: &'data Coff::ImageSymbol,
        index: usize,
    ) -> Option<CoffComdat<'data, 'file, R, Coff>> {
        // Must be a section symbol.
        if !section_symbol.has_aux_section() {
            return None;
        }

        // Auxiliary record must have a non-associative selection.
        if selection == 0 || selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE is
        // excluded: associative groups are secondary members, not group leaders.
        let aux = file.common.symbols.aux_section(index).ok()?;
        let selection = aux.selection;
        if selection == 0 || selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
            return None;
        }

        // Find the COMDAT symbol: scan forward for the next symbol in the
        // same section. If none exists, `symbol()` fails and we return None.
        let mut symbol_index = index;
        let mut symbol = section_symbol;
        let section_number = section_symbol.section_number();
        loop {
            symbol_index += 1 + symbol.number_of_aux_symbols() as usize;
            symbol = file.common.symbols.symbol(symbol_index).ok()?;
            if section_number == symbol.section_number() {
                break;
            }
        }

        Some(CoffComdat {
            file,
            symbol_index: SymbolIndex(symbol_index),
            symbol,
            selection,
        })
    }
}
|
||||
|
||||
// Empty marker impl: `read::private::Sealed` is the crate's private
// supertrait used to seal its public object traits, so only in-crate types
// like `CoffComdat` can implement `ObjectComdat`.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffComdat<'data, 'file, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectComdat<'data>
    for CoffComdat<'data, 'file, R, Coff>
{
    type SectionIterator = CoffComdatSectionIterator<'data, 'file, R, Coff>;

    /// Map the raw `IMAGE_COMDAT_SELECT_*` value to the generic enum.
    #[inline]
    fn kind(&self) -> ComdatKind {
        match self.selection {
            pe::IMAGE_COMDAT_SELECT_NODUPLICATES => ComdatKind::NoDuplicates,
            pe::IMAGE_COMDAT_SELECT_ANY => ComdatKind::Any,
            pe::IMAGE_COMDAT_SELECT_SAME_SIZE => ComdatKind::SameSize,
            pe::IMAGE_COMDAT_SELECT_EXACT_MATCH => ComdatKind::ExactMatch,
            pe::IMAGE_COMDAT_SELECT_LARGEST => ComdatKind::Largest,
            pe::IMAGE_COMDAT_SELECT_NEWEST => ComdatKind::Newest,
            _ => ComdatKind::Unknown,
        }
    }

    #[inline]
    fn symbol(&self) -> SymbolIndex {
        self.symbol_index
    }

    #[inline]
    fn name_bytes(&self) -> Result<&[u8]> {
        // Find the name of first symbol referring to the section.
        self.symbol.name(self.file.common.symbols.strings())
    }

    #[inline]
    fn name(&self) -> Result<&str> {
        let bytes = self.name_bytes()?;
        str::from_utf8(bytes)
            .ok()
            .read_error("Non UTF-8 COFF COMDAT name")
    }

    #[inline]
    fn sections(&self) -> Self::SectionIterator {
        CoffComdatSectionIterator {
            file: self.file,
            section_number: self.symbol.section_number(),
            index: 0,
        }
    }
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`CoffBigFile`](super::CoffBigFile).
pub type CoffBigComdatSectionIterator<'data, 'file, R = &'data [u8]> =
    CoffComdatSectionIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;

/// An iterator for the sections in a COMDAT section group in a [`CoffFile`].
#[derive(Debug)]
pub struct CoffComdatSectionIterator<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    /// The file whose symbol table is scanned.
    file: &'file CoffFile<'data, R, Coff>,
    /// Section number of the group leader; members are matched against it.
    section_number: i32,
    /// Index of the next symbol table entry to examine.
    index: usize,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
    for CoffComdatSectionIterator<'data, 'file, R, Coff>
{
    type Item = SectionIndex;

    fn next(&mut self) -> Option<Self::Item> {
        // Find associated COMDAT symbols.
        // TODO: it seems gcc doesn't use associated symbols for this
        loop {
            let index = self.index;
            // Any lookup failure ends iteration.
            let symbol = self.file.common.symbols.symbol(index).ok()?;
            self.index += 1 + symbol.number_of_aux_symbols() as usize;

            // Must be a section symbol.
            if !symbol.has_aux_section() {
                continue;
            }

            let section_number = symbol.section_number();

            let aux = self.file.common.symbols.aux_section(index).ok()?;
            if aux.selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
                // Associative sections name the section they follow in the
                // aux record's number field (split across two 16-bit fields
                // for bigobj).
                let number = if Coff::is_type_bigobj() {
                    u32::from(aux.number.get(LE)) | (u32::from(aux.high_number.get(LE)) << 16)
                } else {
                    u32::from(aux.number.get(LE))
                };
                if number as i32 == self.section_number {
                    return Some(SectionIndex(section_number as usize));
                }
            } else if aux.selection != 0 {
                // The group leader's own section.
                if section_number == self.section_number {
                    return Some(SectionIndex(section_number as usize));
                }
            }
        }
    }
}
|
||||
381
vendor/object/src/read/coff/file.rs
vendored
Normal file
381
vendor/object/src/read/coff/file.rs
vendored
Normal file
@@ -0,0 +1,381 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, Export, FileFlags, Import, NoDynamicRelocationIterator, Object, ObjectKind,
|
||||
ObjectSection, ReadError, ReadRef, Result, SectionIndex, SubArchitecture, SymbolIndex,
|
||||
};
|
||||
use crate::{pe, LittleEndian as LE, Pod};
|
||||
|
||||
use super::{
|
||||
CoffComdat, CoffComdatIterator, CoffSection, CoffSectionIterator, CoffSegment,
|
||||
CoffSegmentIterator, CoffSymbol, CoffSymbolIterator, CoffSymbolTable, ImageSymbol,
|
||||
SectionTable, SymbolTable,
|
||||
};
|
||||
|
||||
/// The common parts of `PeFile` and `CoffFile`.
#[derive(Debug)]
pub(crate) struct CoffCommon<'data, R: ReadRef<'data>, Coff: CoffHeader = pe::ImageFileHeader> {
    // Parsed section header table.
    pub(crate) sections: SectionTable<'data>,
    // Parsed symbol table plus its string table.
    pub(crate) symbols: SymbolTable<'data, R, Coff>,
    // Image base address; 0 for object files, nonzero only for PE images.
    pub(crate) image_base: u64,
}
|
||||
|
||||
/// A COFF bigobj object file with 32-bit section numbers.
|
||||
///
|
||||
/// This is a file that starts with [`pe::AnonObjectHeaderBigobj`], and corresponds
|
||||
/// to [`crate::FileKind::CoffBig`].
|
||||
///
|
||||
/// Most functionality is provided by the [`Object`] trait implementation.
|
||||
pub type CoffBigFile<'data, R = &'data [u8]> = CoffFile<'data, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A COFF object file.
///
/// This is a file that starts with [`pe::ImageFileHeader`], and corresponds
/// to [`crate::FileKind::Coff`].
///
/// Most functionality is provided by the [`Object`] trait implementation.
#[derive(Debug)]
pub struct CoffFile<'data, R: ReadRef<'data> = &'data [u8], Coff: CoffHeader = pe::ImageFileHeader>
{
    // The file header at the start of the data.
    pub(super) header: &'data Coff,
    // State shared with the PE reader (sections, symbols, image base).
    pub(super) common: CoffCommon<'data, R, Coff>,
    // The entire file data, retained for section data lookups.
    pub(super) data: R,
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> CoffFile<'data, R, Coff> {
    /// Parse the raw COFF file data.
    ///
    /// Reads the file header, section headers and symbol table eagerly;
    /// section contents are read lazily on demand.
    pub fn parse(data: R) -> Result<Self> {
        let mut offset = 0;
        // `parse` advances `offset` past the header and any optional header,
        // leaving it at the section headers.
        let header = Coff::parse(data, &mut offset)?;
        let sections = header.sections(data, offset)?;
        let symbols = header.symbols(data)?;

        Ok(CoffFile {
            header,
            common: CoffCommon {
                sections,
                symbols,
                // COFF object files are not mapped at a base address.
                image_base: 0,
            },
            data,
        })
    }
}
|
||||
|
||||
// Seal the `Object` impl so downstream crates cannot implement the trait.
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffFile<'data, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R, Coff> Object<'data, 'file> for CoffFile<'data, R, Coff>
where
    'data: 'file,
    R: 'file + ReadRef<'data>,
    Coff: CoffHeader,
{
    type Segment = CoffSegment<'data, 'file, R, Coff>;
    type SegmentIterator = CoffSegmentIterator<'data, 'file, R, Coff>;
    type Section = CoffSection<'data, 'file, R, Coff>;
    type SectionIterator = CoffSectionIterator<'data, 'file, R, Coff>;
    type Comdat = CoffComdat<'data, 'file, R, Coff>;
    type ComdatIterator = CoffComdatIterator<'data, 'file, R, Coff>;
    type Symbol = CoffSymbol<'data, 'file, R, Coff>;
    type SymbolIterator = CoffSymbolIterator<'data, 'file, R, Coff>;
    type SymbolTable = CoffSymbolTable<'data, 'file, R, Coff>;
    // COFF object files never have dynamic relocations.
    type DynamicRelocationIterator = NoDynamicRelocationIterator;

    // Map the COFF machine field to a generic architecture.
    fn architecture(&self) -> Architecture {
        match self.header.machine() {
            pe::IMAGE_FILE_MACHINE_ARMNT => Architecture::Arm,
            pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => Architecture::Aarch64,
            pe::IMAGE_FILE_MACHINE_I386 => Architecture::I386,
            pe::IMAGE_FILE_MACHINE_AMD64 => Architecture::X86_64,
            _ => Architecture::Unknown,
        }
    }

    fn sub_architecture(&self) -> Option<SubArchitecture> {
        match self.header.machine() {
            pe::IMAGE_FILE_MACHINE_ARM64EC => Some(SubArchitecture::Arm64EC),
            _ => None,
        }
    }

    #[inline]
    fn is_little_endian(&self) -> bool {
        // COFF is always little-endian.
        true
    }

    #[inline]
    fn is_64(&self) -> bool {
        // Windows COFF is always 32-bit, even for 64-bit architectures. This could be confusing.
        false
    }

    fn kind(&self) -> ObjectKind {
        // COFF files are always relocatable object files, never executables.
        ObjectKind::Relocatable
    }

    fn segments(&'file self) -> CoffSegmentIterator<'data, 'file, R, Coff> {
        CoffSegmentIterator {
            file: self,
            iter: self.common.sections.iter(),
        }
    }

    fn section_by_name_bytes(
        &'file self,
        section_name: &[u8],
    ) -> Option<CoffSection<'data, 'file, R, Coff>> {
        // Linear scan; sections with unreadable names are skipped.
        self.sections()
            .find(|section| section.name_bytes() == Ok(section_name))
    }

    fn section_by_index(
        &'file self,
        index: SectionIndex,
    ) -> Result<CoffSection<'data, 'file, R, Coff>> {
        // Note: section indices are 1-based in COFF.
        let section = self.common.sections.section(index.0)?;
        Ok(CoffSection {
            file: self,
            index,
            section,
        })
    }

    fn sections(&'file self) -> CoffSectionIterator<'data, 'file, R, Coff> {
        CoffSectionIterator {
            file: self,
            iter: self.common.sections.iter().enumerate(),
        }
    }

    fn comdats(&'file self) -> CoffComdatIterator<'data, 'file, R, Coff> {
        CoffComdatIterator {
            file: self,
            index: 0,
        }
    }

    fn symbol_by_index(
        &'file self,
        index: SymbolIndex,
    ) -> Result<CoffSymbol<'data, 'file, R, Coff>> {
        let symbol = self.common.symbols.symbol(index.0)?;
        Ok(CoffSymbol {
            file: &self.common,
            index,
            symbol,
        })
    }

    fn symbols(&'file self) -> CoffSymbolIterator<'data, 'file, R, Coff> {
        CoffSymbolIterator {
            file: &self.common,
            index: 0,
        }
    }

    #[inline]
    fn symbol_table(&'file self) -> Option<CoffSymbolTable<'data, 'file, R, Coff>> {
        Some(CoffSymbolTable { file: &self.common })
    }

    fn dynamic_symbols(&'file self) -> CoffSymbolIterator<'data, 'file, R, Coff> {
        // COFF has no dynamic symbols; return an exhausted iterator.
        CoffSymbolIterator {
            file: &self.common,
            // Hack: don't return any.
            index: self.common.symbols.len(),
        }
    }

    #[inline]
    fn dynamic_symbol_table(&'file self) -> Option<CoffSymbolTable<'data, 'file, R, Coff>> {
        None
    }

    #[inline]
    fn dynamic_relocations(&'file self) -> Option<NoDynamicRelocationIterator> {
        None
    }

    #[inline]
    fn imports(&self) -> Result<Vec<Import<'data>>> {
        // TODO: this could return undefined symbols, but not needed yet.
        Ok(Vec::new())
    }

    #[inline]
    fn exports(&self) -> Result<Vec<Export<'data>>> {
        // TODO: this could return global symbols, but not needed yet.
        Ok(Vec::new())
    }

    fn has_debug_symbols(&self) -> bool {
        // DWARF debug info in COFF lives in a `.debug_info` section.
        self.section_by_name(".debug_info").is_some()
    }

    fn relative_address_base(&self) -> u64 {
        0
    }

    #[inline]
    fn entry(&self) -> u64 {
        // Relocatable files have no entry point.
        0
    }

    fn flags(&self) -> FileFlags {
        FileFlags::Coff {
            characteristics: self.header.characteristics(),
        }
    }
}
|
||||
|
||||
/// Read the `class_id` field from a [`pe::AnonObjectHeader`].
|
||||
///
|
||||
/// This can be used to determine the format of the header.
|
||||
pub fn anon_object_class_id<'data, R: ReadRef<'data>>(data: R) -> Result<pe::ClsId> {
|
||||
let header = data
|
||||
.read_at::<pe::AnonObjectHeader>(0)
|
||||
.read_error("Invalid anon object header size or alignment")?;
|
||||
Ok(header.class_id)
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageFileHeader`] and [`pe::AnonObjectHeaderBigobj`].
#[allow(missing_docs)]
pub trait CoffHeader: Debug + Pod {
    // Symbol record type used by this header format (16- or 32-bit sections).
    type ImageSymbol: ImageSymbol;
    // Raw byte representation of a symbol record, used for aux records.
    type ImageSymbolBytes: Debug + Pod;

    /// Return true if this type is [`pe::AnonObjectHeaderBigobj`].
    ///
    /// This is a property of the type, not a value in the header data.
    fn is_type_bigobj() -> bool;

    fn machine(&self) -> u16;
    fn number_of_sections(&self) -> u32;
    fn pointer_to_symbol_table(&self) -> u32;
    fn number_of_symbols(&self) -> u32;
    fn characteristics(&self) -> u16;

    /// Read the file header.
    ///
    /// `data` must be the entire file data.
    /// `offset` must be the file header offset. It is updated to point after the optional header,
    /// which is where the section headers are located.
    fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self>;

    /// Read the section table.
    ///
    /// `data` must be the entire file data.
    /// `offset` must be after the optional file header.
    #[inline]
    fn sections<'data, R: ReadRef<'data>>(
        &self,
        data: R,
        offset: u64,
    ) -> read::Result<SectionTable<'data>> {
        SectionTable::parse(self, data, offset)
    }

    /// Read the symbol table and string table.
    ///
    /// `data` must be the entire file data.
    #[inline]
    fn symbols<'data, R: ReadRef<'data>>(
        &self,
        data: R,
    ) -> read::Result<SymbolTable<'data, R, Self>> {
        SymbolTable::parse(self, data)
    }
}
|
||||
|
||||
// Regular COFF: 16-bit section numbers, symbol records with 8-byte names.
impl CoffHeader for pe::ImageFileHeader {
    type ImageSymbol = pe::ImageSymbol;
    type ImageSymbolBytes = pe::ImageSymbolBytes;

    fn is_type_bigobj() -> bool {
        false
    }

    fn machine(&self) -> u16 {
        self.machine.get(LE)
    }

    fn number_of_sections(&self) -> u32 {
        // Widened from u16 to match the bigobj variant's interface.
        self.number_of_sections.get(LE).into()
    }

    fn pointer_to_symbol_table(&self) -> u32 {
        self.pointer_to_symbol_table.get(LE)
    }

    fn number_of_symbols(&self) -> u32 {
        self.number_of_symbols.get(LE)
    }

    fn characteristics(&self) -> u16 {
        self.characteristics.get(LE)
    }

    fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self> {
        let header = data
            .read::<pe::ImageFileHeader>(offset)
            .read_error("Invalid COFF file header size or alignment")?;

        // Skip over the optional header.
        *offset = offset
            .checked_add(header.size_of_optional_header.get(LE).into())
            .read_error("Invalid COFF optional header size")?;

        // TODO: maybe validate that the machine is known?
        Ok(header)
    }
}
|
||||
|
||||
// Bigobj COFF: 32-bit section numbers via the extended symbol record.
impl CoffHeader for pe::AnonObjectHeaderBigobj {
    type ImageSymbol = pe::ImageSymbolEx;
    type ImageSymbolBytes = pe::ImageSymbolExBytes;

    fn is_type_bigobj() -> bool {
        true
    }

    fn machine(&self) -> u16 {
        self.machine.get(LE)
    }

    fn number_of_sections(&self) -> u32 {
        self.number_of_sections.get(LE)
    }

    fn pointer_to_symbol_table(&self) -> u32 {
        self.pointer_to_symbol_table.get(LE)
    }

    fn number_of_symbols(&self) -> u32 {
        self.number_of_symbols.get(LE)
    }

    fn characteristics(&self) -> u16 {
        // The bigobj header has no characteristics field.
        0
    }

    fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self> {
        let header = data
            .read::<pe::AnonObjectHeaderBigobj>(offset)
            .read_error("Invalid COFF bigobj file header size or alignment")?;

        // Validate the anon-object signature, minimum version, and the
        // bigobj class GUID before trusting the rest of the header.
        if header.sig1.get(LE) != pe::IMAGE_FILE_MACHINE_UNKNOWN
            || header.sig2.get(LE) != 0xffff
            || header.version.get(LE) < 2
            || header.class_id != pe::ANON_OBJECT_HEADER_BIGOBJ_CLASS_ID
        {
            return Err(read::Error("Invalid COFF bigobj header values"));
        }

        // TODO: maybe validate that the machine is known?
        Ok(header)
    }
}
|
||||
220
vendor/object/src/read/coff/import.rs
vendored
Normal file
220
vendor/object/src/read/coff/import.rs
vendored
Normal file
@@ -0,0 +1,220 @@
|
||||
//! Support for reading short import files.
|
||||
//!
|
||||
//! These are used by some Windows linkers as a more compact way to describe
|
||||
//! dynamically imported symbols.
|
||||
|
||||
use crate::read::{Architecture, Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, ByteString, Bytes, LittleEndian as LE, SubArchitecture};
|
||||
|
||||
/// A Windows short form description of a symbol to import.
///
/// Used in Windows import libraries to provide a mapping from
/// a symbol name to a DLL export. This is not an object file.
///
/// This is a file that starts with [`pe::ImportObjectHeader`], and corresponds
/// to [`crate::FileKind::CoffImport`].
#[derive(Debug, Clone)]
pub struct ImportFile<'data> {
    header: &'data pe::ImportObjectHeader,
    // Decoded import type (code/data/const).
    kind: ImportType,
    // DLL name from the trailing string data.
    dll: ByteString<'data>,
    // Public symbol name from the trailing string data.
    symbol: ByteString<'data>,
    // Export name, if the import is by name rather than by ordinal.
    import: Option<ByteString<'data>>,
}
|
||||
|
||||
impl<'data> ImportFile<'data> {
    /// Parse it.
    pub fn parse<R: ReadRef<'data>>(data: R) -> Result<Self> {
        let mut offset = 0;
        let header = pe::ImportObjectHeader::parse(data, &mut offset)?;
        let data = header.parse_data(data, &mut offset)?;

        // Unmangles a name by removing a `?`, `@` or `_` prefix.
        fn strip_prefix(s: &[u8]) -> &[u8] {
            match s.split_first() {
                Some((b, rest)) if [b'?', b'@', b'_'].contains(b) => rest,
                _ => s,
            }
        }
        Ok(Self {
            header,
            dll: data.dll,
            symbol: data.symbol,
            kind: match header.import_type() {
                pe::IMPORT_OBJECT_CODE => ImportType::Code,
                pe::IMPORT_OBJECT_DATA => ImportType::Data,
                pe::IMPORT_OBJECT_CONST => ImportType::Const,
                _ => return Err(Error("Invalid COFF import library import type")),
            },
            // Derive the export name from the name type: ordinal imports
            // have none; the rest transform the symbol name or use an
            // explicit export name.
            import: match header.name_type() {
                pe::IMPORT_OBJECT_ORDINAL => None,
                pe::IMPORT_OBJECT_NAME => Some(data.symbol()),
                pe::IMPORT_OBJECT_NAME_NO_PREFIX => Some(strip_prefix(data.symbol())),
                // Undecorate: strip the prefix, then truncate at the first `@`.
                pe::IMPORT_OBJECT_NAME_UNDECORATE => Some(
                    strip_prefix(data.symbol())
                        .split(|&b| b == b'@')
                        .next()
                        .unwrap(),
                ),
                pe::IMPORT_OBJECT_NAME_EXPORTAS => data.export(),
                _ => return Err(Error("Unknown COFF import library name type")),
            }
            .map(ByteString),
        })
    }

    /// Get the machine type.
    pub fn architecture(&self) -> Architecture {
        match self.header.machine.get(LE) {
            pe::IMAGE_FILE_MACHINE_ARMNT => Architecture::Arm,
            pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => Architecture::Aarch64,
            pe::IMAGE_FILE_MACHINE_I386 => Architecture::I386,
            pe::IMAGE_FILE_MACHINE_AMD64 => Architecture::X86_64,
            _ => Architecture::Unknown,
        }
    }

    /// Get the sub machine type, if available.
    pub fn sub_architecture(&self) -> Option<SubArchitecture> {
        match self.header.machine.get(LE) {
            pe::IMAGE_FILE_MACHINE_ARM64EC => Some(SubArchitecture::Arm64EC),
            _ => None,
        }
    }

    /// The public symbol name.
    pub fn symbol(&self) -> &'data [u8] {
        self.symbol.0
    }

    /// The name of the DLL to import the symbol from.
    pub fn dll(&self) -> &'data [u8] {
        self.dll.0
    }

    /// The name exported from the DLL.
    pub fn import(&self) -> ImportName<'data> {
        match self.import {
            Some(name) => ImportName::Name(name.0),
            None => ImportName::Ordinal(self.header.ordinal_or_hint.get(LE)),
        }
    }

    /// The type of import. Usually either a function or data.
    pub fn import_type(&self) -> ImportType {
        self.kind
    }
}
|
||||
|
||||
/// The name or ordinal to import from a DLL.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ImportName<'data> {
    /// Import by ordinal. Ordinarily this is a 1-based index.
    Ordinal(u16),
    /// Import by name.
    Name(&'data [u8]),
}
|
||||
|
||||
/// The kind of import symbol.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ImportType {
    /// An executable code symbol.
    Code,
    /// A data symbol.
    Data,
    /// A constant value.
    Const,
}
|
||||
|
||||
impl pe::ImportObjectHeader {
    /// Read the short import header.
    ///
    /// Also checks that the signature and version are valid.
    /// Directly following this header will be the string data.
    pub fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> Result<&'data Self> {
        let header = data
            .read::<pe::ImportObjectHeader>(offset)
            .read_error("Invalid COFF import library header size")?;
        // sig1/sig2 distinguish a short import from a real COFF object.
        if header.sig1.get(LE) != 0 || header.sig2.get(LE) != pe::IMPORT_OBJECT_HDR_SIG2 {
            Err(Error("Invalid COFF import library header"))
        } else if header.version.get(LE) != 0 {
            Err(Error("Unknown COFF import library header version"))
        } else {
            Ok(header)
        }
    }

    /// Parse the data following the header.
    pub fn parse_data<'data, R: ReadRef<'data>>(
        &self,
        data: R,
        offset: &mut u64,
    ) -> Result<ImportObjectData<'data>> {
        // The string data is `size_of_data` bytes of consecutive
        // NUL-terminated strings: symbol, DLL, and optionally export name.
        let mut data = Bytes(
            data.read_bytes(offset, u64::from(self.size_of_data.get(LE)))
                .read_error("Invalid COFF import library data size")?,
        );
        let symbol = data
            .read_string()
            .map(ByteString)
            .read_error("Could not read COFF import library symbol name")?;
        let dll = data
            .read_string()
            .map(ByteString)
            .read_error("Could not read COFF import library DLL name")?;
        // A third string is present only for the EXPORTAS name type.
        let export = if self.name_type() == pe::IMPORT_OBJECT_NAME_EXPORTAS {
            data.read_string()
                .map(ByteString)
                .map(Some)
                .read_error("Could not read COFF import library export name")?
        } else {
            None
        };
        Ok(ImportObjectData {
            symbol,
            dll,
            export,
        })
    }

    /// The type of import.
    ///
    /// This is one of the `IMPORT_OBJECT_*` constants.
    pub fn import_type(&self) -> u16 {
        // Import type lives in the low bits of the packed `name_type` field.
        self.name_type.get(LE) & pe::IMPORT_OBJECT_TYPE_MASK
    }

    /// The type of import name.
    ///
    /// This is one of the `IMPORT_OBJECT_*` constants.
    pub fn name_type(&self) -> u16 {
        // Name type is a bitfield above the import type bits.
        (self.name_type.get(LE) >> pe::IMPORT_OBJECT_NAME_SHIFT) & pe::IMPORT_OBJECT_NAME_MASK
    }
}
|
||||
|
||||
/// The data following [`pe::ImportObjectHeader`].
#[derive(Debug, Clone)]
pub struct ImportObjectData<'data> {
    // Public symbol name.
    symbol: ByteString<'data>,
    // Source DLL name.
    dll: ByteString<'data>,
    // Explicit export name; only present for the EXPORTAS name type.
    export: Option<ByteString<'data>>,
}
|
||||
|
||||
impl<'data> ImportObjectData<'data> {
    /// The public symbol name.
    pub fn symbol(&self) -> &'data [u8] {
        self.symbol.0
    }

    /// The name of the DLL to import the symbol from.
    pub fn dll(&self) -> &'data [u8] {
        self.dll.0
    }

    /// The name exported from the DLL.
    ///
    /// This is only set if the name is not derived from the symbol name.
    pub fn export(&self) -> Option<&'data [u8]> {
        self.export.map(|export| export.0)
    }
}
|
||||
66
vendor/object/src/read/coff/mod.rs
vendored
Normal file
66
vendor/object/src/read/coff/mod.rs
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
//! Support for reading Windows COFF files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between COFF object files
|
||||
//! and COFF bigobj files. The primary trait for this is [`CoffHeader`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`CoffFile`] implements the [`Object`](crate::read::Object) trait for
|
||||
//! COFF files. [`CoffFile`] is parameterised by [`CoffHeader`].
|
||||
//! The default parameter allows reading regular COFF object files,
|
||||
//! while the type alias [`CoffBigFile`] allows reading COFF bigobj files.
|
||||
//!
|
||||
//! [`ImportFile`] allows reading COFF short imports that are used in import
|
||||
//! libraries. Currently these are not integrated with the unified read API.
|
||||
//!
|
||||
//! ## Low level API
|
||||
//!
|
||||
//! The [`CoffHeader`] trait can be directly used to parse both COFF
|
||||
//! object files (which start with [`pe::ImageFileHeader`]) and COFF bigobj
|
||||
//! files (which start with [`pe::AnonObjectHeaderBigobj`]).
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::pe;
|
||||
//! use object::read::coff::{CoffHeader, ImageSymbol as _};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section and symbol.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let mut offset = 0;
|
||||
//! let header = pe::ImageFileHeader::parse(&*data, &mut offset)?;
|
||||
//! let sections = header.sections(&*data, offset)?;
|
||||
//! let symbols = header.symbols(&*data)?;
|
||||
//! for section in sections.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(section.name(symbols.strings())?));
|
||||
//! }
|
||||
//! for (_index, symbol) in symbols.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(symbol.name(symbols.strings())?));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::pe;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod symbol;
|
||||
pub use symbol::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
|
||||
mod comdat;
|
||||
pub use comdat::*;
|
||||
|
||||
mod import;
|
||||
pub use import::*;
|
||||
106
vendor/object/src/read/coff/relocation.rs
vendored
Normal file
106
vendor/object/src/read/coff/relocation.rs
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
use alloc::fmt;
|
||||
use core::slice;
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::{
|
||||
ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader};
|
||||
|
||||
/// An iterator for the relocations in a [`CoffBigSection`](super::CoffBigSection).
|
||||
pub type CoffBigRelocationIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffRelocationIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the relocations in a [`CoffSection`](super::CoffSection).
pub struct CoffRelocationIterator<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    // Needed to consult the file header's machine type when decoding.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    // Raw relocation records for the section.
    pub(super) iter: slice::Iter<'data, pe::ImageRelocation>,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
    for CoffRelocationIterator<'data, 'file, R, Coff>
{
    type Item = (u64, Relocation);

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|relocation| {
            // Decode the machine-specific relocation type into a generic
            // (kind, bit size, implicit addend) triple. Unrecognized types
            // are passed through as RelocationKind::Coff with size 0.
            let (kind, size, addend) = match self.file.header.machine() {
                pe::IMAGE_FILE_MACHINE_ARMNT => match relocation.typ.get(LE) {
                    pe::IMAGE_REL_ARM_ADDR32 => (RelocationKind::Absolute, 32, 0),
                    pe::IMAGE_REL_ARM_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
                    pe::IMAGE_REL_ARM_REL32 => (RelocationKind::Relative, 32, -4),
                    pe::IMAGE_REL_ARM_SECTION => (RelocationKind::SectionIndex, 16, 0),
                    pe::IMAGE_REL_ARM_SECREL => (RelocationKind::SectionOffset, 32, 0),
                    typ => (RelocationKind::Coff(typ), 0, 0),
                },
                pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => {
                    match relocation.typ.get(LE) {
                        pe::IMAGE_REL_ARM64_ADDR32 => (RelocationKind::Absolute, 32, 0),
                        pe::IMAGE_REL_ARM64_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
                        pe::IMAGE_REL_ARM64_SECREL => (RelocationKind::SectionOffset, 32, 0),
                        pe::IMAGE_REL_ARM64_SECTION => (RelocationKind::SectionIndex, 16, 0),
                        pe::IMAGE_REL_ARM64_ADDR64 => (RelocationKind::Absolute, 64, 0),
                        pe::IMAGE_REL_ARM64_REL32 => (RelocationKind::Relative, 32, -4),
                        typ => (RelocationKind::Coff(typ), 0, 0),
                    }
                }
                pe::IMAGE_FILE_MACHINE_I386 => match relocation.typ.get(LE) {
                    pe::IMAGE_REL_I386_DIR16 => (RelocationKind::Absolute, 16, 0),
                    pe::IMAGE_REL_I386_REL16 => (RelocationKind::Relative, 16, 0),
                    pe::IMAGE_REL_I386_DIR32 => (RelocationKind::Absolute, 32, 0),
                    pe::IMAGE_REL_I386_DIR32NB => (RelocationKind::ImageOffset, 32, 0),
                    pe::IMAGE_REL_I386_SECTION => (RelocationKind::SectionIndex, 16, 0),
                    pe::IMAGE_REL_I386_SECREL => (RelocationKind::SectionOffset, 32, 0),
                    pe::IMAGE_REL_I386_SECREL7 => (RelocationKind::SectionOffset, 7, 0),
                    pe::IMAGE_REL_I386_REL32 => (RelocationKind::Relative, 32, -4),
                    typ => (RelocationKind::Coff(typ), 0, 0),
                },
                pe::IMAGE_FILE_MACHINE_AMD64 => match relocation.typ.get(LE) {
                    pe::IMAGE_REL_AMD64_ADDR64 => (RelocationKind::Absolute, 64, 0),
                    pe::IMAGE_REL_AMD64_ADDR32 => (RelocationKind::Absolute, 32, 0),
                    pe::IMAGE_REL_AMD64_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
                    // REL32_N means the target is N extra bytes before the
                    // end of the instruction, hence the decreasing addends.
                    pe::IMAGE_REL_AMD64_REL32 => (RelocationKind::Relative, 32, -4),
                    pe::IMAGE_REL_AMD64_REL32_1 => (RelocationKind::Relative, 32, -5),
                    pe::IMAGE_REL_AMD64_REL32_2 => (RelocationKind::Relative, 32, -6),
                    pe::IMAGE_REL_AMD64_REL32_3 => (RelocationKind::Relative, 32, -7),
                    pe::IMAGE_REL_AMD64_REL32_4 => (RelocationKind::Relative, 32, -8),
                    pe::IMAGE_REL_AMD64_REL32_5 => (RelocationKind::Relative, 32, -9),
                    pe::IMAGE_REL_AMD64_SECTION => (RelocationKind::SectionIndex, 16, 0),
                    pe::IMAGE_REL_AMD64_SECREL => (RelocationKind::SectionOffset, 32, 0),
                    pe::IMAGE_REL_AMD64_SECREL7 => (RelocationKind::SectionOffset, 7, 0),
                    typ => (RelocationKind::Coff(typ), 0, 0),
                },
                _ => (RelocationKind::Coff(relocation.typ.get(LE)), 0, 0),
            };
            let target = RelocationTarget::Symbol(SymbolIndex(
                relocation.symbol_table_index.get(LE) as usize,
            ));
            (
                u64::from(relocation.virtual_address.get(LE)),
                Relocation {
                    kind,
                    encoding: RelocationEncoding::Generic,
                    size,
                    target,
                    addend,
                    // COFF stores the addend in the relocated section data.
                    implicit_addend: true,
                },
            )
        })
    }
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> fmt::Debug
|
||||
for CoffRelocationIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("CoffRelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
585
vendor/object/src/read/coff/section.rs
vendored
Normal file
585
vendor/object/src/read/coff/section.rs
vendored
Normal file
@@ -0,0 +1,585 @@
|
||||
use core::convert::TryFrom;
|
||||
use core::{iter, result, slice, str};
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, Error, ObjectSection, ObjectSegment, ReadError,
|
||||
ReadRef, Result, SectionFlags, SectionIndex, SectionKind, SegmentFlags,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader, CoffRelocationIterator};
|
||||
|
||||
/// The table of section headers in a COFF or PE file.
///
/// Returned by [`CoffHeader::sections`] and
/// [`ImageNtHeaders::sections`](crate::read::pe::ImageNtHeaders::sections).
#[derive(Debug, Default, Clone, Copy)]
pub struct SectionTable<'data> {
    // Borrowed slice of raw section headers; indices into it are 1-based
    // at the API level (see `section`).
    sections: &'data [pe::ImageSectionHeader],
}
|
||||
|
||||
impl<'data> SectionTable<'data> {
    /// Parse the section table.
    ///
    /// `data` must be the entire file data.
    /// `offset` must be after the optional file header.
    pub fn parse<Coff: CoffHeader, R: ReadRef<'data>>(
        header: &Coff,
        data: R,
        offset: u64,
    ) -> Result<Self> {
        // The header's section count determines the table length.
        let sections = data
            .read_slice_at(offset, header.number_of_sections() as usize)
            .read_error("Invalid COFF/PE section headers")?;
        Ok(SectionTable { sections })
    }

    /// Iterate over the section headers.
    ///
    /// Warning: sections indices start at 1.
    #[inline]
    pub fn iter(&self) -> slice::Iter<'data, pe::ImageSectionHeader> {
        self.sections.iter()
    }

    /// Return true if the section table is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.sections.is_empty()
    }

    /// The number of section headers.
    #[inline]
    pub fn len(&self) -> usize {
        self.sections.len()
    }

    /// Return the section header at the given index.
    ///
    /// The index is 1-based.
    pub fn section(&self, index: usize) -> read::Result<&'data pe::ImageSectionHeader> {
        // wrapping_sub makes index 0 wrap to usize::MAX, which `get`
        // rejects, so an invalid 0 index reports the same error.
        self.sections
            .get(index.wrapping_sub(1))
            .read_error("Invalid COFF/PE section index")
    }

    /// Return the section header with the given name.
    ///
    /// The returned index is 1-based.
    ///
    /// Ignores sections with invalid names.
    pub fn section_by_name<R: ReadRef<'data>>(
        &self,
        strings: StringTable<'data, R>,
        name: &[u8],
    ) -> Option<(usize, &'data pe::ImageSectionHeader)> {
        self.sections
            .iter()
            .enumerate()
            .find(|(_, section)| section.name(strings) == Ok(name))
            .map(|(index, section)| (index + 1, section))
    }

    /// Compute the maximum file offset used by sections.
    ///
    /// This will usually match the end of file, unless the PE file has a
    /// [data overlay](https://security.stackexchange.com/questions/77336/how-is-the-file-overlay-read-by-an-exe-virus)
    pub fn max_section_file_offset(&self) -> u64 {
        let mut max = 0;
        for section in self.iter() {
            match (section.pointer_to_raw_data.get(LE) as u64)
                .checked_add(section.size_of_raw_data.get(LE) as u64)
            {
                None => {
                    // This cannot happen, we're summing two u32 into a u64
                    continue;
                }
                Some(end_of_section) => {
                    if end_of_section > max {
                        max = end_of_section;
                    }
                }
            }
        }
        max
    }
}
|
||||
|
||||
/// An iterator for the loadable sections in a [`CoffBigFile`](super::CoffBigFile).
///
/// Alias of [`CoffSegmentIterator`] instantiated for the bigobj header type.
pub type CoffBigSegmentIterator<'data, 'file, R = &'data [u8]> =
    CoffSegmentIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the loadable sections in a [`CoffFile`].
#[derive(Debug)]
pub struct CoffSegmentIterator<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    // Parent file, shared by all yielded segments.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    // Iterator over the raw section headers.
    pub(super) iter: slice::Iter<'data, pe::ImageSectionHeader>,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSegmentIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSegment<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|section| CoffSegment {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A loadable section in a [`CoffBigFile`](super::CoffBigFile).
///
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
pub type CoffBigSegment<'data, 'file, R = &'data [u8]> =
    CoffSegment<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A loadable section in a [`CoffFile`].
///
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
#[derive(Debug)]
pub struct CoffSegment<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    // Parent file; used to reach the file data and symbol string table.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    // The raw section header backing this segment.
    pub(super) section: &'data pe::ImageSectionHeader,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSegment<'data, 'file, R, Coff> {
    /// Read this section's raw file data, mapping a decode failure to a read error.
    fn bytes(&self) -> Result<&'data [u8]> {
        self.section
            .coff_data(self.file.data)
            .read_error("Invalid COFF section offset or size")
    }
}
|
||||
|
||||
// Sealed-trait marker: `read::private::Sealed` keeps downstream crates from
// implementing the public traits for this type.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffSegment<'data, 'file, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSegment<'data>
    for CoffSegment<'data, 'file, R, Coff>
{
    #[inline]
    fn address(&self) -> u64 {
        u64::from(self.section.virtual_address.get(LE))
    }

    #[inline]
    fn size(&self) -> u64 {
        // Segment size is the in-memory (virtual) size, not the raw data size.
        u64::from(self.section.virtual_size.get(LE))
    }

    #[inline]
    fn align(&self) -> u64 {
        self.section.coff_alignment()
    }

    #[inline]
    fn file_range(&self) -> (u64, u64) {
        // Sections with no file data (e.g. uninitialized data) report (0, 0).
        let (offset, size) = self.section.coff_file_range().unwrap_or((0, 0));
        (u64::from(offset), u64::from(size))
    }

    fn data(&self) -> Result<&'data [u8]> {
        self.bytes()
    }

    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        // Returns `Ok(None)` when the requested range is outside this segment.
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    #[inline]
    fn name_bytes(&self) -> Result<Option<&[u8]>> {
        // Long section names live in the symbol string table.
        self.section
            .name(self.file.common.symbols.strings())
            .map(Some)
    }

    #[inline]
    fn name(&self) -> Result<Option<&str>> {
        let name = self.section.name(self.file.common.symbols.strings())?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 COFF section name")
            .map(Some)
    }

    #[inline]
    fn flags(&self) -> SegmentFlags {
        // Expose the raw characteristics bits unchanged.
        let characteristics = self.section.characteristics.get(LE);
        SegmentFlags::Coff { characteristics }
    }
}
|
||||
|
||||
/// An iterator for the sections in a [`CoffBigFile`](super::CoffBigFile).
///
/// Alias of [`CoffSectionIterator`] instantiated for the bigobj header type.
pub type CoffBigSectionIterator<'data, 'file, R = &'data [u8]> =
    CoffSectionIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the sections in a [`CoffFile`].
#[derive(Debug)]
pub struct CoffSectionIterator<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    // Parent file, shared by all yielded sections.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    // Enumerated so each section can be paired with its (0-based) position.
    pub(super) iter: iter::Enumerate<slice::Iter<'data, pe::ImageSectionHeader>>,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSectionIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSection<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|(index, section)| CoffSection {
|
||||
file: self.file,
|
||||
index: SectionIndex(index + 1),
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`CoffBigFile`](super::CoffBigFile).
///
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
pub type CoffBigSection<'data, 'file, R = &'data [u8]> =
    CoffSection<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A section in a [`CoffFile`].
///
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
#[derive(Debug)]
pub struct CoffSection<
    'data,
    'file,
    R: ReadRef<'data> = &'data [u8],
    Coff: CoffHeader = pe::ImageFileHeader,
> {
    // Parent file; used to reach the file data, symbols, and relocations.
    pub(super) file: &'file CoffFile<'data, R, Coff>,
    // 1-based index of this section in the section table.
    pub(super) index: SectionIndex,
    // The raw section header.
    pub(super) section: &'data pe::ImageSectionHeader,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSection<'data, 'file, R, Coff> {
    /// Read this section's raw file data, mapping a decode failure to a read error.
    fn bytes(&self) -> Result<&'data [u8]> {
        self.section
            .coff_data(self.file.data)
            .read_error("Invalid COFF section offset or size")
    }
}
|
||||
|
||||
// Sealed-trait marker: `read::private::Sealed` keeps downstream crates from
// implementing the public traits for this type.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffSection<'data, 'file, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSection<'data>
    for CoffSection<'data, 'file, R, Coff>
{
    type RelocationIterator = CoffRelocationIterator<'data, 'file, R, Coff>;

    #[inline]
    fn index(&self) -> SectionIndex {
        self.index
    }

    #[inline]
    fn address(&self) -> u64 {
        u64::from(self.section.virtual_address.get(LE))
    }

    #[inline]
    fn size(&self) -> u64 {
        // TODO: This may need to be the length from the auxiliary symbol for this section.
        u64::from(self.section.size_of_raw_data.get(LE))
    }

    #[inline]
    fn align(&self) -> u64 {
        self.section.coff_alignment()
    }

    #[inline]
    fn file_range(&self) -> Option<(u64, u64)> {
        // `coff_file_range` is `None` for sections without file data
        // (e.g. uninitialized data).
        let (offset, size) = self.section.coff_file_range()?;
        Some((u64::from(offset), u64::from(size)))
    }

    fn data(&self) -> Result<&'data [u8]> {
        self.bytes()
    }

    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        // Returns `Ok(None)` when the requested range is outside this section.
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    #[inline]
    fn compressed_file_range(&self) -> Result<CompressedFileRange> {
        // Reported as uncompressed; there is no decompression step here.
        Ok(CompressedFileRange::none(self.file_range()))
    }

    #[inline]
    fn compressed_data(&self) -> Result<CompressedData<'data>> {
        // Reported as uncompressed; there is no decompression step here.
        self.data().map(CompressedData::none)
    }

    #[inline]
    fn name_bytes(&self) -> Result<&[u8]> {
        // Long section names live in the symbol string table.
        self.section.name(self.file.common.symbols.strings())
    }

    #[inline]
    fn name(&self) -> Result<&str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 COFF section name")
    }

    #[inline]
    fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
        // No segment name is provided for COFF sections.
        Ok(None)
    }

    #[inline]
    fn segment_name(&self) -> Result<Option<&str>> {
        // No segment name is provided for COFF sections.
        Ok(None)
    }

    #[inline]
    fn kind(&self) -> SectionKind {
        self.section.kind()
    }

    fn relocations(&self) -> CoffRelocationIterator<'data, 'file, R, Coff> {
        // Invalid relocation data yields an empty iterator rather than an error.
        let relocations = self.section.coff_relocations(self.file.data).unwrap_or(&[]);
        CoffRelocationIterator {
            file: self.file,
            iter: relocations.iter(),
        }
    }

    fn flags(&self) -> SectionFlags {
        // Expose the raw characteristics bits unchanged.
        SectionFlags::Coff {
            characteristics: self.section.characteristics.get(LE),
        }
    }
}
|
||||
|
||||
impl pe::ImageSectionHeader {
    /// Classify this section from its characteristics bits.
    ///
    /// Checks are ordered: executable bits win over data bits, and
    /// initialized data is examined before uninitialized data.
    pub(crate) fn kind(&self) -> SectionKind {
        let characteristics = self.characteristics.get(LE);
        if characteristics & (pe::IMAGE_SCN_CNT_CODE | pe::IMAGE_SCN_MEM_EXECUTE) != 0 {
            SectionKind::Text
        } else if characteristics & pe::IMAGE_SCN_CNT_INITIALIZED_DATA != 0 {
            if characteristics & pe::IMAGE_SCN_MEM_DISCARDABLE != 0 {
                SectionKind::Other
            } else if characteristics & pe::IMAGE_SCN_MEM_WRITE != 0 {
                SectionKind::Data
            } else {
                SectionKind::ReadOnlyData
            }
        } else if characteristics & pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA != 0 {
            SectionKind::UninitializedData
        } else if characteristics & pe::IMAGE_SCN_LNK_INFO != 0 {
            SectionKind::Linker
        } else {
            SectionKind::Unknown
        }
    }
}
|
||||
|
||||
impl pe::ImageSectionHeader {
    /// Return the string table offset of the section name.
    ///
    /// Returns `Ok(None)` if the name doesn't use the string table
    /// and can be obtained with `raw_name` instead.
    pub fn name_offset(&self) -> Result<Option<u32>> {
        let bytes = &self.name;
        // Names referencing the string table start with '/'.
        if bytes[0] != b'/' {
            return Ok(None);
        }

        if bytes[1] == b'/' {
            // "//" prefix: the remaining 6 bytes are a base-64 encoded offset.
            let mut offset = 0;
            for byte in bytes[2..].iter() {
                let digit = match byte {
                    b'A'..=b'Z' => byte - b'A',
                    b'a'..=b'z' => byte - b'a' + 26,
                    b'0'..=b'9' => byte - b'0' + 52,
                    b'+' => 62,
                    b'/' => 63,
                    _ => return Err(Error("Invalid COFF section name base-64 offset")),
                };
                // Accumulate in u64: 6 base-64 digits fit, but may exceed u32.
                offset = offset * 64 + digit as u64;
            }
            // Reject offsets that do not fit in u32.
            u32::try_from(offset)
                .ok()
                .read_error("Invalid COFF section name base-64 offset")
                .map(Some)
        } else {
            // "/" prefix: up to 7 decimal digits, possibly NUL-terminated early.
            let mut offset = 0;
            for byte in bytes[1..].iter() {
                let digit = match byte {
                    b'0'..=b'9' => byte - b'0',
                    0 => break,
                    _ => return Err(Error("Invalid COFF section name base-10 offset")),
                };
                offset = offset * 10 + digit as u32;
            }
            Ok(Some(offset))
        }
    }

    /// Return the section name.
    ///
    /// This handles decoding names that are offsets into the symbol string table.
    pub fn name<'data, R: ReadRef<'data>>(
        &'data self,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        if let Some(offset) = self.name_offset()? {
            strings
                .get(offset)
                .read_error("Invalid COFF section name offset")
        } else {
            Ok(self.raw_name())
        }
    }

    /// Return the raw section name.
    pub fn raw_name(&self) -> &[u8] {
        let bytes = &self.name;
        // The 8-byte field is NUL-padded; trim at the first NUL if any.
        match memchr::memchr(b'\0', bytes) {
            Some(end) => &bytes[..end],
            None => &bytes[..],
        }
    }

    /// Return the offset and size of the section in a COFF file.
    ///
    /// Returns `None` for sections that have no data in the file.
    pub fn coff_file_range(&self) -> Option<(u32, u32)> {
        if self.characteristics.get(LE) & pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA != 0 {
            None
        } else {
            let offset = self.pointer_to_raw_data.get(LE);
            // Note: virtual size is not used for COFF.
            let size = self.size_of_raw_data.get(LE);
            Some((offset, size))
        }
    }

    /// Return the section data in a COFF file.
    ///
    /// Returns `Ok(&[])` if the section has no data.
    /// Returns `Err` for invalid values.
    pub fn coff_data<'data, R: ReadRef<'data>>(&self, data: R) -> result::Result<&'data [u8], ()> {
        if let Some((offset, size)) = self.coff_file_range() {
            data.read_bytes_at(offset.into(), size.into())
        } else {
            Ok(&[])
        }
    }

    /// Return the section alignment in bytes.
    ///
    /// This is only valid for sections in a COFF file.
    pub fn coff_alignment(&self) -> u64 {
        // Alignment is encoded in the IMAGE_SCN_ALIGN_* characteristics bits;
        // unrecognized values (including 0) default to 16.
        match self.characteristics.get(LE) & pe::IMAGE_SCN_ALIGN_MASK {
            pe::IMAGE_SCN_ALIGN_1BYTES => 1,
            pe::IMAGE_SCN_ALIGN_2BYTES => 2,
            pe::IMAGE_SCN_ALIGN_4BYTES => 4,
            pe::IMAGE_SCN_ALIGN_8BYTES => 8,
            pe::IMAGE_SCN_ALIGN_16BYTES => 16,
            pe::IMAGE_SCN_ALIGN_32BYTES => 32,
            pe::IMAGE_SCN_ALIGN_64BYTES => 64,
            pe::IMAGE_SCN_ALIGN_128BYTES => 128,
            pe::IMAGE_SCN_ALIGN_256BYTES => 256,
            pe::IMAGE_SCN_ALIGN_512BYTES => 512,
            pe::IMAGE_SCN_ALIGN_1024BYTES => 1024,
            pe::IMAGE_SCN_ALIGN_2048BYTES => 2048,
            pe::IMAGE_SCN_ALIGN_4096BYTES => 4096,
            pe::IMAGE_SCN_ALIGN_8192BYTES => 8192,
            _ => 16,
        }
    }

    /// Read the relocations in a COFF file.
    ///
    /// `data` must be the entire file data.
    pub fn coff_relocations<'data, R: ReadRef<'data>>(
        &self,
        data: R,
    ) -> read::Result<&'data [pe::ImageRelocation]> {
        let mut pointer = self.pointer_to_relocations.get(LE).into();
        let mut number: usize = self.number_of_relocations.get(LE).into();
        if number == core::u16::MAX.into()
            && self.characteristics.get(LE) & pe::IMAGE_SCN_LNK_NRELOC_OVFL != 0
        {
            // Extended relocations. Read first relocation (which contains extended count) & adjust
            // relocations pointer.
            let extended_relocation_info = data
                .read_at::<pe::ImageRelocation>(pointer)
                .read_error("Invalid COFF relocation offset or number")?;
            number = extended_relocation_info.virtual_address.get(LE) as usize;
            if number == 0 {
                return Err(Error("Invalid COFF relocation number"));
            }
            pointer += core::mem::size_of::<pe::ImageRelocation>() as u64;
            // Extended relocation info does not contribute to the count of sections.
            number -= 1;
        }
        data.read_slice_at(pointer, number)
            .read_error("Invalid COFF relocation offset or number")
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Exercise both the base-10 ("/digits") and base-64 ("//chars") name
    // offset encodings, including overflow and invalid-digit rejection.
    #[test]
    fn name_offset() {
        let mut section = pe::ImageSectionHeader::default();
        section.name = *b"xxxxxxxx";
        assert_eq!(section.name_offset(), Ok(None));
        section.name = *b"/0\0\0\0\0\0\0";
        assert_eq!(section.name_offset(), Ok(Some(0)));
        section.name = *b"/9999999";
        assert_eq!(section.name_offset(), Ok(Some(999_9999)));
        section.name = *b"//AAAAAA";
        assert_eq!(section.name_offset(), Ok(Some(0)));
        section.name = *b"//D/////";
        assert_eq!(section.name_offset(), Ok(Some(0xffff_ffff)));
        // "//EAAAAA" decodes to a value larger than u32::MAX.
        section.name = *b"//EAAAAA";
        assert!(section.name_offset().is_err());
        section.name = *b"////////";
        assert!(section.name_offset().is_err());
    }
}
|
||||
635
vendor/object/src/read/coff/symbol.rs
vendored
Normal file
635
vendor/object/src/read/coff/symbol.rs
vendored
Normal file
@@ -0,0 +1,635 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::str;
|
||||
|
||||
use super::{CoffCommon, CoffHeader, SectionTable};
|
||||
use crate::endian::{LittleEndian as LE, U32Bytes};
|
||||
use crate::pe;
|
||||
use crate::pod::{bytes_of, bytes_of_slice, Pod};
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, Bytes, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result, SectionIndex,
|
||||
SymbolFlags, SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry, SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
/// A table of symbol entries in a COFF or PE file.
///
/// Also includes the string table used for the symbol names.
///
/// Returned by [`CoffHeader::symbols`] and
/// [`ImageNtHeaders::symbols`](crate::read::pe::ImageNtHeaders::symbols).
#[derive(Debug)]
pub struct SymbolTable<'data, R = &'data [u8], Coff = pe::ImageFileHeader>
where
    R: ReadRef<'data>,
    Coff: CoffHeader,
{
    // Raw symbol records (including auxiliary records), stored as byte-level
    // entries and reinterpreted on access.
    symbols: &'data [Coff::ImageSymbolBytes],
    // String table holding long symbol names.
    strings: StringTable<'data, R>,
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> Default for SymbolTable<'data, R, Coff> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
symbols: &[],
|
||||
strings: StringTable::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> SymbolTable<'data, R, Coff> {
    /// Read the symbol table.
    pub fn parse(header: &Coff, data: R) -> Result<Self> {
        // The symbol table may not be present.
        let mut offset = header.pointer_to_symbol_table().into();
        let (symbols, strings) = if offset != 0 {
            // `read_slice` advances `offset` past the symbol records, so it
            // then points at the string table that immediately follows.
            let symbols = data
                .read_slice(&mut offset, header.number_of_symbols() as usize)
                .read_error("Invalid COFF symbol table offset or size")?;

            // Note: don't update data when reading length; the length includes itself.
            let length = data
                .read_at::<U32Bytes<_>>(offset)
                .read_error("Missing COFF string table")?
                .get(LE);
            let str_end = offset
                .checked_add(length as u64)
                .read_error("Invalid COFF string table length")?;
            let strings = StringTable::new(data, offset, str_end);

            (symbols, strings)
        } else {
            // No symbol table: both tables are empty.
            (&[][..], StringTable::default())
        };

        Ok(SymbolTable { symbols, strings })
    }

    /// Return the string table used for the symbol names.
    #[inline]
    pub fn strings(&self) -> StringTable<'data, R> {
        self.strings
    }

    /// Return true if the symbol table is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.symbols.is_empty()
    }

    /// The number of symbol table entries.
    ///
    /// This includes auxiliary symbol table entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.symbols.len()
    }

    /// Iterate over the symbols.
    #[inline]
    pub fn iter<'table>(&'table self) -> SymbolIterator<'data, 'table, R, Coff> {
        SymbolIterator {
            symbols: self,
            index: 0,
        }
    }

    /// Return the symbol table entry at the given index.
    #[inline]
    pub fn symbol(&self, index: usize) -> Result<&'data Coff::ImageSymbol> {
        self.get::<Coff::ImageSymbol>(index, 0)
    }

    /// Return the auxiliary function symbol for the symbol table entry at the given index.
    ///
    /// Note that the index is of the symbol, not the first auxiliary record.
    #[inline]
    pub fn aux_function(&self, index: usize) -> Result<&'data pe::ImageAuxSymbolFunction> {
        self.get::<pe::ImageAuxSymbolFunction>(index, 1)
    }

    /// Return the auxiliary section symbol for the symbol table entry at the given index.
    ///
    /// Note that the index is of the symbol, not the first auxiliary record.
    #[inline]
    pub fn aux_section(&self, index: usize) -> Result<&'data pe::ImageAuxSymbolSection> {
        self.get::<pe::ImageAuxSymbolSection>(index, 1)
    }

    /// Return the auxiliary file name for the symbol table entry at the given index.
    ///
    /// Note that the index is of the symbol, not the first auxiliary record.
    pub fn aux_file_name(&self, index: usize, aux_count: u8) -> Result<&'data [u8]> {
        // The file name occupies the `aux_count` records following the symbol;
        // all index arithmetic is checked to reject malformed input.
        let entries = index
            .checked_add(1)
            .and_then(|x| Some(x..x.checked_add(aux_count.into())?))
            .and_then(|x| self.symbols.get(x))
            .read_error("Invalid COFF symbol index")?;
        let bytes = bytes_of_slice(entries);
        // The name is padded with nulls.
        Ok(match memchr::memchr(b'\0', bytes) {
            Some(end) => &bytes[..end],
            None => bytes,
        })
    }

    /// Return the symbol table entry or auxiliary record at the given index and offset.
    pub fn get<T: Pod>(&self, index: usize, offset: usize) -> Result<&'data T> {
        let bytes = index
            .checked_add(offset)
            .and_then(|x| self.symbols.get(x))
            .read_error("Invalid COFF symbol index")?;
        // Reinterpret the raw record bytes as the requested Pod type.
        Bytes(bytes_of(bytes))
            .read()
            .read_error("Invalid COFF symbol data")
    }

    /// Construct a map from addresses to a user-defined map entry.
    pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Coff::ImageSymbol) -> Option<Entry>>(
        &self,
        f: F,
    ) -> SymbolMap<Entry> {
        let mut symbols = Vec::with_capacity(self.symbols.len());
        for (_, symbol) in self.iter() {
            // Only definitions contribute entries; `f` may filter further.
            if !symbol.is_definition() {
                continue;
            }
            if let Some(entry) = f(symbol) {
                symbols.push(entry);
            }
        }
        SymbolMap::new(symbols)
    }
}
|
||||
|
||||
/// An iterator for symbol entries in a COFF or PE file.
///
/// Yields the index and symbol structure for each symbol.
#[derive(Debug)]
pub struct SymbolIterator<'data, 'table, R = &'data [u8], Coff = pe::ImageFileHeader>
where
    R: ReadRef<'data>,
    Coff: CoffHeader,
{
    // Table being iterated.
    symbols: &'table SymbolTable<'data, R, Coff>,
    // Index of the next symbol record to yield (auxiliary records are skipped).
    index: usize,
}
|
||||
|
||||
impl<'data, 'table, R: ReadRef<'data>, Coff: CoffHeader> Iterator
    for SymbolIterator<'data, 'table, R, Coff>
{
    type Item = (usize, &'data Coff::ImageSymbol);

    fn next(&mut self) -> Option<Self::Item> {
        let index = self.index;
        // An out-of-range index (end of table or malformed data) ends iteration.
        let symbol = self.symbols.symbol(index).ok()?;
        // Step over this record plus any auxiliary records that follow it.
        self.index += 1 + symbol.number_of_aux_symbols() as usize;
        Some((index, symbol))
    }
}
|
||||
|
||||
/// A symbol table in a [`CoffBigFile`](super::CoffBigFile).
///
/// Alias of [`CoffSymbolTable`] instantiated for the bigobj header type.
pub type CoffBigSymbolTable<'data, 'file, R = &'data [u8]> =
    CoffSymbolTable<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A symbol table in a [`CoffFile`](super::CoffFile)
/// or [`PeFile`](crate::read::pe::PeFile).
#[derive(Debug, Clone, Copy)]
pub struct CoffSymbolTable<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
where
    R: ReadRef<'data>,
    Coff: CoffHeader,
{
    // Shared file state holding the underlying `SymbolTable`.
    pub(crate) file: &'file CoffCommon<'data, R, Coff>,
}
|
||||
|
||||
// Sealed-trait marker: `read::private::Sealed` keeps downstream crates from
// implementing the public traits for this type.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffSymbolTable<'data, 'file, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSymbolTable<'data>
    for CoffSymbolTable<'data, 'file, R, Coff>
{
    type Symbol = CoffSymbol<'data, 'file, R, Coff>;
    type SymbolIterator = CoffSymbolIterator<'data, 'file, R, Coff>;

    /// Iterate over all symbols from the start of the table.
    fn symbols(&self) -> Self::SymbolIterator {
        CoffSymbolIterator {
            file: self.file,
            index: 0,
        }
    }

    /// Look up a single symbol by table index.
    fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
        let symbol = self.file.symbols.symbol(index.0)?;
        Ok(CoffSymbol {
            file: self.file,
            index,
            symbol,
        })
    }
}
|
||||
|
||||
/// An iterator for the symbols in a [`CoffBigFile`](super::CoffBigFile).
///
/// Alias of [`CoffSymbolIterator`] instantiated for the bigobj header type.
pub type CoffBigSymbolIterator<'data, 'file, R = &'data [u8]> =
    CoffSymbolIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the symbols in a [`CoffFile`](super::CoffFile)
/// or [`PeFile`](crate::read::pe::PeFile).
pub struct CoffSymbolIterator<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
where
    R: ReadRef<'data>,
    Coff: CoffHeader,
{
    // Shared file state holding the underlying `SymbolTable`.
    pub(crate) file: &'file CoffCommon<'data, R, Coff>,
    // Index of the next symbol record to yield (auxiliary records are skipped).
    pub(crate) index: usize,
}
|
||||
|
||||
// Hand-written Debug: the `file` reference is not useful to print, so only
// the type name is emitted.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> fmt::Debug
    for CoffSymbolIterator<'data, 'file, R, Coff>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("CoffSymbolIterator").finish()
    }
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSymbolIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSymbol<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.file.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
Some(CoffSymbol {
|
||||
file: self.file,
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`CoffBigFile`](super::CoffBigFile).
///
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
pub type CoffBigSymbol<'data, 'file, R = &'data [u8]> =
    CoffSymbol<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A symbol in a [`CoffFile`](super::CoffFile) or [`PeFile`](crate::read::pe::PeFile).
///
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
#[derive(Debug, Clone, Copy)]
pub struct CoffSymbol<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
where
    R: ReadRef<'data>,
    Coff: CoffHeader,
{
    // Shared file state; needed for the string table and auxiliary records.
    pub(crate) file: &'file CoffCommon<'data, R, Coff>,
    // Table index of this symbol.
    pub(crate) index: SymbolIndex,
    // The raw symbol record.
    pub(crate) symbol: &'data Coff::ImageSymbol,
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSymbol<'data, 'file, R, Coff> {
    #[inline]
    /// Get the raw `ImageSymbol` struct.
    pub fn raw_symbol(&self) -> &'data Coff::ImageSymbol {
        self.symbol
    }
}
|
||||
|
||||
// Sealed-trait marker: `read::private::Sealed` keeps downstream crates from
// implementing the public traits for this type.
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
    for CoffSymbol<'data, 'file, R, Coff>
{
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSymbol<'data>
|
||||
for CoffSymbol<'data, 'file, R, Coff>
|
||||
{
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> read::Result<&'data [u8]> {
|
||||
if self.symbol.has_aux_file_name() {
|
||||
self.file
|
||||
.symbols
|
||||
.aux_file_name(self.index.0, self.symbol.number_of_aux_symbols())
|
||||
} else {
|
||||
self.symbol.name(self.file.symbols.strings())
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> read::Result<&'data str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 COFF symbol name")
|
||||
}
|
||||
|
||||
fn address(&self) -> u64 {
|
||||
// Only return an address for storage classes that we know use an address.
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC
|
||||
| pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL
|
||||
| pe::IMAGE_SYM_CLASS_LABEL => {}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL => {
|
||||
if self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED {
|
||||
// Undefined or common data, neither of which have an address.
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
_ => return 0,
|
||||
}
|
||||
self.symbol
|
||||
.address(self.file.image_base, &self.file.sections)
|
||||
.unwrap_or(0)
|
||||
}
|
||||
|
||||
fn size(&self) -> u64 {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC => {
|
||||
// Section symbols may duplicate the size from the section table.
|
||||
if self.symbol.has_aux_section() {
|
||||
if let Ok(aux) = self.file.symbols.aux_section(self.index.0) {
|
||||
u64::from(aux.length.get(LE))
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL => {
|
||||
if self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED {
|
||||
// For undefined symbols, symbol.value is 0 and the size is 0.
|
||||
// For common data, symbol.value is the size.
|
||||
u64::from(self.symbol.value())
|
||||
} else if self.symbol.has_aux_function() {
|
||||
// Function symbols may have a size.
|
||||
if let Ok(aux) = self.file.symbols.aux_function(self.index.0) {
|
||||
u64::from(aux.total_size.get(LE))
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
// Most symbols don't have sizes.
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn kind(&self) -> SymbolKind {
|
||||
let derived_kind = if self.symbol.derived_type() == pe::IMAGE_SYM_DTYPE_FUNCTION {
|
||||
SymbolKind::Text
|
||||
} else {
|
||||
SymbolKind::Data
|
||||
};
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC => {
|
||||
if self.symbol.has_aux_section() {
|
||||
SymbolKind::Section
|
||||
} else {
|
||||
derived_kind
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => derived_kind,
|
||||
pe::IMAGE_SYM_CLASS_SECTION => SymbolKind::Section,
|
||||
pe::IMAGE_SYM_CLASS_FILE => SymbolKind::File,
|
||||
pe::IMAGE_SYM_CLASS_LABEL => SymbolKind::Label,
|
||||
_ => SymbolKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn section(&self) -> SymbolSection {
|
||||
match self.symbol.section_number() {
|
||||
pe::IMAGE_SYM_UNDEFINED => {
|
||||
if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL {
|
||||
if self.symbol.value() == 0 {
|
||||
SymbolSection::Undefined
|
||||
} else {
|
||||
SymbolSection::Common
|
||||
}
|
||||
} else if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_SECTION {
|
||||
SymbolSection::Undefined
|
||||
} else {
|
||||
SymbolSection::Unknown
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_ABSOLUTE => SymbolSection::Absolute,
|
||||
pe::IMAGE_SYM_DEBUG => {
|
||||
if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_FILE {
|
||||
SymbolSection::None
|
||||
} else {
|
||||
SymbolSection::Unknown
|
||||
}
|
||||
}
|
||||
index if index > 0 => SymbolSection::Section(SectionIndex(index as usize)),
|
||||
_ => SymbolSection::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL
|
||||
&& self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED
|
||||
&& self.symbol.value() == 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
self.symbol.is_definition()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL
|
||||
&& self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED
|
||||
&& self.symbol.value() != 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn scope(&self) -> SymbolScope {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => {
|
||||
// TODO: determine if symbol is exported
|
||||
SymbolScope::Linkage
|
||||
}
|
||||
_ => SymbolScope::Compilation,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
fn is_local(&self) -> bool {
    // Local is defined as the complement of global.
    !self.is_global()
}
|
||||
|
||||
/// Return COMDAT selection flags from the auxiliary section record, if any.
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
    if self.symbol.has_aux_section() {
        if let Ok(aux) = self.file.symbols.aux_section(self.index.0) {
            // Bigobj aux records split the associated section number into a
            // low and a high 16-bit half; combine them in that case.
            let number = if Coff::is_type_bigobj() {
                u32::from(aux.number.get(LE)) | (u32::from(aux.high_number.get(LE)) << 16)
            } else {
                u32::from(aux.number.get(LE))
            };
            return SymbolFlags::CoffSection {
                selection: aux.selection,
                // Section number 0 means "no associated section".
                associative_section: if number == 0 {
                    None
                } else {
                    Some(SectionIndex(number as usize))
                },
            };
        }
    }
    SymbolFlags::None
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageSymbol`] and [`pe::ImageSymbolEx`].
#[allow(missing_docs)]
pub trait ImageSymbol: Debug + Pod {
    /// Return the raw 8-byte name field (inline name or string table offset).
    fn raw_name(&self) -> &[u8; 8];
    /// Return the symbol value (meaning depends on section number and class).
    fn value(&self) -> u32;
    /// Return the section number, sign-extended for the special values.
    fn section_number(&self) -> i32;
    /// Return the raw type word (base and derived type bit fields).
    fn typ(&self) -> u16;
    /// Return the storage class (`pe::IMAGE_SYM_CLASS_*`).
    fn storage_class(&self) -> u8;
    /// Return the number of auxiliary records following this symbol.
    fn number_of_aux_symbols(&self) -> u8;

    /// Parse a COFF symbol name.
    ///
    /// `strings` must be the string table used for symbol names.
    fn name<'data, R: ReadRef<'data>>(
        &'data self,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        let name = self.raw_name();
        if name[0] == 0 {
            // If the name starts with 0 then the last 4 bytes are a string table offset.
            let offset = u32::from_le_bytes(name[4..8].try_into().unwrap());
            strings
                .get(offset)
                .read_error("Invalid COFF symbol name offset")
        } else {
            // The name is inline and padded with nulls.
            Ok(match memchr::memchr(b'\0', name) {
                Some(end) => &name[..end],
                None => &name[..],
            })
        }
    }

    /// Return the symbol address.
    ///
    /// This takes into account the image base and the section address.
    fn address(&self, image_base: u64, sections: &SectionTable<'_>) -> Result<u64> {
        let section_number = self.section_number() as usize;
        let section = sections.section(section_number)?;
        let virtual_address = u64::from(section.virtual_address.get(LE));
        let value = u64::from(self.value());
        Ok(image_base + virtual_address + value)
    }

    /// Return true if the symbol is a definition of a function or data object.
    fn is_definition(&self) -> bool {
        // Special (non-positive) section numbers never define anything.
        if self.section_number() <= 0 {
            return false;
        }
        match self.storage_class() {
            // Static symbols with an aux section record are section symbols,
            // not data/function definitions.
            pe::IMAGE_SYM_CLASS_STATIC => !self.has_aux_section(),
            pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => true,
            _ => false,
        }
    }

    /// Return true if the symbol has an auxiliary file name.
    fn has_aux_file_name(&self) -> bool {
        self.number_of_aux_symbols() > 0 && self.storage_class() == pe::IMAGE_SYM_CLASS_FILE
    }

    /// Return true if the symbol has an auxiliary function symbol.
    fn has_aux_function(&self) -> bool {
        self.number_of_aux_symbols() > 0 && self.derived_type() == pe::IMAGE_SYM_DTYPE_FUNCTION
    }

    /// Return true if the symbol has an auxiliary section symbol.
    fn has_aux_section(&self) -> bool {
        self.number_of_aux_symbols() > 0
            && self.storage_class() == pe::IMAGE_SYM_CLASS_STATIC
            && self.typ() == 0
    }

    /// Return the base type bits of the type word.
    fn base_type(&self) -> u16 {
        self.typ() & pe::N_BTMASK
    }

    /// Return the derived type bits of the type word (e.g. function/pointer).
    fn derived_type(&self) -> u16 {
        (self.typ() & pe::N_TMASK) >> pe::N_BTSHFT
    }
}
|
||||
|
||||
impl ImageSymbol for pe::ImageSymbol {
|
||||
fn raw_name(&self) -> &[u8; 8] {
|
||||
&self.name
|
||||
}
|
||||
fn value(&self) -> u32 {
|
||||
self.value.get(LE)
|
||||
}
|
||||
fn section_number(&self) -> i32 {
|
||||
let section_number = self.section_number.get(LE);
|
||||
if section_number >= pe::IMAGE_SYM_SECTION_MAX {
|
||||
(section_number as i16) as i32
|
||||
} else {
|
||||
section_number as i32
|
||||
}
|
||||
}
|
||||
fn typ(&self) -> u16 {
|
||||
self.typ.get(LE)
|
||||
}
|
||||
fn storage_class(&self) -> u8 {
|
||||
self.storage_class
|
||||
}
|
||||
fn number_of_aux_symbols(&self) -> u8 {
|
||||
self.number_of_aux_symbols
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageSymbol for pe::ImageSymbolEx {
|
||||
fn raw_name(&self) -> &[u8; 8] {
|
||||
&self.name
|
||||
}
|
||||
fn value(&self) -> u32 {
|
||||
self.value.get(LE)
|
||||
}
|
||||
fn section_number(&self) -> i32 {
|
||||
self.section_number.get(LE)
|
||||
}
|
||||
fn typ(&self) -> u16 {
|
||||
self.typ.get(LE)
|
||||
}
|
||||
fn storage_class(&self) -> u8 {
|
||||
self.storage_class
|
||||
}
|
||||
fn number_of_aux_symbols(&self) -> u8 {
|
||||
self.number_of_aux_symbols
|
||||
}
|
||||
}
|
||||
307
vendor/object/src/read/elf/attributes.rs
vendored
Normal file
307
vendor/object/src/read/elf/attributes.rs
vendored
Normal file
@@ -0,0 +1,307 @@
|
||||
use core::convert::TryInto;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian;
|
||||
use crate::read::{Bytes, Error, ReadError, Result};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// An ELF attributes section.
///
/// This may be a GNU attributes section, or an architecture specific attributes section.
///
/// An attributes section contains a series of [`AttributesSubsection`].
///
/// Returned by [`SectionHeader::attributes`](super::SectionHeader::attributes)
/// and [`SectionHeader::gnu_attributes`](super::SectionHeader::gnu_attributes).
#[derive(Debug, Clone)]
pub struct AttributesSection<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    // Format version byte read from the start of the section.
    version: u8,
    // Remaining section data after the version byte.
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> AttributesSection<'data, Elf> {
    /// Parse an ELF attributes section given the section data.
    pub fn new(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut data = Bytes(data);

        // Skip the version field that is one byte long.
        let version = *data
            .read::<u8>()
            .read_error("Invalid ELF attributes section offset or size")?;

        Ok(AttributesSection {
            endian,
            version,
            data,
        })
    }

    /// Return the version of the attributes section.
    pub fn version(&self) -> u8 {
        self.version
    }

    /// Return an iterator over the subsections.
    pub fn subsections(&self) -> Result<AttributesSubsectionIterator<'data, Elf>> {
        // There is currently only one format version.
        if self.version != b'A' {
            return Err(Error("Unsupported ELF attributes section version"));
        }

        Ok(AttributesSubsectionIterator {
            endian: self.endian,
            data: self.data,
        })
    }
}
|
||||
|
||||
/// An iterator for the subsections in an [`AttributesSection`].
#[derive(Debug, Clone)]
pub struct AttributesSubsectionIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> AttributesSubsectionIterator<'data, Elf> {
    /// Return the next subsection.
    pub fn next(&mut self) -> Result<Option<AttributesSubsection<'data, Elf>>> {
        if self.data.is_empty() {
            return Ok(None);
        }

        let result = self.parse();
        if result.is_err() {
            // Poison the iterator after a parse error so that later calls
            // return `Ok(None)` instead of re-reading the bad data.
            self.data = Bytes(&[]);
        }
        result
    }

    fn parse(&mut self) -> Result<Option<AttributesSubsection<'data, Elf>>> {
        // First read the subsection length.
        let mut data = self.data;
        let length = data
            .read::<endian::U32Bytes<Elf::Endian>>()
            .read_error("ELF attributes section is too short")?
            .get(self.endian);

        // Now read the entire subsection, updating self.data.
        // Note: the length includes the 4-byte length field itself.
        let mut data = self
            .data
            .read_bytes(length as usize)
            .read_error("Invalid ELF attributes subsection length")?;
        // Skip the subsection length field.
        data.skip(4)
            .read_error("Invalid ELF attributes subsection length")?;

        // The vendor name is a NUL-terminated string following the length.
        let vendor = data
            .read_string()
            .read_error("Invalid ELF attributes vendor")?;

        Ok(Some(AttributesSubsection {
            endian: self.endian,
            length,
            vendor,
            data,
        }))
    }
}
|
||||
|
||||
/// A subsection in an [`AttributesSection`].
|
||||
///
|
||||
/// A subsection is identified by a vendor name. It contains a series of
|
||||
/// [`AttributesSubsubsection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsection<'data, Elf: FileHeader> {
|
||||
endian: Elf::Endian,
|
||||
length: u32,
|
||||
vendor: &'data [u8],
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> AttributesSubsection<'data, Elf> {
|
||||
/// Return the length of the attributes subsection.
|
||||
pub fn length(&self) -> u32 {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Return the vendor name of the attributes subsection.
|
||||
pub fn vendor(&self) -> &'data [u8] {
|
||||
self.vendor
|
||||
}
|
||||
|
||||
/// Return an iterator over the sub-subsections.
|
||||
pub fn subsubsections(&self) -> AttributesSubsubsectionIterator<'data, Elf> {
|
||||
AttributesSubsubsectionIterator {
|
||||
endian: self.endian,
|
||||
data: self.data,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sub-subsections in an [`AttributesSubsection`].
#[derive(Debug, Clone)]
pub struct AttributesSubsubsectionIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> AttributesSubsubsectionIterator<'data, Elf> {
    /// Return the next sub-subsection.
    pub fn next(&mut self) -> Result<Option<AttributesSubsubsection<'data>>> {
        if self.data.is_empty() {
            return Ok(None);
        }

        let result = self.parse();
        if result.is_err() {
            // Poison the iterator after a parse error so later calls
            // return `Ok(None)` rather than re-reading bad data.
            self.data = Bytes(&[]);
        }
        result
    }

    fn parse(&mut self) -> Result<Option<AttributesSubsubsection<'data>>> {
        // The format of a sub-section looks like this:
        //
        // <file-tag> <size> <attribute>*
        // | <section-tag> <size> <section-number>* 0 <attribute>*
        // | <symbol-tag> <size> <symbol-number>* 0 <attribute>*
        let mut data = self.data;
        let tag = *data
            .read::<u8>()
            .read_error("ELF attributes subsection is too short")?;
        let length = data
            .read::<endian::U32Bytes<Elf::Endian>>()
            .read_error("ELF attributes subsection is too short")?
            .get(self.endian);

        // Now read the entire sub-subsection, updating self.data.
        // Note: the length includes the tag and size fields themselves.
        let mut data = self
            .data
            .read_bytes(length as usize)
            .read_error("Invalid ELF attributes sub-subsection length")?;
        // Skip the tag and sub-subsection size field.
        data.skip(1 + 4)
            .read_error("Invalid ELF attributes sub-subsection length")?;

        // Section/symbol tags carry a NUL-terminated list of ULEB128 indices
        // before the attributes; file tags go straight to the attributes.
        let indices = if tag == elf::Tag_Section || tag == elf::Tag_Symbol {
            data.read_string()
                .map(Bytes)
                .read_error("Missing ELF attributes sub-subsection indices")?
        } else if tag == elf::Tag_File {
            Bytes(&[])
        } else {
            return Err(Error("Unimplemented ELF attributes sub-subsection tag"));
        };

        Ok(Some(AttributesSubsubsection {
            tag,
            length,
            indices,
            data,
        }))
    }
}
|
||||
|
||||
/// A sub-subsection in an [`AttributesSubsection`].
|
||||
///
|
||||
/// A sub-subsection is identified by a tag. It contains an optional series of indices,
|
||||
/// followed by a series of attributes.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsubsection<'data> {
|
||||
tag: u8,
|
||||
length: u32,
|
||||
indices: Bytes<'data>,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributesSubsubsection<'data> {
|
||||
/// Return the tag of the attributes sub-subsection.
|
||||
pub fn tag(&self) -> u8 {
|
||||
self.tag
|
||||
}
|
||||
|
||||
/// Return the length of the attributes sub-subsection.
|
||||
pub fn length(&self) -> u32 {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Return the data containing the indices.
|
||||
pub fn indices_data(&self) -> &'data [u8] {
|
||||
self.indices.0
|
||||
}
|
||||
|
||||
/// Return the indices.
|
||||
///
|
||||
/// This will be section indices if the tag is `Tag_Section`,
|
||||
/// or symbol indices if the tag is `Tag_Symbol`,
|
||||
/// and otherwise it will be empty.
|
||||
pub fn indices(&self) -> AttributeIndexIterator<'data> {
|
||||
AttributeIndexIterator { data: self.indices }
|
||||
}
|
||||
|
||||
/// Return the data containing the attributes.
|
||||
pub fn attributes_data(&self) -> &'data [u8] {
|
||||
self.data.0
|
||||
}
|
||||
|
||||
/// Return a parser for the data containing the attributes.
|
||||
pub fn attributes(&self) -> AttributeReader<'data> {
|
||||
AttributeReader { data: self.data }
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the indices in an [`AttributesSubsubsection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributeIndexIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributeIndexIterator<'data> {
|
||||
/// Parse the next index.
|
||||
pub fn next(&mut self) -> Result<Option<u32>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let err = "Invalid ELF attribute index";
|
||||
self.data
|
||||
.read_uleb128()
|
||||
.read_error(err)?
|
||||
.try_into()
|
||||
.map_err(|_| ())
|
||||
.read_error(err)
|
||||
.map(Some)
|
||||
}
|
||||
}
|
||||
|
||||
/// A parser for the attributes in an [`AttributesSubsubsection`].
|
||||
///
|
||||
/// The parser relies on the caller to know the format of the data for each attribute tag.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributeReader<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributeReader<'data> {
|
||||
/// Parse a tag.
|
||||
pub fn read_tag(&mut self) -> Result<Option<u64>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let err = "Invalid ELF attribute tag";
|
||||
self.data.read_uleb128().read_error(err).map(Some)
|
||||
}
|
||||
|
||||
/// Parse an integer value.
|
||||
pub fn read_integer(&mut self) -> Result<u64> {
|
||||
let err = "Invalid ELF attribute integer value";
|
||||
self.data.read_uleb128().read_error(err)
|
||||
}
|
||||
|
||||
/// Parse a string value.
|
||||
pub fn read_string(&mut self) -> Result<&'data [u8]> {
|
||||
let err = "Invalid ELF attribute string value";
|
||||
self.data.read_string().read_error(err)
|
||||
}
|
||||
}
|
||||
162
vendor/object/src/read/elf/comdat.rs
vendored
Normal file
162
vendor/object/src/read/elf/comdat.rs
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{iter, slice, str};
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{Endianness, U32Bytes};
|
||||
use crate::read::{self, ComdatKind, ObjectComdat, ReadError, ReadRef, SectionIndex, SymbolIndex};
|
||||
|
||||
use super::{ElfFile, FileHeader, SectionHeader, Sym};
|
||||
|
||||
/// An iterator for the COMDAT section groups in an [`ElfFile32`](super::ElfFile32).
pub type ElfComdatIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdatIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the COMDAT section groups in an [`ElfFile64`](super::ElfFile64).
pub type ElfComdatIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdatIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the COMDAT section groups in an [`ElfFile`].
#[derive(Debug)]
pub struct ElfComdatIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file ElfFile<'data, Elf, R>,
    // Enumerated section headers; non-group sections are skipped by `next`.
    pub(super) iter: iter::Enumerate<slice::Iter<'data, Elf::SectionHeader>>,
}

impl<'data, 'file, Elf, R> Iterator for ElfComdatIterator<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    type Item = ElfComdat<'data, 'file, Elf, R>;

    fn next(&mut self) -> Option<Self::Item> {
        // Scan forward to the next section that parses as a COMDAT group.
        for (_index, section) in self.iter.by_ref() {
            if let Some(comdat) = ElfComdat::parse(self.file, section) {
                return Some(comdat);
            }
        }
        None
    }
}
|
||||
|
||||
/// A COMDAT section group in an [`ElfFile32`](super::ElfFile32).
pub type ElfComdat32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdat<'data, 'file, elf::FileHeader32<Endian>, R>;
/// A COMDAT section group in an [`ElfFile64`](super::ElfFile64).
pub type ElfComdat64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdat<'data, 'file, elf::FileHeader64<Endian>, R>;

/// A COMDAT section group in an [`ElfFile`].
///
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
#[derive(Debug)]
pub struct ElfComdat<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    file: &'file ElfFile<'data, Elf, R>,
    // The SHT_GROUP section header for this group.
    section: &'data Elf::SectionHeader,
    // The member section indices stored in the group section data.
    sections: &'data [U32Bytes<Elf::Endian>],
}

impl<'data, 'file, Elf, R> ElfComdat<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    /// Returns `None` if the section is not a COMDAT group or fails to parse.
    fn parse(
        file: &'file ElfFile<'data, Elf, R>,
        section: &'data Elf::SectionHeader,
    ) -> Option<ElfComdat<'data, 'file, Elf, R>> {
        let (flag, sections) = section.group(file.endian, file.data).ok()??;
        if flag != elf::GRP_COMDAT {
            return None;
        }
        Some(ElfComdat {
            file,
            section,
            sections,
        })
    }
}
|
||||
|
||||
// Seal the trait implementation so downstream crates cannot implement it.
impl<'data, 'file, Elf, R> read::private::Sealed for ElfComdat<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
}

impl<'data, 'file, Elf, R> ObjectComdat<'data> for ElfComdat<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    type SectionIterator = ElfComdatSectionIterator<'data, 'file, Elf, R>;

    #[inline]
    fn kind(&self) -> ComdatKind {
        // ELF group sections only express "any duplicate may be chosen".
        ComdatKind::Any
    }

    #[inline]
    fn symbol(&self) -> SymbolIndex {
        // sh_info of a group section is the index of its signature symbol.
        SymbolIndex(self.section.sh_info(self.file.endian) as usize)
    }

    fn name_bytes(&self) -> read::Result<&[u8]> {
        // FIXME: check sh_link
        // The group name is the name of the signature symbol.
        let index = self.section.sh_info(self.file.endian) as usize;
        let symbol = self.file.symbols.symbol(index)?;
        symbol.name(self.file.endian, self.file.symbols.strings())
    }

    fn name(&self) -> read::Result<&str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 ELF COMDAT name")
    }

    fn sections(&self) -> Self::SectionIterator {
        ElfComdatSectionIterator {
            file: self.file,
            sections: self.sections.iter(),
        }
    }
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in an [`ElfFile32`](super::ElfFile32).
pub type ElfComdatSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdatSectionIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the sections in a COMDAT section group in an [`ElfFile64`](super::ElfFile64).
pub type ElfComdatSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfComdatSectionIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the sections in a COMDAT section group in an [`ElfFile`].
#[derive(Debug)]
pub struct ElfComdatSectionIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    file: &'file ElfFile<'data, Elf, R>,
    // Raw 32-bit member section indices from the group section data.
    sections: slice::Iter<'data, U32Bytes<Elf::Endian>>,
}

impl<'data, 'file, Elf, R> Iterator for ElfComdatSectionIterator<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    type Item = SectionIndex;

    fn next(&mut self) -> Option<Self::Item> {
        let index = self.sections.next()?;
        Some(SectionIndex(index.get(self.file.endian) as usize))
    }
}
|
||||
56
vendor/object/src/read/elf/compression.rs
vendored
Normal file
56
vendor/object/src/read/elf/compression.rs
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian;
|
||||
use crate::pod::Pod;
|
||||
|
||||
/// A trait for generic access to [`elf::CompressionHeader32`] and [`elf::CompressionHeader64`].
#[allow(missing_docs)]
pub trait CompressionHeader: Debug + Pod {
    type Word: Into<u64>;
    type Endian: endian::Endian;

    /// Return the compression algorithm (`ELFCOMPRESS_*`).
    fn ch_type(&self, endian: Self::Endian) -> u32;
    /// Return the uncompressed size of the section data.
    fn ch_size(&self, endian: Self::Endian) -> Self::Word;
    /// Return the required alignment of the uncompressed data.
    fn ch_addralign(&self, endian: Self::Endian) -> Self::Word;
}
|
||||
|
||||
// 32-bit compression headers use 32-bit size and alignment words.
impl<Endian: endian::Endian> CompressionHeader for elf::CompressionHeader32<Endian> {
    type Endian = Endian;
    type Word = u32;

    #[inline]
    fn ch_type(&self, endian: Self::Endian) -> u32 {
        self.ch_type.get(endian)
    }

    #[inline]
    fn ch_addralign(&self, endian: Self::Endian) -> Self::Word {
        self.ch_addralign.get(endian)
    }

    #[inline]
    fn ch_size(&self, endian: Self::Endian) -> Self::Word {
        self.ch_size.get(endian)
    }
}
|
||||
|
||||
// 64-bit compression headers use 64-bit size and alignment words.
impl<Endian: endian::Endian> CompressionHeader for elf::CompressionHeader64<Endian> {
    type Endian = Endian;
    type Word = u64;

    #[inline]
    fn ch_type(&self, endian: Self::Endian) -> u32 {
        self.ch_type.get(endian)
    }

    #[inline]
    fn ch_addralign(&self, endian: Self::Endian) -> Self::Word {
        self.ch_addralign.get(endian)
    }

    #[inline]
    fn ch_size(&self, endian: Self::Endian) -> Self::Word {
        self.ch_size.get(endian)
    }
}
|
||||
117
vendor/object/src/read/elf/dynamic.rs
vendored
Normal file
117
vendor/object/src/read/elf/dynamic.rs
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{ReadError, Result, StringTable};
|
||||
|
||||
/// A trait for generic access to [`elf::Dyn32`] and [`elf::Dyn64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Dyn: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn d_tag(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn d_val(&self, endian: Self::Endian) -> Self::Word;
|
||||
|
||||
/// Try to convert the tag to a `u32`.
|
||||
fn tag32(&self, endian: Self::Endian) -> Option<u32> {
|
||||
self.d_tag(endian).into().try_into().ok()
|
||||
}
|
||||
|
||||
/// Try to convert the value to a `u32`.
|
||||
fn val32(&self, endian: Self::Endian) -> Option<u32> {
|
||||
self.d_val(endian).into().try_into().ok()
|
||||
}
|
||||
|
||||
/// Return true if the value is an offset in the dynamic string table.
|
||||
fn is_string(&self, endian: Self::Endian) -> bool {
|
||||
if let Some(tag) = self.tag32(endian) {
|
||||
match tag {
|
||||
elf::DT_NEEDED
|
||||
| elf::DT_SONAME
|
||||
| elf::DT_RPATH
|
||||
| elf::DT_RUNPATH
|
||||
| elf::DT_AUXILIARY
|
||||
| elf::DT_FILTER => true,
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Use the value to get a string in a string table.
|
||||
///
|
||||
/// Does not check for an appropriate tag.
|
||||
fn string<'data>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
strings: StringTable<'data>,
|
||||
) -> Result<&'data [u8]> {
|
||||
self.val32(endian)
|
||||
.and_then(|val| strings.get(val).ok())
|
||||
.read_error("Invalid ELF dyn string")
|
||||
}
|
||||
|
||||
/// Return true if the value is an address.
|
||||
fn is_address(&self, endian: Self::Endian) -> bool {
|
||||
if let Some(tag) = self.tag32(endian) {
|
||||
match tag {
|
||||
elf::DT_PLTGOT
|
||||
| elf::DT_HASH
|
||||
| elf::DT_STRTAB
|
||||
| elf::DT_SYMTAB
|
||||
| elf::DT_RELA
|
||||
| elf::DT_INIT
|
||||
| elf::DT_FINI
|
||||
| elf::DT_SYMBOLIC
|
||||
| elf::DT_REL
|
||||
| elf::DT_DEBUG
|
||||
| elf::DT_JMPREL
|
||||
| elf::DT_FINI_ARRAY
|
||||
| elf::DT_INIT_ARRAY
|
||||
| elf::DT_PREINIT_ARRAY
|
||||
| elf::DT_SYMTAB_SHNDX
|
||||
| elf::DT_VERDEF
|
||||
| elf::DT_VERNEED
|
||||
| elf::DT_VERSYM
|
||||
| elf::DT_ADDRRNGLO..=elf::DT_ADDRRNGHI => true,
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 32-bit dynamic entries: tag and value are both 32-bit words.
impl<Endian: endian::Endian> Dyn for elf::Dyn32<Endian> {
    type Endian = Endian;
    type Word = u32;

    #[inline]
    fn d_val(&self, endian: Self::Endian) -> Self::Word {
        self.d_val.get(endian)
    }

    #[inline]
    fn d_tag(&self, endian: Self::Endian) -> Self::Word {
        self.d_tag.get(endian)
    }
}
|
||||
|
||||
// 64-bit dynamic entries: tag and value are both 64-bit words.
impl<Endian: endian::Endian> Dyn for elf::Dyn64<Endian> {
    type Endian = Endian;
    type Word = u64;

    #[inline]
    fn d_val(&self, endian: Self::Endian) -> Self::Word {
        self.d_val.get(endian)
    }

    #[inline]
    fn d_tag(&self, endian: Self::Endian) -> Self::Word {
        self.d_tag.get(endian)
    }
}
|
||||
916
vendor/object/src/read/elf/file.rs
vendored
Normal file
916
vendor/object/src/read/elf/file.rs
vendored
Normal file
@@ -0,0 +1,916 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::read::{
|
||||
self, util, Architecture, ByteString, Bytes, Error, Export, FileFlags, Import, Object,
|
||||
ObjectKind, ReadError, ReadRef, SectionIndex, StringTable, SymbolIndex,
|
||||
};
|
||||
use crate::{elf, endian, Endian, Endianness, Pod, U32};
|
||||
|
||||
use super::{
|
||||
CompressionHeader, Dyn, ElfComdat, ElfComdatIterator, ElfDynamicRelocationIterator, ElfSection,
|
||||
ElfSectionIterator, ElfSegment, ElfSegmentIterator, ElfSymbol, ElfSymbolIterator,
|
||||
ElfSymbolTable, NoteHeader, ProgramHeader, Rel, Rela, RelocationSections, SectionHeader,
|
||||
SectionTable, Sym, SymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit ELF object file.
///
/// This is a file that starts with [`elf::FileHeader32`], and corresponds
/// to [`crate::FileKind::Elf32`].
pub type ElfFile32<'data, Endian = Endianness, R = &'data [u8]> =
    ElfFile<'data, elf::FileHeader32<Endian>, R>;
/// A 64-bit ELF object file.
///
/// This is a file that starts with [`elf::FileHeader64`], and corresponds
/// to [`crate::FileKind::Elf64`].
pub type ElfFile64<'data, Endian = Endianness, R = &'data [u8]> =
    ElfFile<'data, elf::FileHeader64<Endian>, R>;

/// A partially parsed ELF file.
///
/// Most functionality is provided by the [`Object`] trait implementation.
#[derive(Debug)]
pub struct ElfFile<'data, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    pub(super) endian: Elf::Endian,
    // The full file data; individual tables borrow from or re-read it.
    pub(super) data: R,
    pub(super) header: &'data Elf,
    pub(super) segments: &'data [Elf::ProgramHeader],
    pub(super) sections: SectionTable<'data, Elf, R>,
    // Mapping from section index to its relocation section, built at parse time.
    pub(super) relocations: RelocationSections,
    pub(super) symbols: SymbolTable<'data, Elf, R>,
    pub(super) dynamic_symbols: SymbolTable<'data, Elf, R>,
}
|
||||
|
||||
impl<'data, Elf, R> ElfFile<'data, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    /// Parse the raw ELF file data.
    pub fn parse(data: R) -> read::Result<Self> {
        let header = Elf::parse(data)?;
        let endian = header.endian()?;
        let segments = header.program_headers(endian, data)?;
        let sections = header.sections(endian, data)?;
        let symbols = sections.symbols(endian, data, elf::SHT_SYMTAB)?;
        // TODO: get dynamic symbols from DT_SYMTAB if there are no sections
        let dynamic_symbols = sections.symbols(endian, data, elf::SHT_DYNSYM)?;
        // The API we provide requires a mapping from section to relocations, so build it now.
        let relocations = sections.relocation_sections(endian, symbols.section())?;

        Ok(ElfFile {
            endian,
            data,
            header,
            segments,
            sections,
            relocations,
            symbols,
            dynamic_symbols,
        })
    }

    /// Returns the endianness.
    pub fn endian(&self) -> Elf::Endian {
        self.endian
    }

    /// Returns the raw data.
    pub fn data(&self) -> R {
        self.data
    }

    /// Returns the raw ELF file header.
    pub fn raw_header(&self) -> &'data Elf {
        self.header
    }

    /// Returns the raw ELF segments.
    pub fn raw_segments(&self) -> &'data [Elf::ProgramHeader] {
        self.segments
    }

    /// Look up a section by its exact name.
    fn raw_section_by_name<'file>(
        &'file self,
        section_name: &[u8],
    ) -> Option<ElfSection<'data, 'file, Elf, R>> {
        self.sections
            .section_by_name(self.endian, section_name)
            .map(|(index, section)| ElfSection {
                file: self,
                index: SectionIndex(index),
                section,
            })
    }

    /// Fall back from a `.debug_*` name to its compressed `.zdebug_*` variant.
    #[cfg(feature = "compression")]
    fn zdebug_section_by_name<'file>(
        &'file self,
        section_name: &[u8],
    ) -> Option<ElfSection<'data, 'file, Elf, R>> {
        if !section_name.starts_with(b".debug_") {
            return None;
        }
        // Build ".zdebug_" + suffix; the suffix starts after ".debug_" (7 bytes).
        let mut name = Vec::with_capacity(section_name.len() + 1);
        name.extend_from_slice(b".zdebug_");
        name.extend_from_slice(&section_name[7..]);
        self.raw_section_by_name(&name)
    }

    // Without the compression feature there is no zdebug fallback.
    #[cfg(not(feature = "compression"))]
    fn zdebug_section_by_name<'file>(
        &'file self,
        _section_name: &[u8],
    ) -> Option<ElfSection<'data, 'file, Elf, R>> {
        None
    }
}
|
||||
|
||||
// Seal the `Object` trait implementation so downstream crates cannot implement it.
impl<'data, Elf, R> read::private::Sealed for ElfFile<'data, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
}
||||
|
||||
// Generic `Object` implementation for ELF files. All methods delegate to
// the pre-parsed header, section table, and symbol tables stored in
// `ElfFile`, so they do not re-read the underlying data except where noted.
impl<'data, 'file, Elf, R> Object<'data, 'file> for ElfFile<'data, Elf, R>
where
    'data: 'file,
    Elf: FileHeader,
    R: 'file + ReadRef<'data>,
{
    type Segment = ElfSegment<'data, 'file, Elf, R>;
    type SegmentIterator = ElfSegmentIterator<'data, 'file, Elf, R>;
    type Section = ElfSection<'data, 'file, Elf, R>;
    type SectionIterator = ElfSectionIterator<'data, 'file, Elf, R>;
    type Comdat = ElfComdat<'data, 'file, Elf, R>;
    type ComdatIterator = ElfComdatIterator<'data, 'file, Elf, R>;
    type Symbol = ElfSymbol<'data, 'file, Elf, R>;
    type SymbolIterator = ElfSymbolIterator<'data, 'file, Elf, R>;
    type SymbolTable = ElfSymbolTable<'data, 'file, Elf, R>;
    type DynamicRelocationIterator = ElfDynamicRelocationIterator<'data, 'file, Elf, R>;

    // Map (e_machine, 64-bit class) onto the generic `Architecture` enum.
    // Several machines distinguish 32/64-bit variants by ELF class.
    fn architecture(&self) -> Architecture {
        match (
            self.header.e_machine(self.endian),
            self.header.is_class_64(),
        ) {
            (elf::EM_AARCH64, true) => Architecture::Aarch64,
            (elf::EM_AARCH64, false) => Architecture::Aarch64_Ilp32,
            (elf::EM_ARM, _) => Architecture::Arm,
            (elf::EM_AVR, _) => Architecture::Avr,
            (elf::EM_BPF, _) => Architecture::Bpf,
            (elf::EM_CSKY, _) => Architecture::Csky,
            (elf::EM_386, _) => Architecture::I386,
            (elf::EM_X86_64, false) => Architecture::X86_64_X32,
            (elf::EM_X86_64, true) => Architecture::X86_64,
            (elf::EM_HEXAGON, _) => Architecture::Hexagon,
            (elf::EM_LOONGARCH, true) => Architecture::LoongArch64,
            (elf::EM_MIPS, false) => Architecture::Mips,
            (elf::EM_MIPS, true) => Architecture::Mips64,
            (elf::EM_MSP430, _) => Architecture::Msp430,
            (elf::EM_PPC, _) => Architecture::PowerPc,
            (elf::EM_PPC64, _) => Architecture::PowerPc64,
            (elf::EM_RISCV, false) => Architecture::Riscv32,
            (elf::EM_RISCV, true) => Architecture::Riscv64,
            // This is either s390 or s390x, depending on the ELF class.
            // We only support the 64-bit variant s390x here.
            (elf::EM_S390, true) => Architecture::S390x,
            (elf::EM_SBF, _) => Architecture::Sbf,
            (elf::EM_SHARC, false) => Architecture::Sharc,
            (elf::EM_SPARCV9, true) => Architecture::Sparc64,
            (elf::EM_XTENSA, false) => Architecture::Xtensa,
            _ => Architecture::Unknown,
        }
    }

    #[inline]
    fn is_little_endian(&self) -> bool {
        self.header.is_little_endian()
    }

    #[inline]
    fn is_64(&self) -> bool {
        self.header.is_class_64()
    }

    // Classify the file from `e_type`.
    fn kind(&self) -> ObjectKind {
        match self.header.e_type(self.endian) {
            elf::ET_REL => ObjectKind::Relocatable,
            elf::ET_EXEC => ObjectKind::Executable,
            // TODO: check for `DF_1_PIE`?
            elf::ET_DYN => ObjectKind::Dynamic,
            elf::ET_CORE => ObjectKind::Core,
            _ => ObjectKind::Unknown,
        }
    }

    fn segments(&'file self) -> ElfSegmentIterator<'data, 'file, Elf, R> {
        ElfSegmentIterator {
            file: self,
            iter: self.segments.iter(),
        }
    }

    // Exact name lookup, falling back to the compressed `.zdebug_*`
    // equivalent for `.debug_*` names (no-op without the `compression`
    // feature).
    fn section_by_name_bytes(
        &'file self,
        section_name: &[u8],
    ) -> Option<ElfSection<'data, 'file, Elf, R>> {
        self.raw_section_by_name(section_name)
            .or_else(|| self.zdebug_section_by_name(section_name))
    }

    fn section_by_index(
        &'file self,
        index: SectionIndex,
    ) -> read::Result<ElfSection<'data, 'file, Elf, R>> {
        let section = self.sections.section(index)?;
        Ok(ElfSection {
            file: self,
            index,
            section,
        })
    }

    fn sections(&'file self) -> ElfSectionIterator<'data, 'file, Elf, R> {
        ElfSectionIterator {
            file: self,
            iter: self.sections.iter().enumerate(),
        }
    }

    // Comdat groups are found by scanning all section headers, so the
    // iterator carries the same enumerated section iterator as `sections`.
    fn comdats(&'file self) -> ElfComdatIterator<'data, 'file, Elf, R> {
        ElfComdatIterator {
            file: self,
            iter: self.sections.iter().enumerate(),
        }
    }

    fn symbol_by_index(
        &'file self,
        index: SymbolIndex,
    ) -> read::Result<ElfSymbol<'data, 'file, Elf, R>> {
        let symbol = self.symbols.symbol(index.0)?;
        Ok(ElfSymbol {
            endian: self.endian,
            symbols: &self.symbols,
            index,
            symbol,
        })
    }

    fn symbols(&'file self) -> ElfSymbolIterator<'data, 'file, Elf, R> {
        ElfSymbolIterator {
            endian: self.endian,
            symbols: &self.symbols,
            index: 0,
        }
    }

    // `None` when the file has no `.symtab` (e.g. stripped binaries).
    fn symbol_table(&'file self) -> Option<ElfSymbolTable<'data, 'file, Elf, R>> {
        if self.symbols.is_empty() {
            return None;
        }
        Some(ElfSymbolTable {
            endian: self.endian,
            symbols: &self.symbols,
        })
    }

    fn dynamic_symbols(&'file self) -> ElfSymbolIterator<'data, 'file, Elf, R> {
        ElfSymbolIterator {
            endian: self.endian,
            symbols: &self.dynamic_symbols,
            index: 0,
        }
    }

    // `None` when the file has no `.dynsym`.
    fn dynamic_symbol_table(&'file self) -> Option<ElfSymbolTable<'data, 'file, Elf, R>> {
        if self.dynamic_symbols.is_empty() {
            return None;
        }
        Some(ElfSymbolTable {
            endian: self.endian,
            symbols: &self.dynamic_symbols,
        })
    }

    fn dynamic_relocations(
        &'file self,
    ) -> Option<ElfDynamicRelocationIterator<'data, 'file, Elf, R>> {
        Some(ElfDynamicRelocationIterator {
            // Start at section 1: section 0 is the reserved null section.
            section_index: SectionIndex(1),
            file: self,
            relocations: None,
        })
    }

    // Imports are the named undefined entries of the dynamic symbol table.
    fn imports(&self) -> read::Result<Vec<Import<'data>>> {
        let mut imports = Vec::new();
        for symbol in self.dynamic_symbols.iter() {
            if symbol.is_undefined(self.endian) {
                let name = symbol.name(self.endian, self.dynamic_symbols.strings())?;
                if !name.is_empty() {
                    // TODO: use symbol versioning to determine library
                    imports.push(Import {
                        name: ByteString(name),
                        library: ByteString(&[]),
                    });
                }
            }
        }
        Ok(imports)
    }

    // Exports are the defined entries of the dynamic symbol table.
    fn exports(&self) -> read::Result<Vec<Export<'data>>> {
        let mut exports = Vec::new();
        for symbol in self.dynamic_symbols.iter() {
            if symbol.is_definition(self.endian) {
                let name = symbol.name(self.endian, self.dynamic_symbols.strings())?;
                let address = symbol.st_value(self.endian).into();
                exports.push(Export {
                    name: ByteString(name),
                    address,
                });
            }
        }
        Ok(exports)
    }

    // Heuristic: DWARF debug info is present if a (possibly compressed)
    // `.debug_info` section exists.
    fn has_debug_symbols(&self) -> bool {
        for section in self.sections.iter() {
            if let Ok(name) = self.sections.section_name(self.endian, section) {
                if name == b".debug_info" || name == b".zdebug_info" {
                    return true;
                }
            }
        }
        false
    }

    // Scan all note sections (or note segments, for files without section
    // headers such as cores) for a GNU build-id note.
    fn build_id(&self) -> read::Result<Option<&'data [u8]>> {
        let endian = self.endian;
        // Use section headers if present, otherwise use program headers.
        if !self.sections.is_empty() {
            for section in self.sections.iter() {
                if let Some(mut notes) = section.notes(endian, self.data)? {
                    while let Some(note) = notes.next()? {
                        if note.name() == elf::ELF_NOTE_GNU
                            && note.n_type(endian) == elf::NT_GNU_BUILD_ID
                        {
                            return Ok(Some(note.desc()));
                        }
                    }
                }
            }
        } else {
            for segment in self.segments {
                if let Some(mut notes) = segment.notes(endian, self.data)? {
                    while let Some(note) = notes.next()? {
                        if note.name() == elf::ELF_NOTE_GNU
                            && note.n_type(endian) == elf::NT_GNU_BUILD_ID
                        {
                            return Ok(Some(note.desc()));
                        }
                    }
                }
            }
        }
        Ok(None)
    }

    // `.gnu_debuglink` layout: NUL-terminated filename, padding to a
    // 4-byte boundary, then a CRC32 of the debug file.
    fn gnu_debuglink(&self) -> read::Result<Option<(&'data [u8], u32)>> {
        let section = match self.raw_section_by_name(b".gnu_debuglink") {
            Some(section) => section,
            None => return Ok(None),
        };
        let data = section
            .section
            .data(self.endian, self.data)
            .read_error("Invalid ELF .gnu_debuglink section offset or size")
            .map(Bytes)?;
        let filename = data
            .read_string_at(0)
            .read_error("Missing ELF .gnu_debuglink filename")?;
        let crc_offset = util::align(filename.len() + 1, 4);
        let crc = data
            .read_at::<U32<_>>(crc_offset)
            .read_error("Missing ELF .gnu_debuglink crc")?
            .get(self.endian);
        Ok(Some((filename, crc)))
    }

    // `.gnu_debugaltlink` layout: NUL-terminated filename followed by the
    // raw build-id of the alternate debug file.
    fn gnu_debugaltlink(&self) -> read::Result<Option<(&'data [u8], &'data [u8])>> {
        let section = match self.raw_section_by_name(b".gnu_debugaltlink") {
            Some(section) => section,
            None => return Ok(None),
        };
        let mut data = section
            .section
            .data(self.endian, self.data)
            .read_error("Invalid ELF .gnu_debugaltlink section offset or size")
            .map(Bytes)?;
        let filename = data
            .read_string()
            .read_error("Missing ELF .gnu_debugaltlink filename")?;
        let build_id = data.0;
        Ok(Some((filename, build_id)))
    }

    // ELF addresses are already absolute; no rebasing is needed.
    fn relative_address_base(&self) -> u64 {
        0
    }

    fn entry(&self) -> u64 {
        self.header.e_entry(self.endian).into()
    }

    fn flags(&self) -> FileFlags {
        FileFlags::Elf {
            os_abi: self.header.e_ident().os_abi,
            abi_version: self.header.e_ident().abi_version,
            e_flags: self.header.e_flags(self.endian),
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`elf::FileHeader32`] and [`elf::FileHeader64`].
#[allow(missing_docs)]
pub trait FileHeader: Debug + Pod {
    // Ideally this would be a `u64: From<Word>`, but can't express that.
    type Word: Into<u64>;
    type Sword: Into<i64>;
    type Endian: endian::Endian;
    type ProgramHeader: ProgramHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;
    type SectionHeader: SectionHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;
    type CompressionHeader: CompressionHeader<Endian = Self::Endian, Word = Self::Word>;
    type NoteHeader: NoteHeader<Endian = Self::Endian>;
    type Dyn: Dyn<Endian = Self::Endian, Word = Self::Word>;
    type Sym: Sym<Endian = Self::Endian, Word = Self::Word>;
    type Rel: Rel<Endian = Self::Endian, Word = Self::Word>;
    type Rela: Rela<Endian = Self::Endian, Word = Self::Word> + From<Self::Rel>;

    /// Return true if this type is a 64-bit header.
    ///
    /// This is a property of the type, not a value in the header data.
    fn is_type_64(&self) -> bool;

    /// Return true if this type is a 64-bit header.
    ///
    /// This is a property of the type, not a value in the header data.
    ///
    /// This is the same as [`Self::is_type_64`], but is non-dispatchable.
    fn is_type_64_sized() -> bool
    where
        Self: Sized;

    // Raw accessors for the ELF file header fields; implemented by the
    // concrete 32-bit and 64-bit header types.
    fn e_ident(&self) -> &elf::Ident;
    fn e_type(&self, endian: Self::Endian) -> u16;
    fn e_machine(&self, endian: Self::Endian) -> u16;
    fn e_version(&self, endian: Self::Endian) -> u32;
    fn e_entry(&self, endian: Self::Endian) -> Self::Word;
    fn e_phoff(&self, endian: Self::Endian) -> Self::Word;
    fn e_shoff(&self, endian: Self::Endian) -> Self::Word;
    fn e_flags(&self, endian: Self::Endian) -> u32;
    fn e_ehsize(&self, endian: Self::Endian) -> u16;
    fn e_phentsize(&self, endian: Self::Endian) -> u16;
    fn e_phnum(&self, endian: Self::Endian) -> u16;
    fn e_shentsize(&self, endian: Self::Endian) -> u16;
    fn e_shnum(&self, endian: Self::Endian) -> u16;
    fn e_shstrndx(&self, endian: Self::Endian) -> u16;

    // Provided methods.

    /// Read the file header.
    ///
    /// Also checks that the ident field in the file header is a supported format.
    fn parse<'data, R: ReadRef<'data>>(data: R) -> read::Result<&'data Self> {
        let header = data
            .read_at::<Self>(0)
            .read_error("Invalid ELF header size or alignment")?;
        if !header.is_supported() {
            return Err(Error("Unsupported ELF header"));
        }
        // TODO: Check self.e_ehsize?
        Ok(header)
    }

    /// Check that the ident field in the file header is a supported format.
    ///
    /// This checks the magic number, version, class, and endianness.
    fn is_supported(&self) -> bool {
        let ident = self.e_ident();
        // TODO: Check self.e_version too? Requires endian though.
        ident.magic == elf::ELFMAG
            && (self.is_type_64() || self.is_class_32())
            && (!self.is_type_64() || self.is_class_64())
            && (self.is_little_endian() || self.is_big_endian())
            && ident.version == elf::EV_CURRENT
    }

    /// Return true if the ident class field is `ELFCLASS32`.
    fn is_class_32(&self) -> bool {
        self.e_ident().class == elf::ELFCLASS32
    }

    /// Return true if the ident class field is `ELFCLASS64`.
    fn is_class_64(&self) -> bool {
        self.e_ident().class == elf::ELFCLASS64
    }

    /// Return true if the ident data field is `ELFDATA2LSB`.
    fn is_little_endian(&self) -> bool {
        self.e_ident().data == elf::ELFDATA2LSB
    }

    /// Return true if the ident data field is `ELFDATA2MSB`.
    fn is_big_endian(&self) -> bool {
        self.e_ident().data == elf::ELFDATA2MSB
    }

    /// Return the endianness object implied by the ident data field.
    fn endian(&self) -> read::Result<Self::Endian> {
        Self::Endian::from_big_endian(self.is_big_endian()).read_error("Unsupported ELF endian")
    }

    /// Return the first section header, if present.
    ///
    /// Section 0 is a special case because getting the section headers normally
    /// requires `shnum`, but `shnum` may be in the first section header.
    fn section_0<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<Option<&'data Self::SectionHeader>> {
        let shoff: u64 = self.e_shoff(endian).into();
        if shoff == 0 {
            // No section headers is ok.
            return Ok(None);
        }
        let shentsize = usize::from(self.e_shentsize(endian));
        if shentsize != mem::size_of::<Self::SectionHeader>() {
            // Section header size must match.
            return Err(Error("Invalid ELF section header entry size"));
        }
        data.read_at(shoff)
            .map(Some)
            .read_error("Invalid ELF section header offset or size")
    }

    /// Return the `e_phnum` field of the header. Handles extended values.
    ///
    /// Returns `Err` for invalid values.
    fn phnum<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<usize> {
        let e_phnum = self.e_phnum(endian);
        if e_phnum < elf::PN_XNUM {
            Ok(e_phnum as usize)
        } else if let Some(section_0) = self.section_0(endian, data)? {
            // The real count overflowed into section 0's `sh_info`.
            Ok(section_0.sh_info(endian) as usize)
        } else {
            // Section 0 must exist if e_phnum overflows.
            Err(Error("Missing ELF section headers for e_phnum overflow"))
        }
    }

    /// Return the `e_shnum` field of the header. Handles extended values.
    ///
    /// Returns `Err` for invalid values.
    fn shnum<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<usize> {
        let e_shnum = self.e_shnum(endian);
        if e_shnum > 0 {
            Ok(e_shnum as usize)
        } else if let Some(section_0) = self.section_0(endian, data)? {
            // e_shnum of 0 means the real count is in section 0's `sh_size`.
            section_0
                .sh_size(endian)
                .into()
                .try_into()
                .ok()
                .read_error("Invalid ELF extended e_shnum")
        } else {
            // No section headers is ok.
            Ok(0)
        }
    }

    /// Return the `e_shstrndx` field of the header. Handles extended values.
    ///
    /// Returns `Err` for invalid values (including if the index is 0).
    fn shstrndx<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<u32> {
        let e_shstrndx = self.e_shstrndx(endian);
        let index = if e_shstrndx != elf::SHN_XINDEX {
            e_shstrndx.into()
        } else if let Some(section_0) = self.section_0(endian, data)? {
            // SHN_XINDEX means the real index is in section 0's `sh_link`.
            section_0.sh_link(endian)
        } else {
            // Section 0 must exist if we're trying to read e_shstrndx.
            return Err(Error("Missing ELF section headers for e_shstrndx overflow"));
        };
        if index == 0 {
            return Err(Error("Missing ELF e_shstrndx"));
        }
        Ok(index)
    }

    /// Return the slice of program headers.
    ///
    /// Returns `Ok(&[])` if there are no program headers.
    /// Returns `Err` for invalid values.
    fn program_headers<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<&'data [Self::ProgramHeader]> {
        let phoff: u64 = self.e_phoff(endian).into();
        if phoff == 0 {
            // No program headers is ok.
            return Ok(&[]);
        }
        let phnum = self.phnum(endian, data)?;
        if phnum == 0 {
            // No program headers is ok.
            return Ok(&[]);
        }
        let phentsize = self.e_phentsize(endian) as usize;
        if phentsize != mem::size_of::<Self::ProgramHeader>() {
            // Program header size must match.
            return Err(Error("Invalid ELF program header entry size"));
        }
        data.read_slice_at(phoff, phnum)
            .read_error("Invalid ELF program header size or alignment")
    }

    /// Return the slice of section headers.
    ///
    /// Returns `Ok(&[])` if there are no section headers.
    /// Returns `Err` for invalid values.
    fn section_headers<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<&'data [Self::SectionHeader]> {
        let shoff: u64 = self.e_shoff(endian).into();
        if shoff == 0 {
            // No section headers is ok.
            return Ok(&[]);
        }
        let shnum = self.shnum(endian, data)?;
        if shnum == 0 {
            // No section headers is ok.
            return Ok(&[]);
        }
        let shentsize = usize::from(self.e_shentsize(endian));
        if shentsize != mem::size_of::<Self::SectionHeader>() {
            // Section header size must match.
            return Err(Error("Invalid ELF section header entry size"));
        }
        data.read_slice_at(shoff, shnum)
            .read_error("Invalid ELF section header offset/size/alignment")
    }

    /// Return the string table for the section headers.
    fn section_strings<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
        sections: &[Self::SectionHeader],
    ) -> read::Result<StringTable<'data, R>> {
        if sections.is_empty() {
            return Ok(StringTable::default());
        }
        let index = self.shstrndx(endian, data)? as usize;
        let shstrtab = sections.get(index).read_error("Invalid ELF e_shstrndx")?;
        // `file_range` is `None` for SHT_NOBITS; treat that as empty strings.
        let strings = if let Some((shstrtab_offset, shstrtab_size)) = shstrtab.file_range(endian) {
            let shstrtab_end = shstrtab_offset
                .checked_add(shstrtab_size)
                .read_error("Invalid ELF shstrtab size")?;
            StringTable::new(data, shstrtab_offset, shstrtab_end)
        } else {
            StringTable::default()
        };
        Ok(strings)
    }

    /// Return the section table.
    fn sections<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<SectionTable<'data, Self, R>> {
        let sections = self.section_headers(endian, data)?;
        let strings = self.section_strings(endian, data, sections)?;
        Ok(SectionTable::new(sections, strings))
    }

    /// Returns whether this is a mips64el elf file.
    fn is_mips64el(&self, endian: Self::Endian) -> bool {
        self.is_class_64() && self.is_little_endian() && self.e_machine(endian) == elf::EM_MIPS
    }
}
|
||||
|
||||
// Concrete `FileHeader` implementation for 32-bit ELF: every accessor
// simply reads the corresponding header field with the given endianness.
impl<Endian: endian::Endian> FileHeader for elf::FileHeader32<Endian> {
    type Word = u32;
    type Sword = i32;
    type Endian = Endian;
    type ProgramHeader = elf::ProgramHeader32<Endian>;
    type SectionHeader = elf::SectionHeader32<Endian>;
    type CompressionHeader = elf::CompressionHeader32<Endian>;
    type NoteHeader = elf::NoteHeader32<Endian>;
    type Dyn = elf::Dyn32<Endian>;
    type Sym = elf::Sym32<Endian>;
    type Rel = elf::Rel32<Endian>;
    type Rela = elf::Rela32<Endian>;

    #[inline]
    fn is_type_64(&self) -> bool {
        false
    }

    #[inline]
    fn is_type_64_sized() -> bool
    where
        Self: Sized,
    {
        false
    }

    #[inline]
    fn e_ident(&self) -> &elf::Ident {
        &self.e_ident
    }

    #[inline]
    fn e_type(&self, endian: Self::Endian) -> u16 {
        self.e_type.get(endian)
    }

    #[inline]
    fn e_machine(&self, endian: Self::Endian) -> u16 {
        self.e_machine.get(endian)
    }

    #[inline]
    fn e_version(&self, endian: Self::Endian) -> u32 {
        self.e_version.get(endian)
    }

    #[inline]
    fn e_entry(&self, endian: Self::Endian) -> Self::Word {
        self.e_entry.get(endian)
    }

    #[inline]
    fn e_phoff(&self, endian: Self::Endian) -> Self::Word {
        self.e_phoff.get(endian)
    }

    #[inline]
    fn e_shoff(&self, endian: Self::Endian) -> Self::Word {
        self.e_shoff.get(endian)
    }

    #[inline]
    fn e_flags(&self, endian: Self::Endian) -> u32 {
        self.e_flags.get(endian)
    }

    #[inline]
    fn e_ehsize(&self, endian: Self::Endian) -> u16 {
        self.e_ehsize.get(endian)
    }

    #[inline]
    fn e_phentsize(&self, endian: Self::Endian) -> u16 {
        self.e_phentsize.get(endian)
    }

    #[inline]
    fn e_phnum(&self, endian: Self::Endian) -> u16 {
        self.e_phnum.get(endian)
    }

    #[inline]
    fn e_shentsize(&self, endian: Self::Endian) -> u16 {
        self.e_shentsize.get(endian)
    }

    #[inline]
    fn e_shnum(&self, endian: Self::Endian) -> u16 {
        self.e_shnum.get(endian)
    }

    #[inline]
    fn e_shstrndx(&self, endian: Self::Endian) -> u16 {
        self.e_shstrndx.get(endian)
    }
}
|
||||
|
||||
// Concrete `FileHeader` implementation for 64-bit ELF: every accessor
// simply reads the corresponding header field with the given endianness.
impl<Endian: endian::Endian> FileHeader for elf::FileHeader64<Endian> {
    type Word = u64;
    type Sword = i64;
    type Endian = Endian;
    type ProgramHeader = elf::ProgramHeader64<Endian>;
    type SectionHeader = elf::SectionHeader64<Endian>;
    type CompressionHeader = elf::CompressionHeader64<Endian>;
    // Note sections conventionally keep the 32-bit header layout even in
    // 64-bit ELF, so the 32-bit note header type is reused here.
    type NoteHeader = elf::NoteHeader32<Endian>;
    type Dyn = elf::Dyn64<Endian>;
    type Sym = elf::Sym64<Endian>;
    type Rel = elf::Rel64<Endian>;
    type Rela = elf::Rela64<Endian>;

    #[inline]
    fn is_type_64(&self) -> bool {
        true
    }

    #[inline]
    fn is_type_64_sized() -> bool
    where
        Self: Sized,
    {
        true
    }

    #[inline]
    fn e_ident(&self) -> &elf::Ident {
        &self.e_ident
    }

    #[inline]
    fn e_type(&self, endian: Self::Endian) -> u16 {
        self.e_type.get(endian)
    }

    #[inline]
    fn e_machine(&self, endian: Self::Endian) -> u16 {
        self.e_machine.get(endian)
    }

    #[inline]
    fn e_version(&self, endian: Self::Endian) -> u32 {
        self.e_version.get(endian)
    }

    #[inline]
    fn e_entry(&self, endian: Self::Endian) -> Self::Word {
        self.e_entry.get(endian)
    }

    #[inline]
    fn e_phoff(&self, endian: Self::Endian) -> Self::Word {
        self.e_phoff.get(endian)
    }

    #[inline]
    fn e_shoff(&self, endian: Self::Endian) -> Self::Word {
        self.e_shoff.get(endian)
    }

    #[inline]
    fn e_flags(&self, endian: Self::Endian) -> u32 {
        self.e_flags.get(endian)
    }

    #[inline]
    fn e_ehsize(&self, endian: Self::Endian) -> u16 {
        self.e_ehsize.get(endian)
    }

    #[inline]
    fn e_phentsize(&self, endian: Self::Endian) -> u16 {
        self.e_phentsize.get(endian)
    }

    #[inline]
    fn e_phnum(&self, endian: Self::Endian) -> u16 {
        self.e_phnum.get(endian)
    }

    #[inline]
    fn e_shentsize(&self, endian: Self::Endian) -> u16 {
        self.e_shentsize.get(endian)
    }

    #[inline]
    fn e_shnum(&self, endian: Self::Endian) -> u16 {
        self.e_shnum.get(endian)
    }

    #[inline]
    fn e_shstrndx(&self, endian: Self::Endian) -> u16 {
        self.e_shstrndx.get(endian)
    }
}
|
||||
224
vendor/object/src/read/elf/hash.rs
vendored
Normal file
224
vendor/object/src/read/elf/hash.rs
vendored
Normal file
@@ -0,0 +1,224 @@
|
||||
use core::mem;
|
||||
|
||||
use crate::elf;
|
||||
use crate::read::{ReadError, ReadRef, Result};
|
||||
use crate::{U32, U64};
|
||||
|
||||
use super::{FileHeader, Sym, SymbolTable, Version, VersionTable};
|
||||
|
||||
/// A SysV symbol hash table in an ELF file.
///
/// Returned by [`SectionHeader::hash`](super::SectionHeader::hash).
#[derive(Debug)]
pub struct HashTable<'data, Elf: FileHeader> {
    // One entry per bucket; each holds the symbol index starting a chain.
    buckets: &'data [U32<Elf::Endian>],
    // Chain links indexed by symbol index; index 0 terminates a chain.
    chains: &'data [U32<Elf::Endian>],
}
|
||||
|
||||
impl<'data, Elf: FileHeader> HashTable<'data, Elf> {
|
||||
/// Parse a SysV hash table.
|
||||
///
|
||||
/// `data` should be from an [`elf::SHT_HASH`] section, or from a
|
||||
/// segment pointed to via the [`elf::DT_HASH`] entry.
|
||||
///
|
||||
/// The header is read at offset 0 in the given `data`.
|
||||
pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
|
||||
let mut offset = 0;
|
||||
let header = data
|
||||
.read::<elf::HashHeader<Elf::Endian>>(&mut offset)
|
||||
.read_error("Invalid hash header")?;
|
||||
let buckets = data
|
||||
.read_slice(&mut offset, header.bucket_count.get(endian) as usize)
|
||||
.read_error("Invalid hash buckets")?;
|
||||
let chains = data
|
||||
.read_slice(&mut offset, header.chain_count.get(endian) as usize)
|
||||
.read_error("Invalid hash chains")?;
|
||||
Ok(HashTable { buckets, chains })
|
||||
}
|
||||
|
||||
/// Return the symbol table length.
|
||||
pub fn symbol_table_length(&self) -> u32 {
|
||||
self.chains.len() as u32
|
||||
}
|
||||
|
||||
/// Use the hash table to find the symbol table entry with the given name, hash and version.
|
||||
pub fn find<R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
name: &[u8],
|
||||
hash: u32,
|
||||
version: Option<&Version<'_>>,
|
||||
symbols: &SymbolTable<'data, Elf, R>,
|
||||
versions: &VersionTable<'data, Elf>,
|
||||
) -> Option<(usize, &'data Elf::Sym)> {
|
||||
// Get the chain start from the bucket for this hash.
|
||||
let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
|
||||
// Avoid infinite loop.
|
||||
let mut i = 0;
|
||||
let strings = symbols.strings();
|
||||
while index != 0 && i < self.chains.len() {
|
||||
if let Ok(symbol) = symbols.symbol(index) {
|
||||
if symbol.name(endian, strings) == Ok(name)
|
||||
&& versions.matches(endian, index, version)
|
||||
{
|
||||
return Some((index, symbol));
|
||||
}
|
||||
}
|
||||
index = self.chains.get(index)?.get(endian) as usize;
|
||||
i += 1;
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A GNU symbol hash table in an ELF file.
///
/// Returned by [`SectionHeader::gnu_hash`](super::SectionHeader::gnu_hash).
#[derive(Debug)]
pub struct GnuHashTable<'data, Elf: FileHeader> {
    // Index of the first symbol covered by the hash table.
    symbol_base: u32,
    // Shift amount used to derive the second bloom-filter bit.
    bloom_shift: u32,
    // Raw bloom filter words; word size depends on the ELF class.
    bloom_filters: &'data [u8],
    // One chain-start symbol index per bucket.
    buckets: &'data [U32<Elf::Endian>],
    // Hash values for symbols starting at `symbol_base`; the low bit
    // marks the end of a chain.
    values: &'data [U32<Elf::Endian>],
}
|
||||
|
||||
impl<'data, Elf: FileHeader> GnuHashTable<'data, Elf> {
|
||||
/// Parse a GNU hash table.
|
||||
///
|
||||
/// `data` should be from an [`elf::SHT_GNU_HASH`] section, or from a
|
||||
/// segment pointed to via the [`elf::DT_GNU_HASH`] entry.
|
||||
///
|
||||
/// The header is read at offset 0 in the given `data`.
|
||||
///
|
||||
/// The header does not contain a length field, and so all of `data`
|
||||
/// will be used as the hash table values. It does not matter if this
|
||||
/// is longer than needed, and this will often the case when accessing
|
||||
/// the hash table via the [`elf::DT_GNU_HASH`] entry.
|
||||
pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
|
||||
let mut offset = 0;
|
||||
let header = data
|
||||
.read::<elf::GnuHashHeader<Elf::Endian>>(&mut offset)
|
||||
.read_error("Invalid GNU hash header")?;
|
||||
let bloom_len =
|
||||
u64::from(header.bloom_count.get(endian)) * mem::size_of::<Elf::Word>() as u64;
|
||||
let bloom_filters = data
|
||||
.read_bytes(&mut offset, bloom_len)
|
||||
.read_error("Invalid GNU hash bloom filters")?;
|
||||
let buckets = data
|
||||
.read_slice(&mut offset, header.bucket_count.get(endian) as usize)
|
||||
.read_error("Invalid GNU hash buckets")?;
|
||||
let chain_count = (data.len() - offset as usize) / 4;
|
||||
let values = data
|
||||
.read_slice(&mut offset, chain_count)
|
||||
.read_error("Invalid GNU hash values")?;
|
||||
Ok(GnuHashTable {
|
||||
symbol_base: header.symbol_base.get(endian),
|
||||
bloom_shift: header.bloom_shift.get(endian),
|
||||
bloom_filters,
|
||||
buckets,
|
||||
values,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the symbol table index of the first symbol in the hash table.
|
||||
pub fn symbol_base(&self) -> u32 {
|
||||
self.symbol_base
|
||||
}
|
||||
|
||||
/// Determine the symbol table length by finding the last entry in the hash table.
|
||||
///
|
||||
/// Returns `None` if the hash table is empty or invalid.
|
||||
pub fn symbol_table_length(&self, endian: Elf::Endian) -> Option<u32> {
|
||||
// Ensure we find a non-empty bucket.
|
||||
if self.symbol_base == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Find the highest chain index in a bucket.
|
||||
let mut max_symbol = 0;
|
||||
for bucket in self.buckets {
|
||||
let bucket = bucket.get(endian);
|
||||
if max_symbol < bucket {
|
||||
max_symbol = bucket;
|
||||
}
|
||||
}
|
||||
|
||||
// Find the end of the chain.
|
||||
for value in self
|
||||
.values
|
||||
.get(max_symbol.checked_sub(self.symbol_base)? as usize..)?
|
||||
{
|
||||
max_symbol += 1;
|
||||
if value.get(endian) & 1 != 0 {
|
||||
return Some(max_symbol);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
    /// Use the hash table to find the symbol table entry with the given name, hash, and version.
    ///
    /// Returns the symbol table index and the symbol, or `None` if the symbol
    /// is not present (or any table access is out of bounds).
    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version<'_>>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        // Bits per bloom filter word: 32 or 64 depending on the ELF class.
        let word_bits = mem::size_of::<Elf::Word>() as u32 * 8;

        // Test against bloom filter.
        // NOTE(review): the `&` below assumes `bloom_count` is a power of two
        // (standard for GNU hash sections) so it acts as a cheap modulo.
        let bloom_count = self.bloom_filters.len() / mem::size_of::<Elf::Word>();
        let offset =
            ((hash / word_bits) & (bloom_count as u32 - 1)) * mem::size_of::<Elf::Word>() as u32;
        let filter = if word_bits == 64 {
            self.bloom_filters
                .read_at::<U64<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
        } else {
            self.bloom_filters
                .read_at::<U32<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
                .into()
        };
        // Both probe bits must be set for the symbol to possibly be present;
        // either bit clear is a definite miss.
        if filter & (1 << (hash % word_bits)) == 0 {
            return None;
        }
        if filter & (1 << ((hash >> self.bloom_shift) % word_bits)) == 0 {
            return None;
        }

        // Get the chain start from the bucket for this hash.
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        if index == 0 {
            // A zero bucket entry means no symbols hash to this bucket.
            return None;
        }

        // Test symbols in the chain.
        let strings = symbols.strings();
        let symbols = symbols.symbols().get(index..)?;
        let values = self
            .values
            .get(index.checked_sub(self.symbol_base as usize)?..)?;
        for (symbol, value) in symbols.iter().zip(values.iter()) {
            let value = value.get(endian);
            // Each chain value is the symbol's hash with the low bit repurposed
            // as an end-of-chain flag, so compare with the low bit masked in.
            if value | 1 == hash | 1 {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            // Low bit set marks the last entry in this chain.
            if value & 1 != 0 {
                break;
            }
            index += 1;
        }
        None
    }
|
||||
}
|
||||
78
vendor/object/src/read/elf/mod.rs
vendored
Normal file
78
vendor/object/src/read/elf/mod.rs
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
//! Support for reading ELF files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between 32-bit and 64-bit ELF.
|
||||
//! The primary trait for this is [`FileHeader`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`ElfFile`] implements the [`Object`](crate::read::Object) trait for ELF files.
|
||||
//! [`ElfFile`] is parameterised by [`FileHeader`] to allow reading both 32-bit and
|
||||
//! 64-bit ELF. There are type aliases for these parameters ([`ElfFile32`] and
|
||||
//! [`ElfFile64`]).
|
||||
//!
|
||||
//! ## Low level API
|
||||
//!
|
||||
//! The [`FileHeader`] trait can be directly used to parse both [`elf::FileHeader32`]
|
||||
//! and [`elf::FileHeader64`].
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::elf;
|
||||
//! use object::read::elf::{FileHeader, Sym};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each symbol.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let elf = elf::FileHeader64::<object::Endianness>::parse(&*data)?;
|
||||
//! let endian = elf.endian()?;
|
||||
//! let sections = elf.sections(endian, &*data)?;
|
||||
//! let symbols = sections.symbols(endian, &*data, elf::SHT_SYMTAB)?;
|
||||
//! for symbol in symbols.iter() {
|
||||
//! let name = symbol.name(endian, symbols.strings())?;
|
||||
//! println!("{}", String::from_utf8_lossy(name));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::elf;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod segment;
|
||||
pub use segment::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod symbol;
|
||||
pub use symbol::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
|
||||
mod comdat;
|
||||
pub use comdat::*;
|
||||
|
||||
mod dynamic;
|
||||
pub use dynamic::*;
|
||||
|
||||
mod compression;
|
||||
pub use compression::*;
|
||||
|
||||
mod note;
|
||||
pub use note::*;
|
||||
|
||||
mod hash;
|
||||
pub use hash::*;
|
||||
|
||||
mod version;
|
||||
pub use version::*;
|
||||
|
||||
mod attributes;
|
||||
pub use attributes::*;
|
||||
271
vendor/object/src/read/elf/note.rs
vendored
Normal file
271
vendor/object/src/read/elf/note.rs
vendored
Normal file
@@ -0,0 +1,271 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, U32};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util;
|
||||
use crate::read::{self, Bytes, Error, ReadError};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// An iterator over the notes in an ELF section or segment.
///
/// Returned by [`ProgramHeader::notes`](super::ProgramHeader::notes)
/// and [`SectionHeader::notes`](super::SectionHeader::notes).
#[derive(Debug)]
pub struct NoteIterator<'data, Elf>
where
    Elf: FileHeader,
{
    // Endianness used to decode note headers.
    endian: Elf::Endian,
    // Alignment of the descriptor and of successive notes (4 or 8).
    align: usize,
    // Remaining unparsed note data.
    data: Bytes<'data>,
}
|
||||
|
||||
impl<'data, Elf> NoteIterator<'data, Elf>
where
    Elf: FileHeader,
{
    /// An iterator over the notes in an ELF section or segment.
    ///
    /// `align` should be from the `p_align` field of the segment,
    /// or the `sh_addralign` field of the section. Supported values are
    /// either 4 or 8, but values less than 4 are treated as 4.
    /// This matches the behaviour of binutils.
    ///
    /// Returns `Err` if `align` is invalid.
    pub fn new(endian: Elf::Endian, align: Elf::Word, data: &'data [u8]) -> read::Result<Self> {
        let align = match align.into() {
            // Values below 4 (including 0) are clamped up to 4.
            0u64..=4 => 4,
            8 => 8,
            // Anything else (5-7, or greater than 8) is rejected.
            _ => return Err(Error("Invalid ELF note alignment")),
        };
        // TODO: check data alignment?
        Ok(NoteIterator {
            endian,
            align,
            data: Bytes(data),
        })
    }

    /// Returns the next note.
    ///
    /// Returns `Ok(None)` once all notes have been consumed, and `Err` if a
    /// note header or its fields overrun the remaining data.
    pub fn next(&mut self) -> read::Result<Option<Note<'data, Elf>>> {
        let mut data = self.data;
        if data.is_empty() {
            return Ok(None);
        }

        let header = data
            .read_at::<Elf::NoteHeader>(0)
            .read_error("ELF note is too short")?;

        // The name has no alignment requirement.
        let offset = mem::size_of::<Elf::NoteHeader>();
        let namesz = header.n_namesz(self.endian) as usize;
        let name = data
            .read_bytes_at(offset, namesz)
            .read_error("Invalid ELF note namesz")?
            .0;

        // The descriptor must be aligned.
        let offset = util::align(offset + namesz, self.align);
        let descsz = header.n_descsz(self.endian) as usize;
        let desc = data
            .read_bytes_at(offset, descsz)
            .read_error("Invalid ELF note descsz")?
            .0;

        // The next note (if any) must be aligned.
        let offset = util::align(offset + descsz, self.align);
        // If only trailing padding overruns the data, treat the iterator as
        // exhausted rather than failing.
        if data.skip(offset).is_err() {
            data = Bytes(&[]);
        }
        self.data = data;

        Ok(Some(Note { header, name, desc }))
    }
}
|
||||
|
||||
/// A parsed [`NoteHeader`].
#[derive(Debug)]
pub struct Note<'data, Elf>
where
    Elf: FileHeader,
{
    // The raw note header.
    header: &'data Elf::NoteHeader,
    // Name bytes following the header (length `n_namesz`).
    name: &'data [u8],
    // Descriptor bytes following the name (length `n_descsz`).
    desc: &'data [u8],
}
|
||||
|
||||
impl<'data, Elf: FileHeader> Note<'data, Elf> {
|
||||
/// Return the `n_type` field of the `NoteHeader`.
|
||||
///
|
||||
/// The meaning of this field is determined by `name`.
|
||||
pub fn n_type(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_type(endian)
|
||||
}
|
||||
|
||||
/// Return the `n_namesz` field of the `NoteHeader`.
|
||||
pub fn n_namesz(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_namesz(endian)
|
||||
}
|
||||
|
||||
/// Return the `n_descsz` field of the `NoteHeader`.
|
||||
pub fn n_descsz(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_descsz(endian)
|
||||
}
|
||||
|
||||
/// Return the bytes for the name field following the `NoteHeader`.
|
||||
///
|
||||
/// This field is usually a string including one or more trailing null bytes
|
||||
/// (but it is not required to be).
|
||||
///
|
||||
/// The length of this field is given by `n_namesz`.
|
||||
pub fn name_bytes(&self) -> &'data [u8] {
|
||||
self.name
|
||||
}
|
||||
|
||||
/// Return the bytes for the name field following the `NoteHeader`,
|
||||
/// excluding all trailing null bytes.
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
let mut name = self.name;
|
||||
while let [rest @ .., 0] = name {
|
||||
name = rest;
|
||||
}
|
||||
name
|
||||
}
|
||||
|
||||
/// Return the bytes for the desc field following the `NoteHeader`.
|
||||
///
|
||||
/// The length of this field is given by `n_descsz`. The meaning
|
||||
/// of this field is determined by `name` and `n_type`.
|
||||
pub fn desc(&self) -> &'data [u8] {
|
||||
self.desc
|
||||
}
|
||||
|
||||
/// Return an iterator for properties if this note's type is [`elf::NT_GNU_PROPERTY_TYPE_0`].
|
||||
pub fn gnu_properties(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
) -> Option<GnuPropertyIterator<'data, Elf::Endian>> {
|
||||
if self.name() != elf::ELF_NOTE_GNU || self.n_type(endian) != elf::NT_GNU_PROPERTY_TYPE_0 {
|
||||
return None;
|
||||
}
|
||||
// Use the ELF class instead of the section alignment.
|
||||
// This matches what other parsers do.
|
||||
let align = if Elf::is_type_64_sized() { 8 } else { 4 };
|
||||
Some(GnuPropertyIterator {
|
||||
endian,
|
||||
align,
|
||||
data: Bytes(self.desc),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::NoteHeader32`] and [`elf::NoteHeader64`].
#[allow(missing_docs)]
pub trait NoteHeader: Debug + Pod {
    type Endian: endian::Endian;

    /// Length in bytes of the name field following the header.
    fn n_namesz(&self, endian: Self::Endian) -> u32;
    /// Length in bytes of the descriptor field following the name.
    fn n_descsz(&self, endian: Self::Endian) -> u32;
    /// Note type; its meaning is determined by the note name.
    fn n_type(&self, endian: Self::Endian) -> u32;
}
|
||||
|
||||
// Endian-aware field accessors for the 32-bit note header.
impl<Endian: endian::Endian> NoteHeader for elf::NoteHeader32<Endian> {
    type Endian = Endian;

    #[inline]
    fn n_namesz(&self, endian: Self::Endian) -> u32 {
        self.n_namesz.get(endian)
    }

    #[inline]
    fn n_descsz(&self, endian: Self::Endian) -> u32 {
        self.n_descsz.get(endian)
    }

    #[inline]
    fn n_type(&self, endian: Self::Endian) -> u32 {
        self.n_type.get(endian)
    }
}
|
||||
|
||||
// Endian-aware field accessors for the 64-bit note header.
impl<Endian: endian::Endian> NoteHeader for elf::NoteHeader64<Endian> {
    type Endian = Endian;

    #[inline]
    fn n_namesz(&self, endian: Self::Endian) -> u32 {
        self.n_namesz.get(endian)
    }

    #[inline]
    fn n_descsz(&self, endian: Self::Endian) -> u32 {
        self.n_descsz.get(endian)
    }

    #[inline]
    fn n_type(&self, endian: Self::Endian) -> u32 {
        self.n_type.get(endian)
    }
}
|
||||
|
||||
/// An iterator for the properties in a [`elf::NT_GNU_PROPERTY_TYPE_0`] note.
///
/// Returned by [`Note::gnu_properties`].
#[derive(Debug)]
pub struct GnuPropertyIterator<'data, Endian: endian::Endian> {
    // Endianness used to decode property fields.
    endian: Endian,
    // Property alignment: 8 for 64-bit ELF, 4 for 32-bit (set by `gnu_properties`).
    align: usize,
    // Remaining unparsed property data.
    data: Bytes<'data>,
}
|
||||
|
||||
impl<'data, Endian: endian::Endian> GnuPropertyIterator<'data, Endian> {
    /// Returns the next property.
    ///
    /// Returns `Ok(None)` once all properties have been parsed, and `Err` if
    /// the remaining data is too short for a complete property entry.
    pub fn next(&mut self) -> read::Result<Option<GnuProperty<'data>>> {
        let mut data = self.data;
        if data.is_empty() {
            return Ok(None);
        }

        // Parse inside a closure so that every failed read maps to the same error.
        (|| -> Result<_, ()> {
            // Each property is: u32 pr_type, u32 pr_datasz, then pr_datasz payload bytes.
            let pr_type = data.read_at::<U32<Endian>>(0)?.get(self.endian);
            let pr_datasz = data.read_at::<U32<Endian>>(4)?.get(self.endian) as usize;
            let pr_data = data.read_bytes_at(8, pr_datasz)?.0;
            // The next property starts at the next aligned offset.
            data.skip(util::align(8 + pr_datasz, self.align))?;
            self.data = data;
            Ok(Some(GnuProperty { pr_type, pr_data }))
        })()
        .read_error("Invalid ELF GNU property")
    }
}
|
||||
|
||||
/// A property in a [`elf::NT_GNU_PROPERTY_TYPE_0`] note.
#[derive(Debug)]
pub struct GnuProperty<'data> {
    // One of the `GNU_PROPERTY_*` constants.
    pr_type: u32,
    // Raw property payload; interpretation depends on `pr_type`.
    pr_data: &'data [u8],
}
|
||||
|
||||
impl<'data> GnuProperty<'data> {
|
||||
/// Return the property type.
|
||||
///
|
||||
/// This is one of the `GNU_PROPERTY_*` constants.
|
||||
pub fn pr_type(&self) -> u32 {
|
||||
self.pr_type
|
||||
}
|
||||
|
||||
/// Return the property data.
|
||||
pub fn pr_data(&self) -> &'data [u8] {
|
||||
self.pr_data
|
||||
}
|
||||
|
||||
/// Parse the property data as an unsigned 32-bit integer.
|
||||
pub fn data_u32<E: endian::Endian>(&self, endian: E) -> read::Result<u32> {
|
||||
Bytes(self.pr_data)
|
||||
.read_at::<U32<E>>(0)
|
||||
.read_error("Invalid ELF GNU property data")
|
||||
.map(|val| val.get(endian))
|
||||
}
|
||||
}
|
||||
628
vendor/object/src/read/elf/relocation.rs
vendored
Normal file
628
vendor/object/src/read/elf/relocation.rs
vendored
Normal file
@@ -0,0 +1,628 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{
|
||||
self, Error, ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget,
|
||||
SectionIndex, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{ElfFile, FileHeader, SectionHeader, SectionTable};
|
||||
|
||||
/// A mapping from section index to associated relocation sections.
#[derive(Debug)]
pub struct RelocationSections {
    // relocations[i] is the section index of a relocation section associated
    // with section i, or 0 if there is none. When a section has multiple
    // relocation sections, they are chained through their own entries.
    relocations: Vec<usize>,
}
|
||||
|
||||
impl RelocationSections {
    /// Create a new mapping using the section table.
    ///
    /// Skips relocation sections that do not use the given symbol table section.
    pub fn parse<'data, Elf: FileHeader, R: ReadRef<'data>>(
        endian: Elf::Endian,
        sections: &SectionTable<'data, Elf, R>,
        symbol_section: SectionIndex,
    ) -> read::Result<Self> {
        // 0 means "no relocation section"; it is safe as a sentinel because
        // section index 0 is never a valid relocation section.
        let mut relocations = vec![0; sections.len()];
        // Iterate in reverse so that prepending to each chain (below) leaves
        // the chain in ascending file order.
        for (index, section) in sections.iter().enumerate().rev() {
            let sh_type = section.sh_type(endian);
            if sh_type == elf::SHT_REL || sh_type == elf::SHT_RELA {
                // The symbol indices used in relocations must be for the symbol table
                // we are expecting to use.
                let sh_link = SectionIndex(section.sh_link(endian) as usize);
                if sh_link != symbol_section {
                    continue;
                }

                // sh_info is the index of the section the relocations apply to.
                let sh_info = section.sh_info(endian) as usize;
                if sh_info == 0 {
                    // Skip dynamic relocations.
                    continue;
                }
                if sh_info >= relocations.len() {
                    return Err(Error("Invalid ELF sh_info for relocation section"));
                }

                // Handle multiple relocation sections by chaining them:
                // the target section's entry points at the first relocation
                // section, whose own entry points at the next, and so on.
                let next = relocations[sh_info];
                relocations[sh_info] = index;
                relocations[index] = next;
            }
        }
        Ok(Self { relocations })
    }

    /// Given a section index, return the section index of the associated relocation section.
    ///
    /// This may also be called with a relocation section index, and it will return the
    /// next associated relocation section.
    pub fn get(&self, index: usize) -> Option<usize> {
        // 0 is the chain terminator, so it is filtered out rather than returned.
        self.relocations.get(index).cloned().filter(|x| *x != 0)
    }
}
|
||||
|
||||
// An iterator over raw relocation entries of either flavour:
// REL (implicit addend) or RELA (explicit addend).
pub(super) enum ElfRelaIterator<'data, Elf: FileHeader> {
    Rel(slice::Iter<'data, Elf::Rel>),
    Rela(slice::Iter<'data, Elf::Rela>),
}
|
||||
|
||||
impl<'data, Elf: FileHeader> ElfRelaIterator<'data, Elf> {
|
||||
fn is_rel(&self) -> bool {
|
||||
match self {
|
||||
ElfRelaIterator::Rel(_) => true,
|
||||
ElfRelaIterator::Rela(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> Iterator for ElfRelaIterator<'data, Elf> {
|
||||
type Item = Elf::Rela;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
ElfRelaIterator::Rel(ref mut i) => i.next().cloned().map(Self::Item::from),
|
||||
ElfRelaIterator::Rela(ref mut i) => i.next().cloned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the dynamic relocations in an [`ElfFile32`](super::ElfFile32).
pub type ElfDynamicRelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfDynamicRelocationIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the dynamic relocations in an [`ElfFile64`](super::ElfFile64).
pub type ElfDynamicRelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfDynamicRelocationIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the dynamic relocations in an [`ElfFile`].
pub struct ElfDynamicRelocationIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    /// The current relocation section index.
    pub(super) section_index: SectionIndex,
    /// The file whose sections are being scanned.
    pub(super) file: &'file ElfFile<'data, Elf, R>,
    /// Entries remaining in the current relocation section, if any.
    pub(super) relocations: Option<ElfRelaIterator<'data, Elf>>,
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfDynamicRelocationIterator<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    type Item = (u64, Relocation);

    /// Yields (offset, relocation) pairs from every REL/RELA section that is
    /// linked to the dynamic symbol table, in section order.
    fn next(&mut self) -> Option<Self::Item> {
        let endian = self.file.endian;
        loop {
            // Drain the current relocation section first.
            if let Some(ref mut relocations) = self.relocations {
                if let Some(reloc) = relocations.next() {
                    let relocation =
                        parse_relocation(self.file.header, endian, reloc, relocations.is_rel());
                    return Some((reloc.r_offset(endian).into(), relocation));
                }
                self.relocations = None;
            }

            // Advance to the next section; iteration ends when the section
            // table is exhausted.
            let section = self.file.sections.section(self.section_index).ok()?;
            self.section_index.0 += 1;

            // Only sections linked to the dynamic symbol table count as
            // dynamic relocation sections.
            let sh_link = SectionIndex(section.sh_link(endian) as usize);
            if sh_link != self.file.dynamic_symbols.section() {
                continue;
            }

            // Sections with unreadable data are silently skipped.
            match section.sh_type(endian) {
                elf::SHT_REL => {
                    if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
                        self.relocations = Some(ElfRelaIterator::Rel(relocations.iter()));
                    }
                }
                elf::SHT_RELA => {
                    if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
                        self.relocations = Some(ElfRelaIterator::Rela(relocations.iter()));
                    }
                }
                _ => {}
            }
        }
    }
}
|
||||
|
||||
impl<'data, 'file, Elf, R> fmt::Debug for ElfDynamicRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ElfDynamicRelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the relocations for an [`ElfSection32`](super::ElfSection32).
pub type ElfSectionRelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSectionRelocationIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the relocations for an [`ElfSection64`](super::ElfSection64).
pub type ElfSectionRelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSectionRelocationIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the relocations for an [`ElfSection`](super::ElfSection).
pub struct ElfSectionRelocationIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    /// The current pointer in the chain of relocation sections.
    pub(super) section_index: SectionIndex,
    /// The file whose relocation-section chain is being followed.
    pub(super) file: &'file ElfFile<'data, Elf, R>,
    /// Entries remaining in the current relocation section, if any.
    pub(super) relocations: Option<ElfRelaIterator<'data, Elf>>,
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfSectionRelocationIterator<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    type Item = (u64, Relocation);

    /// Yields (offset, relocation) pairs from every relocation section chained
    /// to the target section via [`RelocationSections`].
    fn next(&mut self) -> Option<Self::Item> {
        let endian = self.file.endian;
        loop {
            // Drain the current relocation section first.
            if let Some(ref mut relocations) = self.relocations {
                if let Some(reloc) = relocations.next() {
                    let relocation =
                        parse_relocation(self.file.header, endian, reloc, relocations.is_rel());
                    return Some((reloc.r_offset(endian).into(), relocation));
                }
                self.relocations = None;
            }
            // Follow the chain to the next relocation section; iteration ends
            // when the chain is exhausted.
            self.section_index = SectionIndex(self.file.relocations.get(self.section_index.0)?);
            // The construction of RelocationSections ensures section_index is valid.
            let section = self.file.sections.section(self.section_index).unwrap();
            // Sections with unreadable data are silently skipped.
            match section.sh_type(endian) {
                elf::SHT_REL => {
                    if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
                        self.relocations = Some(ElfRelaIterator::Rel(relocations.iter()));
                    }
                }
                elf::SHT_RELA => {
                    if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
                        self.relocations = Some(ElfRelaIterator::Rela(relocations.iter()));
                    }
                }
                _ => {}
            }
        }
    }
}
|
||||
|
||||
impl<'data, 'file, Elf, R> fmt::Debug for ElfSectionRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ElfSectionRelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a raw ELF relocation entry into the generic [`Relocation`] form.
///
/// `implicit_addend` is true for REL entries (addend stored at the relocated
/// location) and false for RELA entries (explicit `r_addend`). Relocation
/// types that are not recognised for the machine are passed through as
/// `RelocationKind::Elf(r_type)` with a size of 0.
fn parse_relocation<Elf: FileHeader>(
    header: &Elf,
    endian: Elf::Endian,
    reloc: Elf::Rela,
    implicit_addend: bool,
) -> Relocation {
    let mut encoding = RelocationEncoding::Generic;
    // MIPS64EL packs r_info fields differently, so the decoders must know.
    let is_mips64el = header.is_mips64el(endian);
    // Map the machine-specific relocation type to a generic (kind, bit size).
    let (kind, size) = match header.e_machine(endian) {
        elf::EM_AARCH64 => {
            if header.is_type_64() {
                match reloc.r_type(endian, false) {
                    elf::R_AARCH64_ABS64 => (RelocationKind::Absolute, 64),
                    elf::R_AARCH64_ABS32 => (RelocationKind::Absolute, 32),
                    elf::R_AARCH64_ABS16 => (RelocationKind::Absolute, 16),
                    elf::R_AARCH64_PREL64 => (RelocationKind::Relative, 64),
                    elf::R_AARCH64_PREL32 => (RelocationKind::Relative, 32),
                    elf::R_AARCH64_PREL16 => (RelocationKind::Relative, 16),
                    elf::R_AARCH64_CALL26 => {
                        encoding = RelocationEncoding::AArch64Call;
                        (RelocationKind::PltRelative, 26)
                    }
                    r_type => (RelocationKind::Elf(r_type), 0),
                }
            } else {
                // ILP32 AArch64 variant.
                match reloc.r_type(endian, false) {
                    elf::R_AARCH64_P32_ABS32 => (RelocationKind::Absolute, 32),
                    r_type => (RelocationKind::Elf(r_type), 0),
                }
            }
        }
        elf::EM_ARM => match reloc.r_type(endian, false) {
            elf::R_ARM_ABS32 => (RelocationKind::Absolute, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_AVR => match reloc.r_type(endian, false) {
            elf::R_AVR_32 => (RelocationKind::Absolute, 32),
            elf::R_AVR_16 => (RelocationKind::Absolute, 16),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_BPF => match reloc.r_type(endian, false) {
            elf::R_BPF_64_64 => (RelocationKind::Absolute, 64),
            elf::R_BPF_64_32 => (RelocationKind::Absolute, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_CSKY => match reloc.r_type(endian, false) {
            elf::R_CKCORE_ADDR32 => (RelocationKind::Absolute, 32),
            elf::R_CKCORE_PCREL32 => (RelocationKind::Relative, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_386 => match reloc.r_type(endian, false) {
            elf::R_386_32 => (RelocationKind::Absolute, 32),
            elf::R_386_PC32 => (RelocationKind::Relative, 32),
            elf::R_386_GOT32 => (RelocationKind::Got, 32),
            elf::R_386_PLT32 => (RelocationKind::PltRelative, 32),
            elf::R_386_GOTOFF => (RelocationKind::GotBaseOffset, 32),
            elf::R_386_GOTPC => (RelocationKind::GotBaseRelative, 32),
            elf::R_386_16 => (RelocationKind::Absolute, 16),
            elf::R_386_PC16 => (RelocationKind::Relative, 16),
            elf::R_386_8 => (RelocationKind::Absolute, 8),
            elf::R_386_PC8 => (RelocationKind::Relative, 8),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_X86_64 => match reloc.r_type(endian, false) {
            elf::R_X86_64_64 => (RelocationKind::Absolute, 64),
            elf::R_X86_64_PC32 => (RelocationKind::Relative, 32),
            elf::R_X86_64_GOT32 => (RelocationKind::Got, 32),
            elf::R_X86_64_PLT32 => (RelocationKind::PltRelative, 32),
            elf::R_X86_64_GOTPCREL => (RelocationKind::GotRelative, 32),
            elf::R_X86_64_32 => (RelocationKind::Absolute, 32),
            elf::R_X86_64_32S => {
                encoding = RelocationEncoding::X86Signed;
                (RelocationKind::Absolute, 32)
            }
            elf::R_X86_64_16 => (RelocationKind::Absolute, 16),
            elf::R_X86_64_PC16 => (RelocationKind::Relative, 16),
            elf::R_X86_64_8 => (RelocationKind::Absolute, 8),
            elf::R_X86_64_PC8 => (RelocationKind::Relative, 8),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_HEXAGON => match reloc.r_type(endian, false) {
            elf::R_HEX_32 => (RelocationKind::Absolute, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_LOONGARCH => match reloc.r_type(endian, false) {
            elf::R_LARCH_32 => (RelocationKind::Absolute, 32),
            elf::R_LARCH_64 => (RelocationKind::Absolute, 64),
            elf::R_LARCH_32_PCREL => (RelocationKind::Relative, 32),
            elf::R_LARCH_64_PCREL => (RelocationKind::Relative, 64),
            elf::R_LARCH_B16 => {
                encoding = RelocationEncoding::LoongArchBranch;
                (RelocationKind::Relative, 16)
            }
            elf::R_LARCH_B21 => {
                encoding = RelocationEncoding::LoongArchBranch;
                (RelocationKind::Relative, 21)
            }
            elf::R_LARCH_B26 => {
                encoding = RelocationEncoding::LoongArchBranch;
                (RelocationKind::Relative, 26)
            }
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_MIPS => match reloc.r_type(endian, is_mips64el) {
            elf::R_MIPS_16 => (RelocationKind::Absolute, 16),
            elf::R_MIPS_32 => (RelocationKind::Absolute, 32),
            elf::R_MIPS_64 => (RelocationKind::Absolute, 64),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_MSP430 => match reloc.r_type(endian, false) {
            elf::R_MSP430_32 => (RelocationKind::Absolute, 32),
            elf::R_MSP430_16_BYTE => (RelocationKind::Absolute, 16),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_PPC => match reloc.r_type(endian, false) {
            elf::R_PPC_ADDR32 => (RelocationKind::Absolute, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_PPC64 => match reloc.r_type(endian, false) {
            elf::R_PPC64_ADDR32 => (RelocationKind::Absolute, 32),
            elf::R_PPC64_ADDR64 => (RelocationKind::Absolute, 64),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_RISCV => match reloc.r_type(endian, false) {
            elf::R_RISCV_32 => (RelocationKind::Absolute, 32),
            elf::R_RISCV_64 => (RelocationKind::Absolute, 64),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_S390 => match reloc.r_type(endian, false) {
            elf::R_390_8 => (RelocationKind::Absolute, 8),
            elf::R_390_16 => (RelocationKind::Absolute, 16),
            elf::R_390_32 => (RelocationKind::Absolute, 32),
            elf::R_390_64 => (RelocationKind::Absolute, 64),
            elf::R_390_PC16 => (RelocationKind::Relative, 16),
            elf::R_390_PC32 => (RelocationKind::Relative, 32),
            elf::R_390_PC64 => (RelocationKind::Relative, 64),
            elf::R_390_PC16DBL => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::Relative, 16)
            }
            elf::R_390_PC32DBL => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::Relative, 32)
            }
            elf::R_390_PLT16DBL => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::PltRelative, 16)
            }
            elf::R_390_PLT32DBL => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::PltRelative, 32)
            }
            elf::R_390_GOT16 => (RelocationKind::Got, 16),
            elf::R_390_GOT32 => (RelocationKind::Got, 32),
            elf::R_390_GOT64 => (RelocationKind::Got, 64),
            elf::R_390_GOTENT => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::GotRelative, 32)
            }
            elf::R_390_GOTOFF16 => (RelocationKind::GotBaseOffset, 16),
            elf::R_390_GOTOFF32 => (RelocationKind::GotBaseOffset, 32),
            elf::R_390_GOTOFF64 => (RelocationKind::GotBaseOffset, 64),
            elf::R_390_GOTPC => (RelocationKind::GotBaseRelative, 64),
            elf::R_390_GOTPCDBL => {
                encoding = RelocationEncoding::S390xDbl;
                (RelocationKind::GotBaseRelative, 32)
            }
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_SBF => match reloc.r_type(endian, false) {
            elf::R_SBF_64_64 => (RelocationKind::Absolute, 64),
            elf::R_SBF_64_32 => (RelocationKind::Absolute, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_SHARC => match reloc.r_type(endian, false) {
            elf::R_SHARC_ADDR24_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Absolute, 24)
            }
            elf::R_SHARC_ADDR32_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Absolute, 32)
            }
            elf::R_SHARC_ADDR_VAR_V3 => {
                encoding = RelocationEncoding::Generic;
                (RelocationKind::Absolute, 32)
            }
            elf::R_SHARC_PCRSHORT_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Relative, 6)
            }
            elf::R_SHARC_PCRLONG_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Relative, 24)
            }
            elf::R_SHARC_DATA6_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Absolute, 6)
            }
            elf::R_SHARC_DATA16_V3 => {
                encoding = RelocationEncoding::SharcTypeA;
                (RelocationKind::Absolute, 16)
            }
            elf::R_SHARC_DATA6_VISA_V3 => {
                encoding = RelocationEncoding::SharcTypeB;
                (RelocationKind::Absolute, 6)
            }
            elf::R_SHARC_DATA7_VISA_V3 => {
                encoding = RelocationEncoding::SharcTypeB;
                (RelocationKind::Absolute, 7)
            }
            elf::R_SHARC_DATA16_VISA_V3 => {
                encoding = RelocationEncoding::SharcTypeB;
                (RelocationKind::Absolute, 16)
            }
            elf::R_SHARC_PCR6_VISA_V3 => {
                encoding = RelocationEncoding::SharcTypeB;
                (RelocationKind::Relative, 16)
            }
            elf::R_SHARC_ADDR_VAR16_V3 => {
                encoding = RelocationEncoding::Generic;
                (RelocationKind::Absolute, 16)
            }
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        elf::EM_SPARC | elf::EM_SPARC32PLUS | elf::EM_SPARCV9 => {
            match reloc.r_type(endian, false) {
                elf::R_SPARC_32 | elf::R_SPARC_UA32 => (RelocationKind::Absolute, 32),
                elf::R_SPARC_64 | elf::R_SPARC_UA64 => (RelocationKind::Absolute, 64),
                r_type => (RelocationKind::Elf(r_type), 0),
            }
        }
        elf::EM_XTENSA => match reloc.r_type(endian, false) {
            elf::R_XTENSA_32 => (RelocationKind::Absolute, 32),
            elf::R_XTENSA_32_PCREL => (RelocationKind::Relative, 32),
            r_type => (RelocationKind::Elf(r_type), 0),
        },
        // Unknown machines: pass the raw type through.
        _ => (RelocationKind::Elf(reloc.r_type(endian, false)), 0),
    };
    let sym = reloc.r_sym(endian, is_mips64el) as usize;
    // Symbol index 0 means the relocation is not against a symbol.
    let target = if sym == 0 {
        RelocationTarget::Absolute
    } else {
        RelocationTarget::Symbol(SymbolIndex(sym))
    };
    Relocation {
        kind,
        encoding,
        size,
        target,
        addend: reloc.r_addend(endian).into(),
        implicit_addend,
    }
}
|
||||
|
||||
/// A trait for generic access to [`elf::Rel32`] and [`elf::Rel64`].
#[allow(missing_docs)]
pub trait Rel: Debug + Pod + Clone {
    /// Unsigned word: `u32` for 32-bit ELF, `u64` for 64-bit.
    type Word: Into<u64>;
    /// Signed word of the same width.
    type Sword: Into<i64>;
    type Endian: endian::Endian;

    /// The location at which to apply the relocation.
    fn r_offset(&self, endian: Self::Endian) -> Self::Word;
    /// The combined symbol index and relocation type field.
    fn r_info(&self, endian: Self::Endian) -> Self::Word;
    /// The symbol index extracted from `r_info`.
    fn r_sym(&self, endian: Self::Endian) -> u32;
    /// The relocation type extracted from `r_info`.
    fn r_type(&self, endian: Self::Endian) -> u32;
}
|
||||
|
||||
impl<Endian: endian::Endian> Rel for elf::Rel32<Endian> {
    type Word = u32;
    type Sword = i32;
    type Endian = Endian;

    #[inline]
    fn r_offset(&self, endian: Self::Endian) -> Self::Word {
        self.r_offset.get(endian)
    }

    #[inline]
    fn r_info(&self, endian: Self::Endian) -> Self::Word {
        self.r_info.get(endian)
    }

    #[inline]
    fn r_sym(&self, endian: Self::Endian) -> u32 {
        // Resolves to the inherent `r_sym` on `elf::Rel32` (inherent methods
        // take precedence over trait methods), so this is not self-recursive.
        // NOTE(review): the inherent method is defined in `crate::elf` — confirm there.
        self.r_sym(endian)
    }

    #[inline]
    fn r_type(&self, endian: Self::Endian) -> u32 {
        // Likewise resolves to the inherent `r_type` on `elf::Rel32`.
        self.r_type(endian)
    }
}
|
||||
|
||||
// Same delegation pattern as the 32-bit impl: `r_sym`/`r_type` resolve to the
// inherent methods on `elf::Rel64`, not to these trait methods.
impl<Endian: endian::Endian> Rel for elf::Rel64<Endian> {
    type Word = u64;
    type Sword = i64;
    type Endian = Endian;

    #[inline]
    fn r_offset(&self, endian: Self::Endian) -> Self::Word {
        self.r_offset.get(endian)
    }

    #[inline]
    fn r_info(&self, endian: Self::Endian) -> Self::Word {
        self.r_info.get(endian)
    }

    #[inline]
    fn r_sym(&self, endian: Self::Endian) -> u32 {
        self.r_sym(endian)
    }

    #[inline]
    fn r_type(&self, endian: Self::Endian) -> u32 {
        self.r_type(endian)
    }
}
|
||||
|
||||
/// A trait for generic access to [`elf::Rela32`] and [`elf::Rela64`].
#[allow(missing_docs)]
pub trait Rela: Debug + Pod + Clone {
    /// Unsigned word type of the entry fields (`u32` for 32-bit ELF, `u64` for 64-bit).
    type Word: Into<u64>;
    /// Signed type of the relocation addend.
    type Sword: Into<i64>;
    /// Endianness type used to decode the raw fields.
    type Endian: endian::Endian;

    /// Offset of the location to be relocated.
    fn r_offset(&self, endian: Self::Endian) -> Self::Word;
    /// Raw field combining the symbol index and relocation type.
    ///
    /// `is_mips64el` selects the alternate field layout used by little-endian
    /// MIPS64; it is ignored by 32-bit entries.
    fn r_info(&self, endian: Self::Endian, is_mips64el: bool) -> Self::Word;
    /// Constant addend used when computing the relocated value.
    fn r_addend(&self, endian: Self::Endian) -> Self::Sword;
    /// Symbol table index extracted from `r_info`.
    fn r_sym(&self, endian: Self::Endian, is_mips64el: bool) -> u32;
    /// Relocation type extracted from `r_info`.
    fn r_type(&self, endian: Self::Endian, is_mips64el: bool) -> u32;
}
|
||||
|
||||
// 32-bit entries have only one `r_info` layout, so the `is_mips64el` flag is
// ignored here. `r_sym`/`r_type` delegate to the inherent methods on
// `elf::Rela32` (inherent methods take precedence in method resolution).
impl<Endian: endian::Endian> Rela for elf::Rela32<Endian> {
    type Word = u32;
    type Sword = i32;
    type Endian = Endian;

    #[inline]
    fn r_offset(&self, endian: Self::Endian) -> Self::Word {
        self.r_offset.get(endian)
    }

    #[inline]
    fn r_info(&self, endian: Self::Endian, _is_mips64el: bool) -> Self::Word {
        self.r_info.get(endian)
    }

    #[inline]
    fn r_addend(&self, endian: Self::Endian) -> Self::Sword {
        self.r_addend.get(endian)
    }

    #[inline]
    fn r_sym(&self, endian: Self::Endian, _is_mips64el: bool) -> u32 {
        self.r_sym(endian)
    }

    #[inline]
    fn r_type(&self, endian: Self::Endian, _is_mips64el: bool) -> u32 {
        self.r_type(endian)
    }
}
|
||||
|
||||
// 64-bit entries pass `is_mips64el` through to the inherent helpers on
// `elf::Rela64`, which handle the alternate little-endian MIPS64 `r_info`
// layout.
impl<Endian: endian::Endian> Rela for elf::Rela64<Endian> {
    type Word = u64;
    type Sword = i64;
    type Endian = Endian;

    #[inline]
    fn r_offset(&self, endian: Self::Endian) -> Self::Word {
        self.r_offset.get(endian)
    }

    #[inline]
    fn r_info(&self, endian: Self::Endian, is_mips64el: bool) -> Self::Word {
        self.get_r_info(endian, is_mips64el)
    }

    #[inline]
    fn r_addend(&self, endian: Self::Endian) -> Self::Sword {
        self.r_addend.get(endian)
    }

    #[inline]
    fn r_sym(&self, endian: Self::Endian, is_mips64el: bool) -> u32 {
        self.r_sym(endian, is_mips64el)
    }

    #[inline]
    fn r_type(&self, endian: Self::Endian, is_mips64el: bool) -> u32 {
        self.r_type(endian, is_mips64el)
    }
}
|
||||
1150
vendor/object/src/read/elf/section.rs
vendored
Normal file
1150
vendor/object/src/read/elf/section.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
334
vendor/object/src/read/elf/segment.rs
vendored
Normal file
334
vendor/object/src/read/elf/segment.rs
vendored
Normal file
@@ -0,0 +1,334 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{mem, slice, str};
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{self, Bytes, ObjectSegment, ReadError, ReadRef, SegmentFlags};
|
||||
|
||||
use super::{ElfFile, FileHeader, NoteIterator};
|
||||
|
||||
/// An iterator for the segments in an [`ElfFile32`](super::ElfFile32).
pub type ElfSegmentIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSegmentIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the segments in an [`ElfFile64`](super::ElfFile64).
pub type ElfSegmentIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSegmentIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the segments in an [`ElfFile`].
#[derive(Debug)]
pub struct ElfSegmentIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Backing file; supplies endianness and the underlying data.
    pub(super) file: &'file ElfFile<'data, Elf, R>,
    // Remaining program headers; non-`PT_LOAD` entries are skipped by `next`.
    pub(super) iter: slice::Iter<'data, Elf::ProgramHeader>,
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfSegmentIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = ElfSegment<'data, 'file, Elf, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
for segment in self.iter.by_ref() {
|
||||
if segment.p_type(self.file.endian) == elf::PT_LOAD {
|
||||
return Some(ElfSegment {
|
||||
file: self.file,
|
||||
segment,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in an [`ElfFile32`](super::ElfFile32).
pub type ElfSegment32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSegment<'data, 'file, elf::FileHeader32<Endian>, R>;
/// A segment in an [`ElfFile64`](super::ElfFile64).
pub type ElfSegment64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSegment<'data, 'file, elf::FileHeader64<Endian>, R>;

/// A segment in an [`ElfFile`].
///
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
#[derive(Debug)]
pub struct ElfSegment<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Backing file; supplies endianness and the raw data for this segment.
    pub(super) file: &'file ElfFile<'data, Elf, R>,
    // The raw program header describing this segment.
    pub(super) segment: &'data Elf::ProgramHeader,
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ElfSegment<'data, 'file, Elf, R> {
    /// Read the segment's file data, converting an out-of-bounds offset or
    /// size into a read error.
    fn bytes(&self) -> read::Result<&'data [u8]> {
        self.segment
            .data(self.file.endian, self.file.data)
            .read_error("Invalid ELF segment size or offset")
    }
}
|
||||
|
||||
// Marker impl for the crate's sealed-trait pattern; required before
// implementing `ObjectSegment` for this type.
impl<'data, 'file, Elf, R> read::private::Sealed for ElfSegment<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Elf, R> ObjectSegment<'data> for ElfSegment<'data, 'file, Elf, R>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Virtual address of the segment (`p_vaddr`).
    #[inline]
    fn address(&self) -> u64 {
        self.segment.p_vaddr(self.file.endian).into()
    }

    // In-memory size of the segment (`p_memsz`).
    #[inline]
    fn size(&self) -> u64 {
        self.segment.p_memsz(self.file.endian).into()
    }

    #[inline]
    fn align(&self) -> u64 {
        self.segment.p_align(self.file.endian).into()
    }

    // (offset, size) of the segment's data within the file.
    #[inline]
    fn file_range(&self) -> (u64, u64) {
        self.segment.file_range(self.file.endian)
    }

    #[inline]
    fn data(&self) -> read::Result<&'data [u8]> {
        self.bytes()
    }

    // Slice the segment data for a virtual address range; `Ok(None)` when the
    // requested range is not contained in this segment.
    fn data_range(&self, address: u64, size: u64) -> read::Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    // ELF program segments are unnamed, so both name accessors return `None`.
    #[inline]
    fn name_bytes(&self) -> read::Result<Option<&[u8]>> {
        Ok(None)
    }

    #[inline]
    fn name(&self) -> read::Result<Option<&str>> {
        Ok(None)
    }

    // Expose the raw `p_flags` so callers can inspect ELF-specific bits.
    #[inline]
    fn flags(&self) -> SegmentFlags {
        let p_flags = self.segment.p_flags(self.file.endian);
        SegmentFlags::Elf { p_flags }
    }
}
|
||||
|
||||
/// A trait for generic access to [`elf::ProgramHeader32`] and [`elf::ProgramHeader64`].
#[allow(missing_docs)]
pub trait ProgramHeader: Debug + Pod {
    /// The file header type this program header belongs to.
    type Elf: FileHeader<ProgramHeader = Self, Endian = Self::Endian, Word = Self::Word>;
    /// Unsigned word type of the header fields (`u32` or `u64`).
    type Word: Into<u64>;
    /// Endianness type used to decode the raw fields.
    type Endian: endian::Endian;

    fn p_type(&self, endian: Self::Endian) -> u32;
    fn p_flags(&self, endian: Self::Endian) -> u32;
    fn p_offset(&self, endian: Self::Endian) -> Self::Word;
    fn p_vaddr(&self, endian: Self::Endian) -> Self::Word;
    fn p_paddr(&self, endian: Self::Endian) -> Self::Word;
    fn p_filesz(&self, endian: Self::Endian) -> Self::Word;
    fn p_memsz(&self, endian: Self::Endian) -> Self::Word;
    fn p_align(&self, endian: Self::Endian) -> Self::Word;

    /// Return the offset and size of the segment in the file.
    fn file_range(&self, endian: Self::Endian) -> (u64, u64) {
        (self.p_offset(endian).into(), self.p_filesz(endian).into())
    }

    /// Return the segment data.
    ///
    /// Returns `Err` for invalid values.
    fn data<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> Result<&'data [u8], ()> {
        let (offset, size) = self.file_range(endian);
        data.read_bytes_at(offset, size)
    }

    /// Return the segment data as a slice of the given type.
    ///
    /// Allows padding at the end of the data.
    /// Returns `Ok(&[])` if the segment has no data.
    /// Returns `Err` for invalid values, including bad alignment.
    fn data_as_array<'data, T: Pod, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> Result<&'data [T], ()> {
        let mut data = self.data(endian, data).map(Bytes)?;
        // Integer division drops any trailing partial element, which is what
        // allows padding at the end of the segment.
        data.read_slice(data.len() / mem::size_of::<T>())
    }

    /// Return the segment data in the given virtual address range
    ///
    /// Returns `Ok(None)` if the segment does not contain the address.
    /// Returns `Err` for invalid values.
    fn data_range<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
        address: u64,
        size: u64,
    ) -> Result<Option<&'data [u8]>, ()> {
        Ok(read::util::data_range(
            self.data(endian, data)?,
            self.p_vaddr(endian).into(),
            address,
            size,
        ))
    }

    /// Return entries in a dynamic segment.
    ///
    /// Returns `Ok(None)` if the segment is not `PT_DYNAMIC`.
    /// Returns `Err` for invalid values.
    fn dynamic<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<Option<&'data [<Self::Elf as FileHeader>::Dyn]>> {
        if self.p_type(endian) != elf::PT_DYNAMIC {
            return Ok(None);
        }
        let dynamic = self
            .data_as_array(endian, data)
            .read_error("Invalid ELF dynamic segment offset or size")?;
        Ok(Some(dynamic))
    }

    /// Return a note iterator for the segment data.
    ///
    /// Returns `Ok(None)` if the segment does not contain notes.
    /// Returns `Err` for invalid values.
    fn notes<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> read::Result<Option<NoteIterator<'data, Self::Elf>>> {
        if self.p_type(endian) != elf::PT_NOTE {
            return Ok(None);
        }
        let data = self
            .data(endian, data)
            .read_error("Invalid ELF note segment offset or size")?;
        // Note parsing depends on the segment alignment (`p_align`).
        let notes = NoteIterator::new(endian, self.p_align(endian), data)?;
        Ok(Some(notes))
    }
}
|
||||
|
||||
// Plain field accessors for 32-bit program headers; each decodes the
// corresponding raw field with the given endianness.
impl<Endian: endian::Endian> ProgramHeader for elf::ProgramHeader32<Endian> {
    type Word = u32;
    type Endian = Endian;
    type Elf = elf::FileHeader32<Endian>;

    #[inline]
    fn p_type(&self, endian: Self::Endian) -> u32 {
        self.p_type.get(endian)
    }

    #[inline]
    fn p_flags(&self, endian: Self::Endian) -> u32 {
        self.p_flags.get(endian)
    }

    #[inline]
    fn p_offset(&self, endian: Self::Endian) -> Self::Word {
        self.p_offset.get(endian)
    }

    #[inline]
    fn p_vaddr(&self, endian: Self::Endian) -> Self::Word {
        self.p_vaddr.get(endian)
    }

    #[inline]
    fn p_paddr(&self, endian: Self::Endian) -> Self::Word {
        self.p_paddr.get(endian)
    }

    #[inline]
    fn p_filesz(&self, endian: Self::Endian) -> Self::Word {
        self.p_filesz.get(endian)
    }

    #[inline]
    fn p_memsz(&self, endian: Self::Endian) -> Self::Word {
        self.p_memsz.get(endian)
    }

    #[inline]
    fn p_align(&self, endian: Self::Endian) -> Self::Word {
        self.p_align.get(endian)
    }
}
|
||||
|
||||
// Plain field accessors for 64-bit program headers; mirrors the 32-bit impl
// with 64-bit words.
impl<Endian: endian::Endian> ProgramHeader for elf::ProgramHeader64<Endian> {
    type Word = u64;
    type Endian = Endian;
    type Elf = elf::FileHeader64<Endian>;

    #[inline]
    fn p_type(&self, endian: Self::Endian) -> u32 {
        self.p_type.get(endian)
    }

    #[inline]
    fn p_flags(&self, endian: Self::Endian) -> u32 {
        self.p_flags.get(endian)
    }

    #[inline]
    fn p_offset(&self, endian: Self::Endian) -> Self::Word {
        self.p_offset.get(endian)
    }

    #[inline]
    fn p_vaddr(&self, endian: Self::Endian) -> Self::Word {
        self.p_vaddr.get(endian)
    }

    #[inline]
    fn p_paddr(&self, endian: Self::Endian) -> Self::Word {
        self.p_paddr.get(endian)
    }

    #[inline]
    fn p_filesz(&self, endian: Self::Endian) -> Self::Word {
        self.p_filesz.get(endian)
    }

    #[inline]
    fn p_memsz(&self, endian: Self::Endian) -> Self::Word {
        self.p_memsz.get(endian)
    }

    #[inline]
    fn p_align(&self, endian: Self::Endian) -> Self::Word {
        self.p_align.get(endian)
    }
}
|
||||
595
vendor/object/src/read/elf/symbol.rs
vendored
Normal file
595
vendor/object/src/read/elf/symbol.rs
vendored
Normal file
@@ -0,0 +1,595 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
use core::str;
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, SectionIndex, SymbolFlags,
|
||||
SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry, SymbolScope, SymbolSection,
|
||||
};
|
||||
use crate::{elf, U32};
|
||||
|
||||
use super::{FileHeader, SectionHeader, SectionTable};
|
||||
|
||||
/// A table of symbol entries in an ELF file.
///
/// Also includes the string table used for the symbol names.
///
/// Returned by [`SectionTable::symbols`].
#[derive(Debug, Clone, Copy)]
pub struct SymbolTable<'data, Elf: FileHeader, R = &'data [u8]>
where
    R: ReadRef<'data>,
{
    // Index of the section this table was parsed from.
    section: SectionIndex,
    // Index of the linked string table section (from `sh_link`).
    string_section: SectionIndex,
    // Index of the associated `SHT_SYMTAB_SHNDX` section, or 0 if none.
    shndx_section: SectionIndex,
    // The raw symbol entries.
    symbols: &'data [Elf::Sym],
    // String table holding the symbol names.
    strings: StringTable<'data, R>,
    // Extended section indices, indexed in parallel with `symbols`;
    // empty if the file has no shndx section.
    shndx: &'data [U32<Elf::Endian>],
}
|
||||
|
||||
impl<'data, Elf: FileHeader, R: ReadRef<'data>> Default for SymbolTable<'data, Elf, R> {
|
||||
fn default() -> Self {
|
||||
SymbolTable {
|
||||
section: SectionIndex(0),
|
||||
string_section: SectionIndex(0),
|
||||
shndx_section: SectionIndex(0),
|
||||
symbols: &[],
|
||||
strings: Default::default(),
|
||||
shndx: &[],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader, R: ReadRef<'data>> SymbolTable<'data, Elf, R> {
    /// Parse the given symbol table section.
    ///
    /// `section` must be of type `SHT_SYMTAB` or `SHT_DYNSYM`; this is only
    /// checked with a debug assertion.
    pub fn parse(
        endian: Elf::Endian,
        data: R,
        sections: &SectionTable<'data, Elf, R>,
        section_index: SectionIndex,
        section: &Elf::SectionHeader,
    ) -> read::Result<SymbolTable<'data, Elf, R>> {
        debug_assert!(
            section.sh_type(endian) == elf::SHT_DYNSYM
                || section.sh_type(endian) == elf::SHT_SYMTAB
        );

        let symbols = section
            .data_as_array(endian, data)
            .read_error("Invalid ELF symbol table data")?;

        // `sh_link` of a symbol table section is the index of its string table.
        let link = SectionIndex(section.sh_link(endian) as usize);
        let strings = sections.strings(endian, data, link)?;

        // Find the optional SHT_SYMTAB_SHNDX section providing extended
        // section indices for this table; its `sh_link` points back at us.
        let mut shndx_section = SectionIndex(0);
        let mut shndx = &[][..];
        for (i, s) in sections.iter().enumerate() {
            if s.sh_type(endian) == elf::SHT_SYMTAB_SHNDX
                && s.sh_link(endian) as usize == section_index.0
            {
                shndx_section = SectionIndex(i);
                shndx = s
                    .data_as_array(endian, data)
                    .read_error("Invalid ELF symtab_shndx data")?;
            }
        }

        Ok(SymbolTable {
            section: section_index,
            string_section: link,
            symbols,
            strings,
            shndx,
            shndx_section,
        })
    }

    /// Return the section index of this symbol table.
    #[inline]
    pub fn section(&self) -> SectionIndex {
        self.section
    }

    /// Return the section index of the shndx table.
    #[inline]
    pub fn shndx_section(&self) -> SectionIndex {
        self.shndx_section
    }

    /// Return the section index of the linked string table.
    #[inline]
    pub fn string_section(&self) -> SectionIndex {
        self.string_section
    }

    /// Return the string table used for the symbol names.
    #[inline]
    pub fn strings(&self) -> StringTable<'data, R> {
        self.strings
    }

    /// Return the symbol table.
    #[inline]
    pub fn symbols(&self) -> &'data [Elf::Sym] {
        self.symbols
    }

    /// Iterate over the symbols.
    #[inline]
    pub fn iter(&self) -> slice::Iter<'data, Elf::Sym> {
        self.symbols.iter()
    }

    /// Return true if the symbol table is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.symbols.is_empty()
    }

    /// The number of symbols.
    #[inline]
    pub fn len(&self) -> usize {
        self.symbols.len()
    }

    /// Return the symbol at the given index.
    ///
    /// Out-of-range indices produce a read error rather than a panic.
    pub fn symbol(&self, index: usize) -> read::Result<&'data Elf::Sym> {
        self.symbols
            .get(index)
            .read_error("Invalid ELF symbol index")
    }

    /// Return the extended section index for the given symbol if present.
    #[inline]
    pub fn shndx(&self, endian: Elf::Endian, index: usize) -> Option<u32> {
        self.shndx.get(index).map(|x| x.get(endian))
    }

    /// Return the section index for the given symbol.
    ///
    /// This uses the extended section index if present.
    pub fn symbol_section(
        &self,
        endian: Elf::Endian,
        symbol: &'data Elf::Sym,
        index: usize,
    ) -> read::Result<Option<SectionIndex>> {
        match symbol.st_shndx(endian) {
            elf::SHN_UNDEF => Ok(None),
            // SHN_XINDEX means the real index lives in the shndx table.
            elf::SHN_XINDEX => self
                .shndx(endian, index)
                .read_error("Missing ELF symbol extended index")
                .map(|index| Some(SectionIndex(index as usize))),
            shndx if shndx < elf::SHN_LORESERVE => Ok(Some(SectionIndex(shndx.into()))),
            // Other reserved indices (SHN_ABS, SHN_COMMON, ...) map to no section.
            _ => Ok(None),
        }
    }

    /// Return the symbol name for the given symbol.
    pub fn symbol_name(
        &self,
        endian: Elf::Endian,
        symbol: &'data Elf::Sym,
    ) -> read::Result<&'data [u8]> {
        symbol.name(endian, self.strings)
    }

    /// Construct a map from addresses to a user-defined map entry.
    ///
    /// Only symbols that are definitions are considered; `f` may filter
    /// further by returning `None`.
    pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Elf::Sym) -> Option<Entry>>(
        &self,
        endian: Elf::Endian,
        f: F,
    ) -> SymbolMap<Entry> {
        let mut symbols = Vec::with_capacity(self.symbols.len());
        for symbol in self.symbols {
            if !symbol.is_definition(endian) {
                continue;
            }
            if let Some(entry) = f(symbol) {
                symbols.push(entry);
            }
        }
        SymbolMap::new(symbols)
    }
}
|
||||
|
||||
/// A symbol table in an [`ElfFile32`](super::ElfFile32).
pub type ElfSymbolTable32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbolTable<'data, 'file, elf::FileHeader32<Endian>, R>;
/// A symbol table in an [`ElfFile64`](super::ElfFile64).
pub type ElfSymbolTable64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbolTable<'data, 'file, elf::FileHeader64<Endian>, R>;

/// A symbol table in an [`ElfFile`](super::ElfFile).
#[derive(Debug, Clone, Copy)]
pub struct ElfSymbolTable<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Endianness of the file, needed to decode symbol fields.
    pub(super) endian: Elf::Endian,
    // The underlying parsed symbol table.
    pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
}
|
||||
|
||||
// Marker impl for the crate's sealed-trait pattern.
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> read::private::Sealed
    for ElfSymbolTable<'data, 'file, Elf, R>
{
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ObjectSymbolTable<'data>
    for ElfSymbolTable<'data, 'file, Elf, R>
{
    type Symbol = ElfSymbol<'data, 'file, Elf, R>;
    type SymbolIterator = ElfSymbolIterator<'data, 'file, Elf, R>;

    // Iterate from index 0, so the reserved null symbol entry is included.
    fn symbols(&self) -> Self::SymbolIterator {
        ElfSymbolIterator {
            endian: self.endian,
            symbols: self.symbols,
            index: 0,
        }
    }

    // Look up a single symbol; out-of-range indices yield a read error.
    fn symbol_by_index(&self, index: SymbolIndex) -> read::Result<Self::Symbol> {
        let symbol = self.symbols.symbol(index.0)?;
        Ok(ElfSymbol {
            endian: self.endian,
            symbols: self.symbols,
            index,
            symbol,
        })
    }
}
|
||||
|
||||
/// An iterator for the symbols in an [`ElfFile32`](super::ElfFile32).
pub type ElfSymbolIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbolIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
/// An iterator for the symbols in an [`ElfFile64`](super::ElfFile64).
pub type ElfSymbolIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbolIterator<'data, 'file, elf::FileHeader64<Endian>, R>;

/// An iterator for the symbols in an [`ElfFile`](super::ElfFile).
pub struct ElfSymbolIterator<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Endianness of the file, needed to decode symbol fields.
    pub(super) endian: Elf::Endian,
    // The table being iterated.
    pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
    // Index of the next symbol to yield.
    pub(super) index: usize,
}
|
||||
|
||||
// Manual `Debug` impl that prints only the type name rather than the fields.
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> fmt::Debug
    for ElfSymbolIterator<'data, 'file, Elf, R>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ElfSymbolIterator").finish()
    }
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> Iterator
|
||||
for ElfSymbolIterator<'data, 'file, Elf, R>
|
||||
{
|
||||
type Item = ElfSymbol<'data, 'file, Elf, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.symbols.symbols.get(index)?;
|
||||
self.index += 1;
|
||||
Some(ElfSymbol {
|
||||
endian: self.endian,
|
||||
symbols: self.symbols,
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in an [`ElfFile32`](super::ElfFile32).
pub type ElfSymbol32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbol<'data, 'file, elf::FileHeader32<Endian>, R>;
/// A symbol in an [`ElfFile64`](super::ElfFile64).
pub type ElfSymbol64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    ElfSymbol<'data, 'file, elf::FileHeader64<Endian>, R>;

/// A symbol in an [`ElfFile`](super::ElfFile).
///
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
#[derive(Debug, Clone, Copy)]
pub struct ElfSymbol<'data, 'file, Elf, R = &'data [u8]>
where
    Elf: FileHeader,
    R: ReadRef<'data>,
{
    // Endianness of the file, needed to decode symbol fields.
    pub(super) endian: Elf::Endian,
    // The table this symbol came from (provides string and shndx lookups).
    pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
    // This symbol's index within the table.
    pub(super) index: SymbolIndex,
    // The raw symbol entry.
    pub(super) symbol: &'data Elf::Sym,
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ElfSymbol<'data, 'file, Elf, R> {
    /// Return a reference to the raw symbol structure.
    ///
    /// Useful when a caller needs ELF fields that the generic
    /// [`ObjectSymbol`] trait does not expose.
    #[inline]
    pub fn raw_symbol(&self) -> &'data Elf::Sym {
        self.symbol
    }
}
|
||||
|
||||
// Marker impl for the crate's sealed-trait pattern.
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> read::private::Sealed
    for ElfSymbol<'data, 'file, Elf, R>
{
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ObjectSymbol<'data>
    for ElfSymbol<'data, 'file, Elf, R>
{
    #[inline]
    fn index(&self) -> SymbolIndex {
        self.index
    }

    fn name_bytes(&self) -> read::Result<&'data [u8]> {
        self.symbol.name(self.endian, self.symbols.strings())
    }

    // Like `name_bytes`, but additionally requires the name to be valid UTF-8.
    fn name(&self) -> read::Result<&'data str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 ELF symbol name")
    }

    #[inline]
    fn address(&self) -> u64 {
        self.symbol.st_value(self.endian).into()
    }

    #[inline]
    fn size(&self) -> u64 {
        self.symbol.st_size(self.endian).into()
    }

    // Map the ELF `st_type` onto the generic `SymbolKind`.
    fn kind(&self) -> SymbolKind {
        match self.symbol.st_type() {
            // Index 0 is the reserved null symbol entry.
            elf::STT_NOTYPE if self.index.0 == 0 => SymbolKind::Null,
            elf::STT_NOTYPE => SymbolKind::Unknown,
            elf::STT_OBJECT | elf::STT_COMMON => SymbolKind::Data,
            elf::STT_FUNC | elf::STT_GNU_IFUNC => SymbolKind::Text,
            elf::STT_SECTION => SymbolKind::Section,
            elf::STT_FILE => SymbolKind::File,
            elf::STT_TLS => SymbolKind::Tls,
            _ => SymbolKind::Unknown,
        }
    }

    // Map `st_shndx` (with extended-index support) onto the generic
    // `SymbolSection`.
    fn section(&self) -> SymbolSection {
        match self.symbol.st_shndx(self.endian) {
            elf::SHN_UNDEF => SymbolSection::Undefined,
            elf::SHN_ABS => {
                // STT_FILE symbols use SHN_ABS; report them as sectionless
                // rather than absolute.
                if self.symbol.st_type() == elf::STT_FILE {
                    SymbolSection::None
                } else {
                    SymbolSection::Absolute
                }
            }
            elf::SHN_COMMON => SymbolSection::Common,
            // The real section index is stored in the SHT_SYMTAB_SHNDX table.
            elf::SHN_XINDEX => match self.symbols.shndx(self.endian, self.index.0) {
                Some(index) => SymbolSection::Section(SectionIndex(index as usize)),
                None => SymbolSection::Unknown,
            },
            index if index < elf::SHN_LORESERVE => {
                SymbolSection::Section(SectionIndex(index as usize))
            }
            _ => SymbolSection::Unknown,
        }
    }

    #[inline]
    fn is_undefined(&self) -> bool {
        self.symbol.st_shndx(self.endian) == elf::SHN_UNDEF
    }

    #[inline]
    fn is_definition(&self) -> bool {
        self.symbol.is_definition(self.endian)
    }

    #[inline]
    fn is_common(&self) -> bool {
        self.symbol.st_shndx(self.endian) == elf::SHN_COMMON
    }

    #[inline]
    fn is_weak(&self) -> bool {
        self.symbol.st_bind() == elf::STB_WEAK
    }

    // Derive scope from binding and visibility; undefined symbols have no
    // meaningful scope.
    fn scope(&self) -> SymbolScope {
        if self.symbol.st_shndx(self.endian) == elf::SHN_UNDEF {
            SymbolScope::Unknown
        } else {
            match self.symbol.st_bind() {
                elf::STB_LOCAL => SymbolScope::Compilation,
                elf::STB_GLOBAL | elf::STB_WEAK => {
                    if self.symbol.st_visibility() == elf::STV_HIDDEN {
                        SymbolScope::Linkage
                    } else {
                        SymbolScope::Dynamic
                    }
                }
                _ => SymbolScope::Unknown,
            }
        }
    }

    #[inline]
    fn is_global(&self) -> bool {
        self.symbol.st_bind() != elf::STB_LOCAL
    }

    #[inline]
    fn is_local(&self) -> bool {
        self.symbol.st_bind() == elf::STB_LOCAL
    }

    // Expose the raw info/other bytes for callers needing ELF specifics.
    #[inline]
    fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
        SymbolFlags::Elf {
            st_info: self.symbol.st_info(),
            st_other: self.symbol.st_other(),
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`elf::Sym32`] and [`elf::Sym64`].
#[allow(missing_docs)]
pub trait Sym: Debug + Pod {
    /// Unsigned word type of the symbol fields (`u32` or `u64`).
    type Word: Into<u64>;
    /// Endianness type used to decode the raw fields.
    type Endian: endian::Endian;

    fn st_name(&self, endian: Self::Endian) -> u32;
    fn st_info(&self) -> u8;
    fn st_bind(&self) -> u8;
    fn st_type(&self) -> u8;
    fn st_other(&self) -> u8;
    fn st_visibility(&self) -> u8;
    fn st_shndx(&self, endian: Self::Endian) -> u16;
    fn st_value(&self, endian: Self::Endian) -> Self::Word;
    fn st_size(&self, endian: Self::Endian) -> Self::Word;

    /// Parse the symbol name from the string table.
    fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        strings: StringTable<'data, R>,
    ) -> read::Result<&'data [u8]> {
        strings
            .get(self.st_name(endian))
            .read_error("Invalid ELF symbol name offset")
    }

    /// Return true if the symbol is undefined.
    #[inline]
    fn is_undefined(&self, endian: Self::Endian) -> bool {
        self.st_shndx(endian) == elf::SHN_UNDEF
    }

    /// Return true if the symbol is a definition of a function or data object.
    fn is_definition(&self, endian: Self::Endian) -> bool {
        let shndx = self.st_shndx(endian);
        // Exclude undefined symbols and reserved section indices, except
        // SHN_XINDEX, which refers to a real section via the shndx table.
        if shndx == elf::SHN_UNDEF || (shndx >= elf::SHN_LORESERVE && shndx != elf::SHN_XINDEX) {
            return false;
        }
        match self.st_type() {
            // Untyped symbols count as definitions only if they have a size.
            elf::STT_NOTYPE => self.st_size(endian).into() != 0,
            elf::STT_FUNC | elf::STT_OBJECT => true,
            _ => false,
        }
    }
}
|
||||
|
||||
// Field accessors for 32-bit symbols. `st_bind`/`st_type`/`st_visibility`
// delegate to the inherent methods of the same name on `elf::Sym32`
// (inherent methods take precedence in method resolution).
impl<Endian: endian::Endian> Sym for elf::Sym32<Endian> {
    type Word = u32;
    type Endian = Endian;

    #[inline]
    fn st_name(&self, endian: Self::Endian) -> u32 {
        self.st_name.get(endian)
    }

    #[inline]
    fn st_info(&self) -> u8 {
        self.st_info
    }

    #[inline]
    fn st_bind(&self) -> u8 {
        self.st_bind()
    }

    #[inline]
    fn st_type(&self) -> u8 {
        self.st_type()
    }

    #[inline]
    fn st_other(&self) -> u8 {
        self.st_other
    }

    #[inline]
    fn st_visibility(&self) -> u8 {
        self.st_visibility()
    }

    #[inline]
    fn st_shndx(&self, endian: Self::Endian) -> u16 {
        self.st_shndx.get(endian)
    }

    #[inline]
    fn st_value(&self, endian: Self::Endian) -> Self::Word {
        self.st_value.get(endian)
    }

    #[inline]
    fn st_size(&self, endian: Self::Endian) -> Self::Word {
        self.st_size.get(endian)
    }
}
|
||||
|
||||
// Field accessors for 64-bit symbols; mirrors the 32-bit impl with 64-bit
// words and the same inherent-method delegation.
impl<Endian: endian::Endian> Sym for elf::Sym64<Endian> {
    type Word = u64;
    type Endian = Endian;

    #[inline]
    fn st_name(&self, endian: Self::Endian) -> u32 {
        self.st_name.get(endian)
    }

    #[inline]
    fn st_info(&self) -> u8 {
        self.st_info
    }

    #[inline]
    fn st_bind(&self) -> u8 {
        self.st_bind()
    }

    #[inline]
    fn st_type(&self) -> u8 {
        self.st_type()
    }

    #[inline]
    fn st_other(&self) -> u8 {
        self.st_other
    }

    #[inline]
    fn st_visibility(&self) -> u8 {
        self.st_visibility()
    }

    #[inline]
    fn st_shndx(&self, endian: Self::Endian) -> u16 {
        self.st_shndx.get(endian)
    }

    #[inline]
    fn st_value(&self, endian: Self::Endian) -> Self::Word {
        self.st_value.get(endian)
    }

    #[inline]
    fn st_size(&self, endian: Self::Endian) -> Self::Word {
        self.st_size.get(endian)
    }
}
|
||||
424
vendor/object/src/read/elf/version.rs
vendored
Normal file
424
vendor/object/src/read/elf/version.rs
vendored
Normal file
@@ -0,0 +1,424 @@
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{Bytes, ReadError, ReadRef, Result, StringTable};
|
||||
use crate::{elf, endian};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// A version index.
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct VersionIndex(pub u16);
|
||||
|
||||
impl VersionIndex {
|
||||
/// Return the version index.
|
||||
pub fn index(&self) -> u16 {
|
||||
self.0 & elf::VERSYM_VERSION
|
||||
}
|
||||
|
||||
/// Return true if it is the local index.
|
||||
pub fn is_local(&self) -> bool {
|
||||
self.index() == elf::VER_NDX_LOCAL
|
||||
}
|
||||
|
||||
/// Return true if it is the global index.
|
||||
pub fn is_global(&self) -> bool {
|
||||
self.index() == elf::VER_NDX_GLOBAL
|
||||
}
|
||||
|
||||
/// Return the hidden flag.
|
||||
pub fn is_hidden(&self) -> bool {
|
||||
self.0 & elf::VERSYM_HIDDEN != 0
|
||||
}
|
||||
}
|
||||
|
||||
/// A version definition or requirement.
///
/// This is derived from entries in the [`elf::SHT_GNU_VERDEF`] and [`elf::SHT_GNU_VERNEED`] sections.
#[derive(Debug, Default, Clone, Copy)]
pub struct Version<'data> {
    name: &'data [u8],
    hash: u32,
    // Used to keep track of valid indices in `VersionTable`.
    valid: bool,
}

impl<'data> Version<'data> {
    /// Return hash of the version name.
    pub fn hash(&self) -> u32 {
        self.hash
    }

    /// Return the version name.
    pub fn name(&self) -> &'data [u8] {
        self.name
    }
}
|
||||
|
||||
/// A table of version definitions and requirements.
///
/// It allows looking up the version information for a given symbol index.
///
/// This is derived from entries in the [`elf::SHT_GNU_VERSYM`], [`elf::SHT_GNU_VERDEF`]
/// and [`elf::SHT_GNU_VERNEED`] sections.
///
/// Returned by [`SectionTable::versions`](super::SectionTable::versions).
#[derive(Debug, Clone)]
pub struct VersionTable<'data, Elf: FileHeader> {
    // One `Versym` entry per symbol table entry, mapping symbol -> version index.
    symbols: &'data [elf::Versym<Elf::Endian>],
    // Indexed by version index; entries with `valid == false` are holes.
    versions: Vec<Version<'data>>,
}

impl<'data, Elf: FileHeader> Default for VersionTable<'data, Elf> {
    // An empty table: every lookup falls back to the global version index.
    fn default() -> Self {
        VersionTable {
            symbols: &[],
            versions: Vec::new(),
        }
    }
}
|
||||
|
||||
impl<'data, Elf: FileHeader> VersionTable<'data, Elf> {
    /// Parse the version sections.
    ///
    /// `versyms` is the contents of the `SHT_GNU_versym` section, `verdefs`
    /// and `verneeds` iterate the corresponding version definition and
    /// requirement sections, and `strings` is the string table used by those
    /// sections for version names.
    pub fn parse<R: ReadRef<'data>>(
        endian: Elf::Endian,
        versyms: &'data [elf::Versym<Elf::Endian>],
        verdefs: Option<VerdefIterator<'data, Elf>>,
        verneeds: Option<VerneedIterator<'data, Elf>>,
        strings: StringTable<'data, R>,
    ) -> Result<Self> {
        // First pass: find the largest version index in use so the lookup
        // table can be allocated up front.
        let mut max_index = 0;
        if let Some(mut verdefs) = verdefs.clone() {
            while let Some((verdef, _)) = verdefs.next()? {
                // The base entry names the file itself, not a version.
                if verdef.vd_flags.get(endian) & elf::VER_FLG_BASE != 0 {
                    continue;
                }
                let index = verdef.vd_ndx.get(endian) & elf::VERSYM_VERSION;
                if max_index < index {
                    max_index = index;
                }
            }
        }
        if let Some(mut verneeds) = verneeds.clone() {
            while let Some((_, mut vernauxs)) = verneeds.next()? {
                while let Some(vernaux) = vernauxs.next()? {
                    let index = vernaux.vna_other.get(endian) & elf::VERSYM_VERSION;
                    if max_index < index {
                        max_index = index;
                    }
                }
            }
        }

        // Indices should be sequential, but this could be up to
        // 32k * size_of::<Version>() if max_index is bad.
        let mut versions = vec![Version::default(); max_index as usize + 1];

        // Second pass: fill in the table. Definitions first, then
        // requirements; indices at or below VER_NDX_GLOBAL are reserved and
        // never stored.
        if let Some(mut verdefs) = verdefs {
            while let Some((verdef, mut verdauxs)) = verdefs.next()? {
                if verdef.vd_flags.get(endian) & elf::VER_FLG_BASE != 0 {
                    continue;
                }
                let index = verdef.vd_ndx.get(endian) & elf::VERSYM_VERSION;
                if index <= elf::VER_NDX_GLOBAL {
                    // TODO: return error?
                    continue;
                }
                // Only the first aux entry carries the version name.
                if let Some(verdaux) = verdauxs.next()? {
                    versions[usize::from(index)] = Version {
                        name: verdaux.name(endian, strings)?,
                        hash: verdef.vd_hash.get(endian),
                        valid: true,
                    };
                }
            }
        }
        if let Some(mut verneeds) = verneeds {
            while let Some((_, mut vernauxs)) = verneeds.next()? {
                while let Some(vernaux) = vernauxs.next()? {
                    let index = vernaux.vna_other.get(endian) & elf::VERSYM_VERSION;
                    if index <= elf::VER_NDX_GLOBAL {
                        // TODO: return error?
                        continue;
                    }
                    versions[usize::from(index)] = Version {
                        name: vernaux.name(endian, strings)?,
                        hash: vernaux.vna_hash.get(endian),
                        valid: true,
                    };
                }
            }
        }

        Ok(VersionTable {
            symbols: versyms,
            versions,
        })
    }

    /// Return true if the version table is empty.
    pub fn is_empty(&self) -> bool {
        self.symbols.is_empty()
    }

    /// Return version index for a given symbol index.
    pub fn version_index(&self, endian: Elf::Endian, index: usize) -> VersionIndex {
        let version_index = match self.symbols.get(index) {
            Some(x) => x.0.get(endian),
            // Ideally this would be VER_NDX_LOCAL for undefined symbols,
            // but currently there are no checks that need this distinction.
            None => elf::VER_NDX_GLOBAL,
        };
        VersionIndex(version_index)
    }

    /// Return version information for a given symbol version index.
    ///
    /// Returns `Ok(None)` for local and global versions.
    /// Returns `Err(_)` if index is invalid.
    pub fn version(&self, index: VersionIndex) -> Result<Option<&Version<'data>>> {
        if index.index() <= elf::VER_NDX_GLOBAL {
            return Ok(None);
        }
        // Out-of-range indices and holes left by `parse` are both errors.
        self.versions
            .get(usize::from(index.index()))
            .filter(|version| version.valid)
            .read_error("Invalid ELF symbol version index")
            .map(Some)
    }

    /// Return true if the given symbol index satisfies the requirements of `need`.
    ///
    /// Returns false for any error.
    ///
    /// Note: this function hasn't been fully tested and is likely to be incomplete.
    pub fn matches(&self, endian: Elf::Endian, index: usize, need: Option<&Version<'_>>) -> bool {
        let version_index = self.version_index(endian, index);
        let def = match self.version(version_index) {
            Ok(def) => def,
            Err(_) => return false,
        };
        match (def, need) {
            // Versions match when both hash and name agree.
            (Some(def), Some(need)) => need.hash == def.hash && need.name == def.name,
            (None, Some(_need)) => {
                // Version must be present if needed.
                false
            }
            (Some(_def), None) => {
                // For a dlsym call, use the newest version.
                // TODO: if not a dlsym call, then use the oldest version.
                !version_index.is_hidden()
            }
            (None, None) => true,
        }
    }
}
|
||||
|
||||
/// An iterator for the entries in an ELF [`elf::SHT_GNU_VERDEF`] section.
#[derive(Debug, Clone)]
pub struct VerdefIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    // Remaining unparsed section data; empty once iteration is done.
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> VerdefIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8]) -> Self {
        VerdefIterator {
            endian,
            data: Bytes(data),
        }
    }

    /// Return the next `Verdef` entry.
    ///
    /// Also returns an iterator for the `Verdaux` entries belonging to it.
    /// Returns `Ok(None)` at the end of the section, `Err(_)` on malformed data.
    pub fn next(
        &mut self,
    ) -> Result<Option<(&'data elf::Verdef<Elf::Endian>, VerdauxIterator<'data, Elf>)>> {
        if self.data.is_empty() {
            return Ok(None);
        }

        let verdef = self
            .data
            .read_at::<elf::Verdef<_>>(0)
            .read_error("ELF verdef is too short")?;

        // The aux entries start at vd_aux bytes from this entry, not from
        // the section start, so skip relative to the current position.
        let mut verdaux_data = self.data;
        verdaux_data
            .skip(verdef.vd_aux.get(self.endian) as usize)
            .read_error("Invalid ELF vd_aux")?;
        let verdaux =
            VerdauxIterator::new(self.endian, verdaux_data.0, verdef.vd_cnt.get(self.endian));

        // vd_next is the relative offset of the next entry; 0 terminates.
        let next = verdef.vd_next.get(self.endian);
        if next != 0 {
            self.data
                .skip(next as usize)
                .read_error("Invalid ELF vd_next")?;
        } else {
            self.data = Bytes(&[]);
        }
        Ok(Some((verdef, verdaux)))
    }
}

/// An iterator for the auxiliary records for an entry in an ELF [`elf::SHT_GNU_VERDEF`] section.
#[derive(Debug, Clone)]
pub struct VerdauxIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    // Remaining unparsed aux data.
    data: Bytes<'data>,
    // Number of entries still to be yielded, from `vd_cnt`.
    count: u16,
}

impl<'data, Elf: FileHeader> VerdauxIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8], count: u16) -> Self {
        VerdauxIterator {
            endian,
            data: Bytes(data),
            count,
        }
    }

    /// Return the next `Verdaux` entry.
    ///
    /// Returns `Ok(None)` after `count` entries, `Err(_)` on malformed data.
    pub fn next(&mut self) -> Result<Option<&'data elf::Verdaux<Elf::Endian>>> {
        if self.count == 0 {
            return Ok(None);
        }

        let verdaux = self
            .data
            .read_at::<elf::Verdaux<_>>(0)
            .read_error("ELF verdaux is too short")?;

        // vda_next is the relative offset of the next aux entry.
        self.data
            .skip(verdaux.vda_next.get(self.endian) as usize)
            .read_error("Invalid ELF vda_next")?;
        self.count -= 1;
        Ok(Some(verdaux))
    }
}
|
||||
|
||||
/// An iterator for the entries in an ELF [`elf::SHT_GNU_VERNEED`] section.
#[derive(Debug, Clone)]
pub struct VerneedIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    // Remaining unparsed section data; empty once iteration is done.
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> VerneedIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8]) -> Self {
        VerneedIterator {
            endian,
            data: Bytes(data),
        }
    }

    /// Return the next `Verneed` entry.
    ///
    /// Also returns an iterator for the `Vernaux` entries belonging to it.
    /// Returns `Ok(None)` at the end of the section, `Err(_)` on malformed data.
    pub fn next(
        &mut self,
    ) -> Result<
        Option<(
            &'data elf::Verneed<Elf::Endian>,
            VernauxIterator<'data, Elf>,
        )>,
    > {
        if self.data.is_empty() {
            return Ok(None);
        }

        let verneed = self
            .data
            .read_at::<elf::Verneed<_>>(0)
            .read_error("ELF verneed is too short")?;

        // The aux entries start at vn_aux bytes from this entry, so skip
        // relative to the current position.
        let mut vernaux_data = self.data;
        vernaux_data
            .skip(verneed.vn_aux.get(self.endian) as usize)
            .read_error("Invalid ELF vn_aux")?;
        let vernaux =
            VernauxIterator::new(self.endian, vernaux_data.0, verneed.vn_cnt.get(self.endian));

        // vn_next is the relative offset of the next entry; 0 terminates.
        let next = verneed.vn_next.get(self.endian);
        if next != 0 {
            self.data
                .skip(next as usize)
                .read_error("Invalid ELF vn_next")?;
        } else {
            self.data = Bytes(&[]);
        }
        Ok(Some((verneed, vernaux)))
    }
}

/// An iterator for the auxiliary records for an entry in an ELF [`elf::SHT_GNU_VERNEED`] section.
#[derive(Debug, Clone)]
pub struct VernauxIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    // Remaining unparsed aux data.
    data: Bytes<'data>,
    // Number of entries still to be yielded, from `vn_cnt`.
    count: u16,
}

impl<'data, Elf: FileHeader> VernauxIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8], count: u16) -> Self {
        VernauxIterator {
            endian,
            data: Bytes(data),
            count,
        }
    }

    /// Return the next `Vernaux` entry.
    ///
    /// Returns `Ok(None)` after `count` entries, `Err(_)` on malformed data.
    pub fn next(&mut self) -> Result<Option<&'data elf::Vernaux<Elf::Endian>>> {
        if self.count == 0 {
            return Ok(None);
        }

        let vernaux = self
            .data
            .read_at::<elf::Vernaux<_>>(0)
            .read_error("ELF vernaux is too short")?;

        // vna_next is the relative offset of the next aux entry.
        self.data
            .skip(vernaux.vna_next.get(self.endian) as usize)
            .read_error("Invalid ELF vna_next")?;
        self.count -= 1;
        Ok(Some(vernaux))
    }
}
|
||||
|
||||
impl<Endian: endian::Endian> elf::Verdaux<Endian> {
    /// Parse the version name from the string table.
    ///
    /// Returns an error if `vda_name` is not a valid string table offset.
    pub fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vda_name.get(endian))
            .read_error("Invalid ELF vda_name")
    }
}

impl<Endian: endian::Endian> elf::Verneed<Endian> {
    /// Parse the file from the string table.
    ///
    /// Returns an error if `vn_file` is not a valid string table offset.
    pub fn file<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vn_file.get(endian))
            .read_error("Invalid ELF vn_file")
    }
}

impl<Endian: endian::Endian> elf::Vernaux<Endian> {
    /// Parse the version name from the string table.
    ///
    /// Returns an error if `vna_name` is not a valid string table offset.
    pub fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vna_name.get(endian))
            .read_error("Invalid ELF vna_name")
    }
}
|
||||
344
vendor/object/src/read/macho/dyld_cache.rs
vendored
Normal file
344
vendor/object/src/read/macho/dyld_cache.rs
vendored
Normal file
@@ -0,0 +1,344 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::slice;
|
||||
|
||||
use crate::read::{Error, File, ReadError, ReadRef, Result};
|
||||
use crate::{macho, Architecture, Endian, Endianness};
|
||||
|
||||
/// A parsed representation of the dyld shared cache.
#[derive(Debug)]
pub struct DyldCache<'data, E = Endianness, R = &'data [u8]>
where
    E: Endian,
    R: ReadRef<'data>,
{
    endian: E,
    // Data of the main cache file.
    data: R,
    // Additional cache files (.1, .2, ...), in the order given by the header.
    subcaches: Vec<DyldSubCache<'data, E, R>>,
    mappings: &'data [macho::DyldCacheMappingInfo<E>],
    images: &'data [macho::DyldCacheImageInfo<E>],
    arch: Architecture,
}

/// Information about a subcache.
#[derive(Debug)]
pub struct DyldSubCache<'data, E = Endianness, R = &'data [u8]>
where
    E: Endian,
    R: ReadRef<'data>,
{
    data: R,
    mappings: &'data [macho::DyldCacheMappingInfo<E>],
}

// This is the offset of the images_across_all_subcaches_count field.
// A mapping_offset at or beyond it means the header is new enough to have
// subcache support (macOS 12 / iOS 15 and later).
const MIN_HEADER_SIZE_SUBCACHES: u32 = 0x1c4;
|
||||
|
||||
impl<'data, E, R> DyldCache<'data, E, R>
where
    E: Endian,
    R: ReadRef<'data>,
{
    /// Parse the raw dyld shared cache data.
    ///
    /// For shared caches from macOS 12 / iOS 15 and above, the subcache files need to be
    /// supplied as well, in the correct order, with the `.symbols` subcache last (if present).
    /// For example, `data` would be the data for `dyld_shared_cache_x86_64`,
    /// and `subcache_data` would be the data for `[dyld_shared_cache_x86_64.1, dyld_shared_cache_x86_64.2, ...]`.
    pub fn parse(data: R, subcache_data: &[R]) -> Result<Self> {
        let header = macho::DyldCacheHeader::parse(data)?;
        let (arch, endian) = header.parse_magic()?;
        let mappings = header.mappings(endian, data)?;

        let symbols_subcache_uuid = header.symbols_subcache_uuid(endian);
        let subcaches_info = header.subcaches(endian, data)?.unwrap_or(&[]);

        // The caller must supply exactly one file per subcache listed in the
        // header, plus one more if a .symbols subcache is declared.
        if subcache_data.len() != subcaches_info.len() + symbols_subcache_uuid.is_some() as usize {
            return Err(Error("Incorrect number of SubCaches"));
        }

        // Split out the .symbols subcache data from the other subcaches.
        let (symbols_subcache_data_and_uuid, subcache_data) =
            if let Some(symbols_uuid) = symbols_subcache_uuid {
                // Safe to unwrap: the length check above guarantees at least one element.
                let (sym_data, rest_data) = subcache_data.split_last().unwrap();
                (Some((*sym_data, symbols_uuid)), rest_data)
            } else {
                (None, subcache_data)
            };

        // Read the regular SubCaches (.1, .2, ...), if present.
        // Each subcache's UUID must match the one recorded in the main header.
        let mut subcaches = Vec::new();
        for (&data, info) in subcache_data.iter().zip(subcaches_info.iter()) {
            let sc_header = macho::DyldCacheHeader::<E>::parse(data)?;
            if sc_header.uuid != info.uuid {
                return Err(Error("Unexpected SubCache UUID"));
            }
            let mappings = sc_header.mappings(endian, data)?;
            subcaches.push(DyldSubCache { data, mappings });
        }

        // Read the .symbols SubCache, if present.
        // Other than the UUID verification, the symbols SubCache is currently unused.
        let _symbols_subcache = match symbols_subcache_data_and_uuid {
            Some((data, uuid)) => {
                let sc_header = macho::DyldCacheHeader::<E>::parse(data)?;
                if sc_header.uuid != uuid {
                    return Err(Error("Unexpected .symbols SubCache UUID"));
                }
                let mappings = sc_header.mappings(endian, data)?;
                Some(DyldSubCache { data, mappings })
            }
            None => None,
        };

        let images = header.images(endian, data)?;
        Ok(DyldCache {
            endian,
            data,
            subcaches,
            mappings,
            images,
            arch,
        })
    }

    /// Get the architecture type of the file.
    pub fn architecture(&self) -> Architecture {
        self.arch
    }

    /// Get the endianness of the file.
    #[inline]
    pub fn endianness(&self) -> Endianness {
        if self.is_little_endian() {
            Endianness::Little
        } else {
            Endianness::Big
        }
    }

    /// Return true if the file is little endian, false if it is big endian.
    pub fn is_little_endian(&self) -> bool {
        self.endian.is_little_endian()
    }

    /// Iterate over the images in this cache.
    pub fn images<'cache>(&'cache self) -> DyldCacheImageIterator<'data, 'cache, E, R> {
        DyldCacheImageIterator {
            cache: self,
            iter: self.images.iter(),
        }
    }

    /// Find the address in a mapping and return the cache or subcache data it was found in,
    /// together with the translated file offset.
    ///
    /// The main cache's mappings are searched first, then each subcache in order.
    /// Returns `None` if no mapping contains the address.
    pub fn data_and_offset_for_address(&self, address: u64) -> Option<(R, u64)> {
        if let Some(file_offset) = address_to_file_offset(address, self.endian, self.mappings) {
            return Some((self.data, file_offset));
        }
        for subcache in &self.subcaches {
            if let Some(file_offset) =
                address_to_file_offset(address, self.endian, subcache.mappings)
            {
                return Some((subcache.data, file_offset));
            }
        }
        None
    }
}
|
||||
|
||||
/// An iterator over all the images (dylibs) in the dyld shared cache.
|
||||
#[derive(Debug)]
|
||||
pub struct DyldCacheImageIterator<'data, 'cache, E = Endianness, R = &'data [u8]>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
cache: &'cache DyldCache<'data, E, R>,
|
||||
iter: slice::Iter<'data, macho::DyldCacheImageInfo<E>>,
|
||||
}
|
||||
|
||||
impl<'data, 'cache, E, R> Iterator for DyldCacheImageIterator<'data, 'cache, E, R>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = DyldCacheImage<'data, 'cache, E, R>;
|
||||
|
||||
fn next(&mut self) -> Option<DyldCacheImage<'data, 'cache, E, R>> {
|
||||
let image_info = self.iter.next()?;
|
||||
Some(DyldCacheImage {
|
||||
cache: self.cache,
|
||||
image_info,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// One image (dylib) from inside the dyld shared cache.
#[derive(Debug)]
pub struct DyldCacheImage<'data, 'cache, E = Endianness, R = &'data [u8]>
where
    E: Endian,
    R: ReadRef<'data>,
{
    pub(crate) cache: &'cache DyldCache<'data, E, R>,
    image_info: &'data macho::DyldCacheImageInfo<E>,
}

impl<'data, 'cache, E, R> DyldCacheImage<'data, 'cache, E, R>
where
    E: Endian,
    R: ReadRef<'data>,
{
    /// The file system path of this image.
    ///
    /// Returns an error if the path cannot be read or is not valid UTF-8.
    pub fn path(&self) -> Result<&'data str> {
        let path = self.image_info.path(self.cache.endian, self.cache.data)?;
        // The path should always be ascii, so from_utf8 should always succeed.
        let path = core::str::from_utf8(path).map_err(|_| Error("Path string not valid utf-8"))?;
        Ok(path)
    }

    /// The subcache data which contains the Mach-O header for this image,
    /// together with the file offset at which this image starts.
    pub fn image_data_and_offset(&self) -> Result<(R, u64)> {
        let address = self.image_info.address.get(self.cache.endian);
        self.cache
            .data_and_offset_for_address(address)
            .ok_or(Error("Address not found in any mapping"))
    }

    /// Parse this image into an Object.
    pub fn parse_object(&self) -> Result<File<'data, R>> {
        File::parse_dyld_cache_image(self)
    }
}
|
||||
|
||||
impl<E: Endian> macho::DyldCacheHeader<E> {
    /// Read the dyld cache header.
    pub fn parse<'data, R: ReadRef<'data>>(data: R) -> Result<&'data Self> {
        data.read_at::<macho::DyldCacheHeader<E>>(0)
            .read_error("Invalid dyld cache header size or alignment")
    }

    /// Returns (arch, endian) based on the magic string.
    ///
    /// Returns an error for unrecognized magic values, or when the detected
    /// byte order does not match the endian type parameter `E`.
    pub fn parse_magic(&self) -> Result<(Architecture, E)> {
        // The magic is a fixed 16-byte, space-padded, NUL-terminated string.
        let (arch, is_big_endian) = match &self.magic {
            b"dyld_v1    i386\0" => (Architecture::I386, false),
            b"dyld_v1  x86_64\0" => (Architecture::X86_64, false),
            b"dyld_v1 x86_64h\0" => (Architecture::X86_64, false),
            b"dyld_v1     ppc\0" => (Architecture::PowerPc, true),
            b"dyld_v1   armv6\0" => (Architecture::Arm, false),
            b"dyld_v1   armv7\0" => (Architecture::Arm, false),
            b"dyld_v1  armv7f\0" => (Architecture::Arm, false),
            b"dyld_v1  armv7s\0" => (Architecture::Arm, false),
            b"dyld_v1  armv7k\0" => (Architecture::Arm, false),
            b"dyld_v1   arm64\0" => (Architecture::Aarch64, false),
            b"dyld_v1  arm64e\0" => (Architecture::Aarch64, false),
            _ => return Err(Error("Unrecognized dyld cache magic")),
        };
        let endian =
            E::from_big_endian(is_big_endian).read_error("Unsupported dyld cache endian")?;
        Ok((arch, endian))
    }

    /// Return the mapping information table.
    pub fn mappings<'data, R: ReadRef<'data>>(
        &self,
        endian: E,
        data: R,
    ) -> Result<&'data [macho::DyldCacheMappingInfo<E>]> {
        data.read_slice_at::<macho::DyldCacheMappingInfo<E>>(
            self.mapping_offset.get(endian).into(),
            self.mapping_count.get(endian) as usize,
        )
        .read_error("Invalid dyld cache mapping size or alignment")
    }

    /// Return the information about subcaches, if present.
    ///
    /// Returns `Ok(None)` for headers too old to have subcache fields.
    pub fn subcaches<'data, R: ReadRef<'data>>(
        &self,
        endian: E,
        data: R,
    ) -> Result<Option<&'data [macho::DyldSubCacheInfo<E>]>> {
        // mapping_offset doubles as the header size: a large enough value
        // means the subcache fields exist in this header version.
        if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
            let subcaches = data
                .read_slice_at::<macho::DyldSubCacheInfo<E>>(
                    self.subcaches_offset.get(endian).into(),
                    self.subcaches_count.get(endian) as usize,
                )
                .read_error("Invalid dyld subcaches size or alignment")?;
            Ok(Some(subcaches))
        } else {
            Ok(None)
        }
    }

    /// Return the UUID for the .symbols subcache, if present.
    ///
    /// Returns `None` for old headers, and for an all-zero (absent) UUID.
    pub fn symbols_subcache_uuid(&self, endian: E) -> Option<[u8; 16]> {
        if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
            let uuid = self.symbols_subcache_uuid;
            if uuid != [0; 16] {
                return Some(uuid);
            }
        }
        None
    }

    /// Return the image information table.
    ///
    /// Newer headers store the combined image list for all subcaches in a
    /// separate field; older headers use the original images table.
    pub fn images<'data, R: ReadRef<'data>>(
        &self,
        endian: E,
        data: R,
    ) -> Result<&'data [macho::DyldCacheImageInfo<E>]> {
        if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
            data.read_slice_at::<macho::DyldCacheImageInfo<E>>(
                self.images_across_all_subcaches_offset.get(endian).into(),
                self.images_across_all_subcaches_count.get(endian) as usize,
            )
            .read_error("Invalid dyld cache image size or alignment")
        } else {
            data.read_slice_at::<macho::DyldCacheImageInfo<E>>(
                self.images_offset.get(endian).into(),
                self.images_count.get(endian) as usize,
            )
            .read_error("Invalid dyld cache image size or alignment")
        }
    }
}
|
||||
|
||||
impl<E: Endian> macho::DyldCacheImageInfo<E> {
    /// The file system path of this image.
    ///
    /// Reads the NUL-terminated path string at `path_file_offset`.
    pub fn path<'data, R: ReadRef<'data>>(&self, endian: E, data: R) -> Result<&'data [u8]> {
        let r_start = self.path_file_offset.get(endian).into();
        let r_end = data.len().read_error("Couldn't get data len()")?;
        // Read bytes up to (not including) the terminating NUL.
        data.read_bytes_at_until(r_start..r_end, 0)
            .read_error("Couldn't read dyld cache image path")
    }

    /// Find the file offset of the image by looking up its address in the mappings.
    pub fn file_offset(
        &self,
        endian: E,
        mappings: &[macho::DyldCacheMappingInfo<E>],
    ) -> Result<u64> {
        let address = self.address.get(endian);
        address_to_file_offset(address, endian, mappings)
            .read_error("Invalid dyld cache image address")
    }
}
|
||||
|
||||
/// Find the file offset of the image by looking up its address in the mappings.
|
||||
pub fn address_to_file_offset<E: Endian>(
|
||||
address: u64,
|
||||
endian: E,
|
||||
mappings: &[macho::DyldCacheMappingInfo<E>],
|
||||
) -> Option<u64> {
|
||||
for mapping in mappings {
|
||||
let mapping_address = mapping.address.get(endian);
|
||||
if address >= mapping_address
|
||||
&& address < mapping_address.wrapping_add(mapping.size.get(endian))
|
||||
{
|
||||
return Some(address - mapping_address + mapping.file_offset.get(endian));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
122
vendor/object/src/read/macho/fat.rs
vendored
Normal file
122
vendor/object/src/read/macho/fat.rs
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
use crate::read::{Architecture, Error, ReadError, ReadRef, Result};
|
||||
use crate::{macho, BigEndian, Pod};
|
||||
|
||||
pub use macho::{FatArch32, FatArch64, FatHeader};
|
||||
|
||||
impl FatHeader {
    /// Attempt to parse a fat header.
    ///
    /// Does not validate the magic value.
    pub fn parse<'data, R: ReadRef<'data>>(file: R) -> Result<&'data FatHeader> {
        file.read_at::<FatHeader>(0)
            .read_error("Invalid fat header size or alignment")
    }

    /// Attempt to parse a fat header and 32-bit fat arches.
    ///
    /// Validates the magic, then reads the `nfat_arch` arch entries that
    /// immediately follow the header. All fat fields are big-endian.
    pub fn parse_arch32<'data, R: ReadRef<'data>>(file: R) -> Result<&'data [FatArch32]> {
        let mut offset = 0;
        let header = file
            .read::<FatHeader>(&mut offset)
            .read_error("Invalid fat header size or alignment")?;
        if header.magic.get(BigEndian) != macho::FAT_MAGIC {
            return Err(Error("Invalid 32-bit fat magic"));
        }
        file.read_slice::<FatArch32>(&mut offset, header.nfat_arch.get(BigEndian) as usize)
            .read_error("Invalid nfat_arch")
    }

    /// Attempt to parse a fat header and 64-bit fat arches.
    ///
    /// Same as [`Self::parse_arch32`], but for the 64-bit fat format.
    pub fn parse_arch64<'data, R: ReadRef<'data>>(file: R) -> Result<&'data [FatArch64]> {
        let mut offset = 0;
        let header = file
            .read::<FatHeader>(&mut offset)
            .read_error("Invalid fat header size or alignment")?;
        if header.magic.get(BigEndian) != macho::FAT_MAGIC_64 {
            return Err(Error("Invalid 64-bit fat magic"));
        }
        file.read_slice::<FatArch64>(&mut offset, header.nfat_arch.get(BigEndian) as usize)
            .read_error("Invalid nfat_arch")
    }
}
|
||||
|
||||
/// A trait for generic access to [`macho::FatArch32`] and [`macho::FatArch64`].
#[allow(missing_docs)]
pub trait FatArch: Pod {
    // Word size used for the offset and size fields (u32 or u64).
    type Word: Into<u64>;

    fn cputype(&self) -> u32;
    fn cpusubtype(&self) -> u32;
    fn offset(&self) -> Self::Word;
    fn size(&self) -> Self::Word;
    fn align(&self) -> u32;

    // Map the Mach-O CPU type to the generic architecture enum.
    fn architecture(&self) -> Architecture {
        match self.cputype() {
            macho::CPU_TYPE_ARM => Architecture::Arm,
            macho::CPU_TYPE_ARM64 => Architecture::Aarch64,
            macho::CPU_TYPE_X86 => Architecture::I386,
            macho::CPU_TYPE_X86_64 => Architecture::X86_64,
            macho::CPU_TYPE_MIPS => Architecture::Mips,
            macho::CPU_TYPE_POWERPC => Architecture::PowerPc,
            macho::CPU_TYPE_POWERPC64 => Architecture::PowerPc64,
            _ => Architecture::Unknown,
        }
    }

    // The (offset, size) byte range of this arch's object within the fat file.
    fn file_range(&self) -> (u64, u64) {
        (self.offset().into(), self.size().into())
    }

    // The raw bytes of this arch's object within the fat file.
    fn data<'data, R: ReadRef<'data>>(&self, file: R) -> Result<&'data [u8]> {
        file.read_bytes_at(self.offset().into(), self.size().into())
            .read_error("Invalid fat arch offset or size")
    }
}
|
||||
|
||||
impl FatArch for FatArch32 {
|
||||
type Word = u32;
|
||||
|
||||
fn cputype(&self) -> u32 {
|
||||
self.cputype.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self) -> u32 {
|
||||
self.cpusubtype.get(BigEndian)
|
||||
}
|
||||
|
||||
fn offset(&self) -> Self::Word {
|
||||
self.offset.get(BigEndian)
|
||||
}
|
||||
|
||||
fn size(&self) -> Self::Word {
|
||||
self.size.get(BigEndian)
|
||||
}
|
||||
|
||||
fn align(&self) -> u32 {
|
||||
self.align.get(BigEndian)
|
||||
}
|
||||
}
|
||||
|
||||
impl FatArch for FatArch64 {
|
||||
type Word = u64;
|
||||
|
||||
fn cputype(&self) -> u32 {
|
||||
self.cputype.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self) -> u32 {
|
||||
self.cpusubtype.get(BigEndian)
|
||||
}
|
||||
|
||||
fn offset(&self) -> Self::Word {
|
||||
self.offset.get(BigEndian)
|
||||
}
|
||||
|
||||
fn size(&self) -> Self::Word {
|
||||
self.size.get(BigEndian)
|
||||
}
|
||||
|
||||
fn align(&self) -> u32 {
|
||||
self.align.get(BigEndian)
|
||||
}
|
||||
}
|
||||
781
vendor/object/src/read/macho/file.rs
vendored
Normal file
781
vendor/object/src/read/macho/file.rs
vendored
Normal file
@@ -0,0 +1,781 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::{mem, str};
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, ComdatKind, Error, Export, FileFlags, Import, NoDynamicRelocationIterator,
|
||||
Object, ObjectComdat, ObjectKind, ObjectMap, ObjectSection, ReadError, ReadRef, Result,
|
||||
SectionIndex, SubArchitecture, SymbolIndex,
|
||||
};
|
||||
use crate::{endian, macho, BigEndian, ByteString, Endian, Endianness, Pod};
|
||||
|
||||
use super::{
|
||||
DyldCacheImage, LoadCommandIterator, MachOSection, MachOSectionInternal, MachOSectionIterator,
|
||||
MachOSegment, MachOSegmentInternal, MachOSegmentIterator, MachOSymbol, MachOSymbolIterator,
|
||||
MachOSymbolTable, Nlist, Section, Segment, SymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit Mach-O object file.
///
/// This is a file that starts with [`macho::MachHeader32`], and corresponds
/// to [`crate::FileKind::MachO32`].
pub type MachOFile32<'data, Endian = Endianness, R = &'data [u8]> =
    MachOFile<'data, macho::MachHeader32<Endian>, R>;
/// A 64-bit Mach-O object file.
///
/// This is a file that starts with [`macho::MachHeader64`], and corresponds
/// to [`crate::FileKind::MachO64`].
pub type MachOFile64<'data, Endian = Endianness, R = &'data [u8]> =
    MachOFile<'data, macho::MachHeader64<Endian>, R>;

/// A partially parsed Mach-O file.
///
/// Most of the functionality of this type is provided by the [`Object`] trait implementation.
#[derive(Debug)]
pub struct MachOFile<'data, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    pub(super) endian: Mach::Endian,
    pub(super) data: R,
    // Offset of the Mach-O header within `data` (non-zero inside a dyld cache).
    pub(super) header_offset: u64,
    pub(super) header: &'data Mach,
    // Segments and sections are gathered up front so lookups are cheap.
    pub(super) segments: Vec<MachOSegmentInternal<'data, Mach, R>>,
    pub(super) sections: Vec<MachOSectionInternal<'data, Mach>>,
    pub(super) symbols: SymbolTable<'data, Mach, R>,
}
|
||||
|
||||
impl<'data, Mach, R> MachOFile<'data, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the raw Mach-O file data.
|
||||
pub fn parse(data: R) -> Result<Self> {
|
||||
let header = Mach::parse(data, 0)?;
|
||||
let endian = header.endian()?;
|
||||
|
||||
// Build a list of segments and sections to make some operations more efficient.
|
||||
let mut segments = Vec::new();
|
||||
let mut sections = Vec::new();
|
||||
let mut symbols = SymbolTable::default();
|
||||
if let Ok(mut commands) = header.load_commands(endian, data, 0) {
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Some((segment, section_data)) = Mach::Segment::from_command(command)? {
|
||||
let segment_index = segments.len();
|
||||
segments.push(MachOSegmentInternal { segment, data });
|
||||
for section in segment.sections(endian, section_data)? {
|
||||
let index = SectionIndex(sections.len() + 1);
|
||||
sections.push(MachOSectionInternal::parse(index, segment_index, section));
|
||||
}
|
||||
} else if let Some(symtab) = command.symtab()? {
|
||||
symbols = symtab.symbols(endian, data)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(MachOFile {
|
||||
endian,
|
||||
data,
|
||||
header_offset: 0,
|
||||
header,
|
||||
segments,
|
||||
sections,
|
||||
symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse the Mach-O file for the given image from the dyld shared cache.
|
||||
/// This will read different sections from different subcaches, if necessary.
|
||||
pub fn parse_dyld_cache_image<'cache, E: Endian>(
|
||||
image: &DyldCacheImage<'data, 'cache, E, R>,
|
||||
) -> Result<Self> {
|
||||
let (data, header_offset) = image.image_data_and_offset()?;
|
||||
let header = Mach::parse(data, header_offset)?;
|
||||
let endian = header.endian()?;
|
||||
|
||||
// Build a list of sections to make some operations more efficient.
|
||||
// Also build a list of segments, because we need to remember which ReadRef
|
||||
// to read each section's data from. Only the DyldCache knows this information,
|
||||
// and we won't have access to it once we've exited this function.
|
||||
let mut segments = Vec::new();
|
||||
let mut sections = Vec::new();
|
||||
let mut linkedit_data: Option<R> = None;
|
||||
let mut symtab = None;
|
||||
if let Ok(mut commands) = header.load_commands(endian, data, header_offset) {
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Some((segment, section_data)) = Mach::Segment::from_command(command)? {
|
||||
// Each segment can be stored in a different subcache. Get the segment's
|
||||
// address and look it up in the cache mappings, to find the correct cache data.
|
||||
let addr = segment.vmaddr(endian).into();
|
||||
let (data, _offset) = image
|
||||
.cache
|
||||
.data_and_offset_for_address(addr)
|
||||
.read_error("Could not find segment data in dyld shared cache")?;
|
||||
if segment.name() == macho::SEG_LINKEDIT.as_bytes() {
|
||||
linkedit_data = Some(data);
|
||||
}
|
||||
let segment_index = segments.len();
|
||||
segments.push(MachOSegmentInternal { segment, data });
|
||||
|
||||
for section in segment.sections(endian, section_data)? {
|
||||
let index = SectionIndex(sections.len() + 1);
|
||||
sections.push(MachOSectionInternal::parse(index, segment_index, section));
|
||||
}
|
||||
} else if let Some(st) = command.symtab()? {
|
||||
symtab = Some(st);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The symbols are found in the __LINKEDIT segment, so make sure to read them from the
|
||||
// correct subcache.
|
||||
let symbols = match (symtab, linkedit_data) {
|
||||
(Some(symtab), Some(linkedit_data)) => symtab.symbols(endian, linkedit_data)?,
|
||||
_ => SymbolTable::default(),
|
||||
};
|
||||
|
||||
Ok(MachOFile {
|
||||
endian,
|
||||
data,
|
||||
header_offset,
|
||||
header,
|
||||
segments,
|
||||
sections,
|
||||
symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the section at the given index.
|
||||
#[inline]
|
||||
pub(super) fn section_internal(
|
||||
&self,
|
||||
index: SectionIndex,
|
||||
) -> Result<&MachOSectionInternal<'data, Mach>> {
|
||||
index
|
||||
.0
|
||||
.checked_sub(1)
|
||||
.and_then(|index| self.sections.get(index))
|
||||
.read_error("Invalid Mach-O section index")
|
||||
}
|
||||
|
||||
pub(super) fn segment_internal(
|
||||
&self,
|
||||
index: usize,
|
||||
) -> Result<&MachOSegmentInternal<'data, Mach, R>> {
|
||||
self.segments
|
||||
.get(index)
|
||||
.read_error("Invalid Mach-O segment index")
|
||||
}
|
||||
|
||||
/// Returns the endianness.
|
||||
pub fn endian(&self) -> Mach::Endian {
|
||||
self.endian
|
||||
}
|
||||
|
||||
/// Returns the raw data.
|
||||
pub fn data(&self) -> R {
|
||||
self.data
|
||||
}
|
||||
|
||||
/// Returns the raw Mach-O file header.
|
||||
pub fn raw_header(&self) -> &'data Mach {
|
||||
self.header
|
||||
}
|
||||
|
||||
/// Return the `LC_BUILD_VERSION` load command if present.
|
||||
pub fn build_version(&self) -> Result<Option<&'data macho::BuildVersionCommand<Mach::Endian>>> {
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(build_version) = command.build_version()? {
|
||||
return Ok(Some(build_version));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Mach, R> read::private::Sealed for MachOFile<'data, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Object<'data, 'file> for MachOFile<'data, Mach, R>
|
||||
where
|
||||
'data: 'file,
|
||||
Mach: MachHeader,
|
||||
R: 'file + ReadRef<'data>,
|
||||
{
|
||||
type Segment = MachOSegment<'data, 'file, Mach, R>;
|
||||
type SegmentIterator = MachOSegmentIterator<'data, 'file, Mach, R>;
|
||||
type Section = MachOSection<'data, 'file, Mach, R>;
|
||||
type SectionIterator = MachOSectionIterator<'data, 'file, Mach, R>;
|
||||
type Comdat = MachOComdat<'data, 'file, Mach, R>;
|
||||
type ComdatIterator = MachOComdatIterator<'data, 'file, Mach, R>;
|
||||
type Symbol = MachOSymbol<'data, 'file, Mach, R>;
|
||||
type SymbolIterator = MachOSymbolIterator<'data, 'file, Mach, R>;
|
||||
type SymbolTable = MachOSymbolTable<'data, 'file, Mach, R>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
fn architecture(&self) -> Architecture {
|
||||
match self.header.cputype(self.endian) {
|
||||
macho::CPU_TYPE_ARM => Architecture::Arm,
|
||||
macho::CPU_TYPE_ARM64 => Architecture::Aarch64,
|
||||
macho::CPU_TYPE_ARM64_32 => Architecture::Aarch64_Ilp32,
|
||||
macho::CPU_TYPE_X86 => Architecture::I386,
|
||||
macho::CPU_TYPE_X86_64 => Architecture::X86_64,
|
||||
macho::CPU_TYPE_MIPS => Architecture::Mips,
|
||||
macho::CPU_TYPE_POWERPC => Architecture::PowerPc,
|
||||
macho::CPU_TYPE_POWERPC64 => Architecture::PowerPc64,
|
||||
_ => Architecture::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
match (
|
||||
self.header.cputype(self.endian),
|
||||
self.header.cpusubtype(self.endian),
|
||||
) {
|
||||
(macho::CPU_TYPE_ARM64, macho::CPU_SUBTYPE_ARM64E) => Some(SubArchitecture::Arm64E),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.header.is_little_endian()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
self.header.is_type_64()
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
match self.header.filetype(self.endian) {
|
||||
macho::MH_OBJECT => ObjectKind::Relocatable,
|
||||
macho::MH_EXECUTE => ObjectKind::Executable,
|
||||
macho::MH_CORE => ObjectKind::Core,
|
||||
macho::MH_DYLIB => ObjectKind::Dynamic,
|
||||
_ => ObjectKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> MachOSegmentIterator<'data, 'file, Mach, R> {
|
||||
MachOSegmentIterator {
|
||||
file: self,
|
||||
iter: self.segments.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<MachOSection<'data, 'file, Mach, R>> {
|
||||
// Translate the "." prefix to the "__" prefix used by OSX/Mach-O, eg
|
||||
// ".debug_info" to "__debug_info", and limit to 16 bytes total.
|
||||
let system_name = if section_name.starts_with(b".") {
|
||||
if section_name.len() > 15 {
|
||||
Some(§ion_name[1..15])
|
||||
} else {
|
||||
Some(§ion_name[1..])
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let cmp_section_name = |section: &MachOSection<'data, 'file, Mach, R>| {
|
||||
section
|
||||
.name_bytes()
|
||||
.map(|name| {
|
||||
section_name == name
|
||||
|| system_name
|
||||
.filter(|system_name| {
|
||||
name.starts_with(b"__") && name[2..] == **system_name
|
||||
})
|
||||
.is_some()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
};
|
||||
|
||||
self.sections().find(cmp_section_name)
|
||||
}
|
||||
|
||||
fn section_by_index(
|
||||
&'file self,
|
||||
index: SectionIndex,
|
||||
) -> Result<MachOSection<'data, 'file, Mach, R>> {
|
||||
let internal = *self.section_internal(index)?;
|
||||
Ok(MachOSection {
|
||||
file: self,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> MachOSectionIterator<'data, 'file, Mach, R> {
|
||||
MachOSectionIterator {
|
||||
file: self,
|
||||
iter: self.sections.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> MachOComdatIterator<'data, 'file, Mach, R> {
|
||||
MachOComdatIterator { file: self }
|
||||
}
|
||||
|
||||
fn symbol_by_index(
|
||||
&'file self,
|
||||
index: SymbolIndex,
|
||||
) -> Result<MachOSymbol<'data, 'file, Mach, R>> {
|
||||
let nlist = self.symbols.symbol(index.0)?;
|
||||
MachOSymbol::new(self, index, nlist).read_error("Unsupported Mach-O symbol index")
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> MachOSymbolIterator<'data, 'file, Mach, R> {
|
||||
MachOSymbolIterator {
|
||||
file: self,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol_table(&'file self) -> Option<MachOSymbolTable<'data, 'file, Mach, R>> {
|
||||
Some(MachOSymbolTable { file: self })
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> MachOSymbolIterator<'data, 'file, Mach, R> {
|
||||
MachOSymbolIterator {
|
||||
file: self,
|
||||
index: self.symbols.len(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_symbol_table(&'file self) -> Option<MachOSymbolTable<'data, 'file, Mach, R>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn object_map(&'file self) -> ObjectMap<'data> {
|
||||
self.symbols.object_map(self.endian)
|
||||
}
|
||||
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>> {
|
||||
let mut dysymtab = None;
|
||||
let mut libraries = Vec::new();
|
||||
let twolevel = self.header.flags(self.endian) & macho::MH_TWOLEVEL != 0;
|
||||
if twolevel {
|
||||
libraries.push(&[][..]);
|
||||
}
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(command) = command.dysymtab()? {
|
||||
dysymtab = Some(command);
|
||||
}
|
||||
if twolevel {
|
||||
if let Some(dylib) = command.dylib()? {
|
||||
libraries.push(command.string(self.endian, dylib.dylib.name)?);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut imports = Vec::new();
|
||||
if let Some(dysymtab) = dysymtab {
|
||||
let index = dysymtab.iundefsym.get(self.endian) as usize;
|
||||
let number = dysymtab.nundefsym.get(self.endian) as usize;
|
||||
for i in index..(index.wrapping_add(number)) {
|
||||
let symbol = self.symbols.symbol(i)?;
|
||||
let name = symbol.name(self.endian, self.symbols.strings())?;
|
||||
let library = if twolevel {
|
||||
libraries
|
||||
.get(symbol.library_ordinal(self.endian) as usize)
|
||||
.copied()
|
||||
.read_error("Invalid Mach-O symbol library ordinal")?
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
imports.push(Import {
|
||||
name: ByteString(name),
|
||||
library: ByteString(library),
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(imports)
|
||||
}
|
||||
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
let mut dysymtab = None;
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(command) = command.dysymtab()? {
|
||||
dysymtab = Some(command);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let mut exports = Vec::new();
|
||||
if let Some(dysymtab) = dysymtab {
|
||||
let index = dysymtab.iextdefsym.get(self.endian) as usize;
|
||||
let number = dysymtab.nextdefsym.get(self.endian) as usize;
|
||||
for i in index..(index.wrapping_add(number)) {
|
||||
let symbol = self.symbols.symbol(i)?;
|
||||
let name = symbol.name(self.endian, self.symbols.strings())?;
|
||||
let address = symbol.n_value(self.endian).into();
|
||||
exports.push(Export {
|
||||
name: ByteString(name),
|
||||
address,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(exports)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_relocations(&'file self) -> Option<NoDynamicRelocationIterator> {
|
||||
None
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.section_by_name(".debug_info").is_some()
|
||||
}
|
||||
|
||||
fn mach_uuid(&self) -> Result<Option<[u8; 16]>> {
|
||||
self.header.uuid(self.endian, self.data, self.header_offset)
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn entry(&self) -> u64 {
|
||||
if let Ok(mut commands) =
|
||||
self.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)
|
||||
{
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Ok(Some(command)) = command.entry_point() {
|
||||
return command.entryoff.get(self.endian);
|
||||
}
|
||||
}
|
||||
}
|
||||
0
|
||||
}
|
||||
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::MachO {
|
||||
flags: self.header.flags(self.endian),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile64`].
|
||||
pub type MachOComdatIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile64`].
|
||||
pub type MachOComdatIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdatIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOComdatIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOComdat<'data, 'file, Mach, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile32`].
|
||||
pub type MachOComdat32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdat<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile64`].
|
||||
pub type MachOComdat64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdat<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdat<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOComdat<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectComdat<'data> for MachOComdat<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type SectionIterator = MachOComdatSectionIterator<'data, 'file, Mach, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile32`].
|
||||
pub type MachOComdatSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatSectionIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile64`].
|
||||
pub type MachOComdatSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatSectionIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdatSectionIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOComdatSectionIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`macho::MachHeader32`] and [`macho::MachHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait MachHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
type Segment: Segment<Endian = Self::Endian, Section = Self::Section>;
|
||||
type Section: Section<Endian = Self::Endian>;
|
||||
type Nlist: Nlist<Endian = Self::Endian>;
|
||||
|
||||
/// Return true if this type is a 64-bit header.
|
||||
///
|
||||
/// This is a property of the type, not a value in the header data.
|
||||
fn is_type_64(&self) -> bool;
|
||||
|
||||
/// Return true if the `magic` field signifies big-endian.
|
||||
fn is_big_endian(&self) -> bool;
|
||||
|
||||
/// Return true if the `magic` field signifies little-endian.
|
||||
fn is_little_endian(&self) -> bool;
|
||||
|
||||
fn magic(&self) -> u32;
|
||||
fn cputype(&self, endian: Self::Endian) -> u32;
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32;
|
||||
fn filetype(&self, endian: Self::Endian) -> u32;
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32;
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32;
|
||||
fn flags(&self, endian: Self::Endian) -> u32;
|
||||
|
||||
// Provided methods.
|
||||
|
||||
/// Read the file header.
|
||||
///
|
||||
/// Also checks that the magic field in the file header is a supported format.
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: u64) -> read::Result<&'data Self> {
|
||||
let header = data
|
||||
.read_at::<Self>(offset)
|
||||
.read_error("Invalid Mach-O header size or alignment")?;
|
||||
if !header.is_supported() {
|
||||
return Err(Error("Unsupported Mach-O header"));
|
||||
}
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
fn is_supported(&self) -> bool {
|
||||
self.is_little_endian() || self.is_big_endian()
|
||||
}
|
||||
|
||||
fn endian(&self) -> Result<Self::Endian> {
|
||||
Self::Endian::from_big_endian(self.is_big_endian()).read_error("Unsupported Mach-O endian")
|
||||
}
|
||||
|
||||
fn load_commands<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
header_offset: u64,
|
||||
) -> Result<LoadCommandIterator<'data, Self::Endian>> {
|
||||
let data = data
|
||||
.read_bytes_at(
|
||||
header_offset + mem::size_of::<Self>() as u64,
|
||||
self.sizeofcmds(endian).into(),
|
||||
)
|
||||
.read_error("Invalid Mach-O load command table size")?;
|
||||
Ok(LoadCommandIterator::new(endian, data, self.ncmds(endian)))
|
||||
}
|
||||
|
||||
/// Return the UUID from the `LC_UUID` load command, if one is present.
|
||||
fn uuid<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
header_offset: u64,
|
||||
) -> Result<Option<[u8; 16]>> {
|
||||
let mut commands = self.load_commands(endian, data, header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Ok(Some(uuid)) = command.uuid() {
|
||||
return Ok(Some(uuid.uuid));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> MachHeader for macho::MachHeader32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
type Segment = macho::SegmentCommand32<Endian>;
|
||||
type Section = macho::Section32<Endian>;
|
||||
type Nlist = macho::Nlist32<Endian>;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn is_big_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_MAGIC
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_CIGAM
|
||||
}
|
||||
|
||||
fn magic(&self) -> u32 {
|
||||
self.magic.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cputype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cputype.get(endian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cpusubtype.get(endian)
|
||||
}
|
||||
|
||||
fn filetype(&self, endian: Self::Endian) -> u32 {
|
||||
self.filetype.get(endian)
|
||||
}
|
||||
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.ncmds.get(endian)
|
||||
}
|
||||
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.sizeofcmds.get(endian)
|
||||
}
|
||||
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> MachHeader for macho::MachHeader64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
type Segment = macho::SegmentCommand64<Endian>;
|
||||
type Section = macho::Section64<Endian>;
|
||||
type Nlist = macho::Nlist64<Endian>;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_big_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_MAGIC_64
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_CIGAM_64
|
||||
}
|
||||
|
||||
fn magic(&self) -> u32 {
|
||||
self.magic.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cputype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cputype.get(endian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cpusubtype.get(endian)
|
||||
}
|
||||
|
||||
fn filetype(&self, endian: Self::Endian) -> u32 {
|
||||
self.filetype.get(endian)
|
||||
}
|
||||
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.ncmds.get(endian)
|
||||
}
|
||||
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.sizeofcmds.get(endian)
|
||||
}
|
||||
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
382
vendor/object/src/read/macho/load_command.rs
vendored
Normal file
382
vendor/object/src/read/macho/load_command.rs
vendored
Normal file
@@ -0,0 +1,382 @@
|
||||
use core::marker::PhantomData;
|
||||
use core::mem;
|
||||
|
||||
use crate::endian::Endian;
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::macho::{MachHeader, SymbolTable};
|
||||
use crate::read::{Bytes, Error, ReadError, ReadRef, Result, StringTable};
|
||||
|
||||
/// An iterator for the load commands from a [`MachHeader`].
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct LoadCommandIterator<'data, E: Endian> {
|
||||
endian: E,
|
||||
data: Bytes<'data>,
|
||||
ncmds: u32,
|
||||
}
|
||||
|
||||
impl<'data, E: Endian> LoadCommandIterator<'data, E> {
|
||||
pub(super) fn new(endian: E, data: &'data [u8], ncmds: u32) -> Self {
|
||||
LoadCommandIterator {
|
||||
endian,
|
||||
data: Bytes(data),
|
||||
ncmds,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the next load command.
|
||||
pub fn next(&mut self) -> Result<Option<LoadCommandData<'data, E>>> {
|
||||
if self.ncmds == 0 {
|
||||
return Ok(None);
|
||||
}
|
||||
let header = self
|
||||
.data
|
||||
.read_at::<macho::LoadCommand<E>>(0)
|
||||
.read_error("Invalid Mach-O load command header")?;
|
||||
let cmd = header.cmd.get(self.endian);
|
||||
let cmdsize = header.cmdsize.get(self.endian) as usize;
|
||||
if cmdsize < mem::size_of::<macho::LoadCommand<E>>() {
|
||||
return Err(Error("Invalid Mach-O load command size"));
|
||||
}
|
||||
let data = self
|
||||
.data
|
||||
.read_bytes(cmdsize)
|
||||
.read_error("Invalid Mach-O load command size")?;
|
||||
self.ncmds -= 1;
|
||||
Ok(Some(LoadCommandData {
|
||||
cmd,
|
||||
data,
|
||||
marker: Default::default(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// The data for a [`macho::LoadCommand`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct LoadCommandData<'data, E: Endian> {
|
||||
cmd: u32,
|
||||
// Includes the header.
|
||||
data: Bytes<'data>,
|
||||
marker: PhantomData<E>,
|
||||
}
|
||||
|
||||
impl<'data, E: Endian> LoadCommandData<'data, E> {
|
||||
/// Return the `cmd` field of the [`macho::LoadCommand`].
|
||||
///
|
||||
/// This is one of the `LC_` constants.
|
||||
pub fn cmd(&self) -> u32 {
|
||||
self.cmd
|
||||
}
|
||||
|
||||
/// Return the `cmdsize` field of the [`macho::LoadCommand`].
|
||||
pub fn cmdsize(&self) -> u32 {
|
||||
self.data.len() as u32
|
||||
}
|
||||
|
||||
/// Parse the data as the given type.
|
||||
#[inline]
|
||||
pub fn data<T: Pod>(&self) -> Result<&'data T> {
|
||||
self.data
|
||||
.read_at(0)
|
||||
.read_error("Invalid Mach-O command size")
|
||||
}
|
||||
|
||||
/// Raw bytes of this [`macho::LoadCommand`] structure.
|
||||
pub fn raw_data(&self) -> &'data [u8] {
|
||||
self.data.0
|
||||
}
|
||||
|
||||
/// Parse a load command string value.
|
||||
///
|
||||
/// Strings used by load commands are specified by offsets that are
|
||||
/// relative to the load command header.
|
||||
pub fn string(&self, endian: E, s: macho::LcStr<E>) -> Result<&'data [u8]> {
|
||||
self.data
|
||||
.read_string_at(s.offset.get(endian) as usize)
|
||||
.read_error("Invalid load command string offset")
|
||||
}
|
||||
|
||||
/// Parse the command data according to the `cmd` field.
|
||||
pub fn variant(&self) -> Result<LoadCommandVariant<'data, E>> {
|
||||
Ok(match self.cmd {
|
||||
macho::LC_SEGMENT => {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Segment32(segment, data.0)
|
||||
}
|
||||
macho::LC_SYMTAB => LoadCommandVariant::Symtab(self.data()?),
|
||||
macho::LC_THREAD | macho::LC_UNIXTHREAD => {
|
||||
let mut data = self.data;
|
||||
let thread = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Thread(thread, data.0)
|
||||
}
|
||||
macho::LC_DYSYMTAB => LoadCommandVariant::Dysymtab(self.data()?),
|
||||
macho::LC_LOAD_DYLIB
|
||||
| macho::LC_LOAD_WEAK_DYLIB
|
||||
| macho::LC_REEXPORT_DYLIB
|
||||
| macho::LC_LAZY_LOAD_DYLIB
|
||||
| macho::LC_LOAD_UPWARD_DYLIB => LoadCommandVariant::Dylib(self.data()?),
|
||||
macho::LC_ID_DYLIB => LoadCommandVariant::IdDylib(self.data()?),
|
||||
macho::LC_LOAD_DYLINKER => LoadCommandVariant::LoadDylinker(self.data()?),
|
||||
macho::LC_ID_DYLINKER => LoadCommandVariant::IdDylinker(self.data()?),
|
||||
macho::LC_PREBOUND_DYLIB => LoadCommandVariant::PreboundDylib(self.data()?),
|
||||
macho::LC_ROUTINES => LoadCommandVariant::Routines32(self.data()?),
|
||||
macho::LC_SUB_FRAMEWORK => LoadCommandVariant::SubFramework(self.data()?),
|
||||
macho::LC_SUB_UMBRELLA => LoadCommandVariant::SubUmbrella(self.data()?),
|
||||
macho::LC_SUB_CLIENT => LoadCommandVariant::SubClient(self.data()?),
|
||||
macho::LC_SUB_LIBRARY => LoadCommandVariant::SubLibrary(self.data()?),
|
||||
macho::LC_TWOLEVEL_HINTS => LoadCommandVariant::TwolevelHints(self.data()?),
|
||||
macho::LC_PREBIND_CKSUM => LoadCommandVariant::PrebindCksum(self.data()?),
|
||||
macho::LC_SEGMENT_64 => {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Segment64(segment, data.0)
|
||||
}
|
||||
macho::LC_ROUTINES_64 => LoadCommandVariant::Routines64(self.data()?),
|
||||
macho::LC_UUID => LoadCommandVariant::Uuid(self.data()?),
|
||||
macho::LC_RPATH => LoadCommandVariant::Rpath(self.data()?),
|
||||
macho::LC_CODE_SIGNATURE
|
||||
| macho::LC_SEGMENT_SPLIT_INFO
|
||||
| macho::LC_FUNCTION_STARTS
|
||||
| macho::LC_DATA_IN_CODE
|
||||
| macho::LC_DYLIB_CODE_SIGN_DRS
|
||||
| macho::LC_LINKER_OPTIMIZATION_HINT
|
||||
| macho::LC_DYLD_EXPORTS_TRIE
|
||||
| macho::LC_DYLD_CHAINED_FIXUPS => LoadCommandVariant::LinkeditData(self.data()?),
|
||||
macho::LC_ENCRYPTION_INFO => LoadCommandVariant::EncryptionInfo32(self.data()?),
|
||||
macho::LC_DYLD_INFO | macho::LC_DYLD_INFO_ONLY => {
|
||||
LoadCommandVariant::DyldInfo(self.data()?)
|
||||
}
|
||||
macho::LC_VERSION_MIN_MACOSX
|
||||
| macho::LC_VERSION_MIN_IPHONEOS
|
||||
| macho::LC_VERSION_MIN_TVOS
|
||||
| macho::LC_VERSION_MIN_WATCHOS => LoadCommandVariant::VersionMin(self.data()?),
|
||||
macho::LC_DYLD_ENVIRONMENT => LoadCommandVariant::DyldEnvironment(self.data()?),
|
||||
macho::LC_MAIN => LoadCommandVariant::EntryPoint(self.data()?),
|
||||
macho::LC_SOURCE_VERSION => LoadCommandVariant::SourceVersion(self.data()?),
|
||||
macho::LC_ENCRYPTION_INFO_64 => LoadCommandVariant::EncryptionInfo64(self.data()?),
|
||||
macho::LC_LINKER_OPTION => LoadCommandVariant::LinkerOption(self.data()?),
|
||||
macho::LC_NOTE => LoadCommandVariant::Note(self.data()?),
|
||||
macho::LC_BUILD_VERSION => LoadCommandVariant::BuildVersion(self.data()?),
|
||||
macho::LC_FILESET_ENTRY => LoadCommandVariant::FilesetEntry(self.data()?),
|
||||
_ => LoadCommandVariant::Other,
|
||||
})
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SegmentCommand32`].
|
||||
///
|
||||
/// Returns the segment command and the data containing the sections.
|
||||
pub fn segment_32(self) -> Result<Option<(&'data macho::SegmentCommand32<E>, &'data [u8])>> {
|
||||
if self.cmd == macho::LC_SEGMENT {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
Ok(Some((segment, data.0)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SymtabCommand`].
///
/// Returns the symbol table command, which gives the file offsets of the
/// symbol and string tables.
pub fn symtab(self) -> Result<Option<&'data macho::SymtabCommand<E>>> {
    if self.cmd == macho::LC_SYMTAB {
        // `data()` may fail if the command is too small for the struct.
        Some(self.data()).transpose()
    } else {
        Ok(None)
    }
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DysymtabCommand`].
|
||||
pub fn dysymtab(self) -> Result<Option<&'data macho::DysymtabCommand<E>>> {
|
||||
if self.cmd == macho::LC_DYSYMTAB {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DylibCommand`].
|
||||
pub fn dylib(self) -> Result<Option<&'data macho::DylibCommand<E>>> {
|
||||
if self.cmd == macho::LC_LOAD_DYLIB
|
||||
|| self.cmd == macho::LC_LOAD_WEAK_DYLIB
|
||||
|| self.cmd == macho::LC_REEXPORT_DYLIB
|
||||
|| self.cmd == macho::LC_LAZY_LOAD_DYLIB
|
||||
|| self.cmd == macho::LC_LOAD_UPWARD_DYLIB
|
||||
{
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::UuidCommand`].
|
||||
pub fn uuid(self) -> Result<Option<&'data macho::UuidCommand<E>>> {
|
||||
if self.cmd == macho::LC_UUID {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SegmentCommand64`].
|
||||
pub fn segment_64(self) -> Result<Option<(&'data macho::SegmentCommand64<E>, &'data [u8])>> {
|
||||
if self.cmd == macho::LC_SEGMENT_64 {
|
||||
let mut data = self.data;
|
||||
let command = data.read().read_error("Invalid Mach-O command size")?;
|
||||
Ok(Some((command, data.0)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DyldInfoCommand`].
|
||||
pub fn dyld_info(self) -> Result<Option<&'data macho::DyldInfoCommand<E>>> {
|
||||
if self.cmd == macho::LC_DYLD_INFO || self.cmd == macho::LC_DYLD_INFO_ONLY {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as an [`macho::EntryPointCommand`].
|
||||
pub fn entry_point(self) -> Result<Option<&'data macho::EntryPointCommand<E>>> {
|
||||
if self.cmd == macho::LC_MAIN {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::BuildVersionCommand`].
|
||||
pub fn build_version(self) -> Result<Option<&'data macho::BuildVersionCommand<E>>> {
|
||||
if self.cmd == macho::LC_BUILD_VERSION {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A [`macho::LoadCommand`] that has been interpreted according to its `cmd` field.
///
/// Each variant borrows the command payload directly from the original
/// command data; no copies are made.
#[derive(Debug, Clone, Copy)]
#[non_exhaustive]
pub enum LoadCommandVariant<'data, E: Endian> {
    /// `LC_SEGMENT`
    Segment32(&'data macho::SegmentCommand32<E>, &'data [u8]),
    /// `LC_SYMTAB`
    Symtab(&'data macho::SymtabCommand<E>),
    // obsolete: `LC_SYMSEG`
    //Symseg(&'data macho::SymsegCommand<E>),
    /// `LC_THREAD` or `LC_UNIXTHREAD`
    Thread(&'data macho::ThreadCommand<E>, &'data [u8]),
    // obsolete: `LC_IDFVMLIB` or `LC_LOADFVMLIB`
    //Fvmlib(&'data macho::FvmlibCommand<E>),
    // obsolete: `LC_IDENT`
    //Ident(&'data macho::IdentCommand<E>),
    // internal: `LC_FVMFILE`
    //Fvmfile(&'data macho::FvmfileCommand<E>),
    // internal: `LC_PREPAGE`
    /// `LC_DYSYMTAB`
    Dysymtab(&'data macho::DysymtabCommand<E>),
    /// `LC_LOAD_DYLIB`, `LC_LOAD_WEAK_DYLIB`, `LC_REEXPORT_DYLIB`,
    /// `LC_LAZY_LOAD_DYLIB`, or `LC_LOAD_UPWARD_DYLIB`
    Dylib(&'data macho::DylibCommand<E>),
    /// `LC_ID_DYLIB`
    IdDylib(&'data macho::DylibCommand<E>),
    /// `LC_LOAD_DYLINKER`
    LoadDylinker(&'data macho::DylinkerCommand<E>),
    /// `LC_ID_DYLINKER`
    IdDylinker(&'data macho::DylinkerCommand<E>),
    /// `LC_PREBOUND_DYLIB`
    PreboundDylib(&'data macho::PreboundDylibCommand<E>),
    /// `LC_ROUTINES`
    Routines32(&'data macho::RoutinesCommand32<E>),
    /// `LC_SUB_FRAMEWORK`
    SubFramework(&'data macho::SubFrameworkCommand<E>),
    /// `LC_SUB_UMBRELLA`
    SubUmbrella(&'data macho::SubUmbrellaCommand<E>),
    /// `LC_SUB_CLIENT`
    SubClient(&'data macho::SubClientCommand<E>),
    /// `LC_SUB_LIBRARY`
    SubLibrary(&'data macho::SubLibraryCommand<E>),
    /// `LC_TWOLEVEL_HINTS`
    TwolevelHints(&'data macho::TwolevelHintsCommand<E>),
    /// `LC_PREBIND_CKSUM`
    PrebindCksum(&'data macho::PrebindCksumCommand<E>),
    /// `LC_SEGMENT_64`
    Segment64(&'data macho::SegmentCommand64<E>, &'data [u8]),
    /// `LC_ROUTINES_64`
    Routines64(&'data macho::RoutinesCommand64<E>),
    /// `LC_UUID`
    Uuid(&'data macho::UuidCommand<E>),
    /// `LC_RPATH`
    Rpath(&'data macho::RpathCommand<E>),
    /// `LC_CODE_SIGNATURE`, `LC_SEGMENT_SPLIT_INFO`, `LC_FUNCTION_STARTS`,
    /// `LC_DATA_IN_CODE`, `LC_DYLIB_CODE_SIGN_DRS`, `LC_LINKER_OPTIMIZATION_HINT`,
    /// `LC_DYLD_EXPORTS_TRIE`, or `LC_DYLD_CHAINED_FIXUPS`.
    LinkeditData(&'data macho::LinkeditDataCommand<E>),
    /// `LC_ENCRYPTION_INFO`
    EncryptionInfo32(&'data macho::EncryptionInfoCommand32<E>),
    /// `LC_DYLD_INFO` or `LC_DYLD_INFO_ONLY`
    DyldInfo(&'data macho::DyldInfoCommand<E>),
    /// `LC_VERSION_MIN_MACOSX`, `LC_VERSION_MIN_IPHONEOS`, `LC_VERSION_MIN_WATCHOS`,
    /// or `LC_VERSION_MIN_TVOS`
    VersionMin(&'data macho::VersionMinCommand<E>),
    /// `LC_DYLD_ENVIRONMENT`
    DyldEnvironment(&'data macho::DylinkerCommand<E>),
    /// `LC_MAIN`
    EntryPoint(&'data macho::EntryPointCommand<E>),
    /// `LC_SOURCE_VERSION`
    SourceVersion(&'data macho::SourceVersionCommand<E>),
    /// `LC_ENCRYPTION_INFO_64`
    EncryptionInfo64(&'data macho::EncryptionInfoCommand64<E>),
    /// `LC_LINKER_OPTION`
    LinkerOption(&'data macho::LinkerOptionCommand<E>),
    /// `LC_NOTE`
    Note(&'data macho::NoteCommand<E>),
    /// `LC_BUILD_VERSION`
    BuildVersion(&'data macho::BuildVersionCommand<E>),
    /// `LC_FILESET_ENTRY`
    FilesetEntry(&'data macho::FilesetEntryCommand<E>),
    /// An unrecognized or obsolete load command.
    Other,
}
|
||||
|
||||
impl<E: Endian> macho::SymtabCommand<E> {
    /// Return the symbol table that this command references.
    ///
    /// `data` must be the file data that the `symoff`/`stroff` offsets in
    /// this command are relative to.
    ///
    /// # Errors
    ///
    /// Returns an error if the symbol table offset/count is out of bounds,
    /// or if `stroff + strsize` overflows `u64`.
    pub fn symbols<'data, Mach: MachHeader<Endian = E>, R: ReadRef<'data>>(
        &self,
        endian: E,
        data: R,
    ) -> Result<SymbolTable<'data, Mach, R>> {
        let symbols = data
            .read_slice_at(
                self.symoff.get(endian).into(),
                self.nsyms.get(endian) as usize,
            )
            .read_error("Invalid Mach-O symbol table offset or size")?;
        let str_start: u64 = self.stroff.get(endian).into();
        // Guard against `stroff + strsize` wrapping around.
        let str_end = str_start
            .checked_add(self.strsize.get(endian).into())
            .read_error("Invalid Mach-O string table length")?;
        // The string table itself is validated lazily when names are looked up.
        let strings = StringTable::new(data, str_start, str_end);
        Ok(SymbolTable::new(symbols, strings))
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::LittleEndian;

    // Exercise `LoadCommandIterator` with malformed `cmdsize` values.
    #[test]
    fn cmd_size_invalid() {
        // Data too short to contain any command header at all.
        let mut commands = LoadCommandIterator::new(LittleEndian, &[0; 8], 10);
        assert!(commands.next().is_err());
        // cmdsize of 7 is rejected (presumably below the minimum command
        // header size of 8 — the accepted case below suggests so).
        let mut commands = LoadCommandIterator::new(LittleEndian, &[0, 0, 0, 0, 7, 0, 0, 0, 0], 10);
        assert!(commands.next().is_err());
        // cmdsize of 8 (exactly the header) is accepted.
        let mut commands = LoadCommandIterator::new(LittleEndian, &[0, 0, 0, 0, 8, 0, 0, 0, 0], 10);
        assert!(commands.next().is_ok());
    }
}
|
||||
72
vendor/object/src/read/macho/mod.rs
vendored
Normal file
72
vendor/object/src/read/macho/mod.rs
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
//! Support for reading Mach-O files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between 32-bit and 64-bit Mach-O
|
||||
//! files. The primary trait for this is [`MachHeader`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`MachOFile`] implements the [`Object`](crate::read::Object) trait for Mach-O files.
|
||||
//! [`MachOFile`] is parameterised by [`MachHeader`] to allow reading both 32-bit and
|
||||
//! 64-bit Mach-O files. There are type aliases for these parameters ([`MachOFile32`] and
|
||||
//! [`MachOFile64`]).
|
||||
//!
|
||||
//! ## Low level API
|
||||
//!
|
||||
//! The [`MachHeader`] trait can be directly used to parse both [`macho::MachHeader32`]
|
||||
//! and [`macho::MachHeader64`]. Additionally, [`FatHeader`] and the [`FatArch`] trait
|
||||
//! can be used to iterate images in multi-architecture binaries, and [`DyldCache`] can
|
||||
//! be used to locate images in a dyld shared cache.
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::macho;
|
||||
//! use object::read::macho::{MachHeader, Nlist};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each symbol.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let header = macho::MachHeader64::<object::Endianness>::parse(&*data, 0)?;
|
||||
//! let endian = header.endian()?;
|
||||
//! let mut commands = header.load_commands(endian, &*data, 0)?;
|
||||
//! while let Some(command) = commands.next()? {
|
||||
//! if let Some(symtab_command) = command.symtab()? {
|
||||
//! let symbols = symtab_command.symbols::<macho::MachHeader64<_>, _>(endian, &*data)?;
|
||||
//! for symbol in symbols.iter() {
|
||||
//! let name = symbol.name(endian, symbols.strings())?;
|
||||
//! println!("{}", String::from_utf8_lossy(name));
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::macho;
|
||||
|
||||
mod dyld_cache;
|
||||
pub use dyld_cache::*;
|
||||
|
||||
mod fat;
|
||||
pub use fat::*;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod load_command;
|
||||
pub use load_command::*;
|
||||
|
||||
mod segment;
|
||||
pub use segment::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod symbol;
|
||||
pub use symbol::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
158
vendor/object/src/read/macho/relocation.rs
vendored
Normal file
158
vendor/object/src/read/macho/relocation.rs
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
use core::{fmt, slice};
|
||||
|
||||
use crate::endian::Endianness;
|
||||
use crate::macho;
|
||||
use crate::read::{
|
||||
ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget, SectionIndex,
|
||||
SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile};
|
||||
|
||||
/// An iterator for the relocations in a [`MachOSection32`](super::MachOSection32).
pub type MachORelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachORelocationIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
/// An iterator for the relocations in a [`MachOSection64`](super::MachOSection64).
pub type MachORelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachORelocationIterator<'data, 'file, macho::MachHeader64<Endian>, R>;

/// An iterator for the relocations in a [`MachOSection`](super::MachOSection).
pub struct MachORelocationIterator<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // The file is needed for its endianness and CPU type when decoding entries.
    pub(super) file: &'file MachOFile<'data, Mach, R>,
    // Raw relocation entries for one section.
    pub(super) relocations: slice::Iter<'data, macho::Relocation<Mach::Endian>>,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachORelocationIterator<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Yields (section-relative address, decoded relocation) pairs.
    type Item = (u64, Relocation);

    fn next(&mut self) -> Option<Self::Item> {
        // Addend carried over from a preceding ARM64_RELOC_ADDEND entry, if any.
        let mut paired_addend = 0;
        // Loop because some entries (scattered, ARM64_RELOC_ADDEND) do not
        // produce an item by themselves.
        loop {
            let reloc = self.relocations.next()?;
            let endian = self.file.endian;
            let cputype = self.file.header.cputype(endian);
            if reloc.r_scattered(endian, cputype) {
                // FIXME: handle scattered relocations
                // We need to add `RelocationTarget::Address` for this.
                continue;
            }
            let reloc = reloc.info(self.file.endian);
            let mut encoding = RelocationEncoding::Generic;
            // Map the architecture-specific (r_type, r_pcrel) pair onto the
            // generic RelocationKind; anything unrecognized is passed through
            // as RelocationKind::MachO.
            let kind = match cputype {
                macho::CPU_TYPE_ARM => match (reloc.r_type, reloc.r_pcrel) {
                    (macho::ARM_RELOC_VANILLA, false) => RelocationKind::Absolute,
                    _ => RelocationKind::MachO {
                        value: reloc.r_type,
                        relative: reloc.r_pcrel,
                    },
                },
                macho::CPU_TYPE_ARM64 | macho::CPU_TYPE_ARM64_32 => {
                    match (reloc.r_type, reloc.r_pcrel) {
                        (macho::ARM64_RELOC_UNSIGNED, false) => RelocationKind::Absolute,
                        (macho::ARM64_RELOC_ADDEND, _) => {
                            // r_symbolnum holds a 24-bit addend; the shift
                            // pair sign-extends it to i64. It applies to the
                            // next relocation entry, so don't emit one here.
                            paired_addend = i64::from(reloc.r_symbolnum)
                                .wrapping_shl(64 - 24)
                                .wrapping_shr(64 - 24);
                            continue;
                        }
                        _ => RelocationKind::MachO {
                            value: reloc.r_type,
                            relative: reloc.r_pcrel,
                        },
                    }
                }
                macho::CPU_TYPE_X86 => match (reloc.r_type, reloc.r_pcrel) {
                    (macho::GENERIC_RELOC_VANILLA, false) => RelocationKind::Absolute,
                    _ => RelocationKind::MachO {
                        value: reloc.r_type,
                        relative: reloc.r_pcrel,
                    },
                },
                macho::CPU_TYPE_X86_64 => match (reloc.r_type, reloc.r_pcrel) {
                    (macho::X86_64_RELOC_UNSIGNED, false) => RelocationKind::Absolute,
                    (macho::X86_64_RELOC_SIGNED, true) => {
                        encoding = RelocationEncoding::X86RipRelative;
                        RelocationKind::Relative
                    }
                    (macho::X86_64_RELOC_BRANCH, true) => {
                        encoding = RelocationEncoding::X86Branch;
                        RelocationKind::Relative
                    }
                    (macho::X86_64_RELOC_GOT, true) => RelocationKind::GotRelative,
                    (macho::X86_64_RELOC_GOT_LOAD, true) => {
                        encoding = RelocationEncoding::X86RipRelativeMovq;
                        RelocationKind::GotRelative
                    }
                    _ => RelocationKind::MachO {
                        value: reloc.r_type,
                        relative: reloc.r_pcrel,
                    },
                },
                _ => RelocationKind::MachO {
                    value: reloc.r_type,
                    relative: reloc.r_pcrel,
                },
            };
            // r_length encodes log2 of the byte size, so size is in bits.
            let size = 8 << reloc.r_length;
            // r_symbolnum is either a symbol index or a 1-based section index,
            // depending on r_extern.
            let target = if reloc.r_extern {
                RelocationTarget::Symbol(SymbolIndex(reloc.r_symbolnum as usize))
            } else {
                RelocationTarget::Section(SectionIndex(reloc.r_symbolnum as usize))
            };
            // With no paired addend, the addend is stored implicitly at the
            // relocated location.
            let implicit_addend = paired_addend == 0;
            let mut addend = paired_addend;
            if reloc.r_pcrel {
                // For PC relative relocations on some architectures, the
                // addend does not include the offset required due to the
                // PC being different from the place of the relocation.
                // This differs from other file formats, so adjust the
                // addend here to account for this.
                match cputype {
                    macho::CPU_TYPE_X86 => {
                        addend -= 1 << reloc.r_length;
                    }
                    macho::CPU_TYPE_X86_64 => {
                        addend -= 1 << reloc.r_length;
                        match reloc.r_type {
                            macho::X86_64_RELOC_SIGNED_1 => addend -= 1,
                            macho::X86_64_RELOC_SIGNED_2 => addend -= 2,
                            macho::X86_64_RELOC_SIGNED_4 => addend -= 4,
                            _ => {}
                        }
                    }
                    // TODO: maybe missing support for some architectures and relocations
                    _ => {}
                }
            }
            return Some((
                reloc.r_address as u64,
                Relocation {
                    kind,
                    encoding,
                    size,
                    target,
                    addend,
                    implicit_addend,
                },
            ));
        }
    }
}
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachORelocationIterator<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Only the type name is printed; the raw entry iterator has no useful
    // Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("MachORelocationIterator").finish()
    }
}
|
||||
389
vendor/object/src/read/macho/section.rs
vendored
Normal file
389
vendor/object/src/read/macho/section.rs
vendored
Normal file
@@ -0,0 +1,389 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{fmt, result, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, ObjectSection, ReadError, ReadRef, Result,
|
||||
SectionFlags, SectionIndex, SectionKind,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile, MachORelocationIterator};
|
||||
|
||||
/// An iterator for the sections in a [`MachOFile32`](super::MachOFile32).
pub type MachOSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSectionIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
/// An iterator for the sections in a [`MachOFile64`](super::MachOFile64).
pub type MachOSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSectionIterator<'data, 'file, macho::MachHeader64<Endian>, R>;

/// An iterator for the sections in a [`MachOFile`].
pub struct MachOSectionIterator<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Back-reference to the file, attached to each yielded section.
    pub(super) file: &'file MachOFile<'data, Mach, R>,
    // Pre-parsed per-section state stored on the file.
    pub(super) iter: slice::Iter<'file, MachOSectionInternal<'data, Mach>>,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachOSectionIterator<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // It's painful to do much better than this
        f.debug_struct("MachOSectionIterator").finish()
    }
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSectionIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSection<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|&internal| MachOSection {
|
||||
file: self.file,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`MachOFile32`](super::MachOFile32).
pub type MachOSection32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSection<'data, 'file, macho::MachHeader32<Endian>, R>;
/// A section in a [`MachOFile64`](super::MachOFile64).
pub type MachOSection64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSection<'data, 'file, macho::MachHeader64<Endian>, R>;

/// A section in a [`MachOFile`].
///
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
#[derive(Debug)]
pub struct MachOSection<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // The file this section belongs to; supplies endianness and data.
    pub(super) file: &'file MachOFile<'data, Mach, R>,
    // Pre-parsed section header and derived metadata.
    pub(super) internal: MachOSectionInternal<'data, Mach>,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSection<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Read this section's raw bytes.
    //
    // The section data is read relative to its containing segment's data,
    // which is looked up via the stored segment index.
    fn bytes(&self) -> Result<&'data [u8]> {
        let segment_index = self.internal.segment_index;
        let segment = self.file.segment_internal(segment_index)?;
        self.internal
            .section
            .data(self.file.endian, segment.data)
            .read_error("Invalid Mach-O section size or offset")
    }
}
|
||||
|
||||
// Marker impl: seals `ObjectSection` so it cannot be implemented outside this crate.
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSection<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSection<'data> for MachOSection<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    type RelocationIterator = MachORelocationIterator<'data, 'file, Mach, R>;

    #[inline]
    fn index(&self) -> SectionIndex {
        self.internal.index
    }

    #[inline]
    fn address(&self) -> u64 {
        self.internal.section.addr(self.file.endian).into()
    }

    #[inline]
    fn size(&self) -> u64 {
        self.internal.section.size(self.file.endian).into()
    }

    #[inline]
    fn align(&self) -> u64 {
        // The header stores alignment as a power of two; guard against a
        // shift overflow for nonsense values >= 64 by returning 0.
        let align = self.internal.section.align(self.file.endian);
        if align < 64 {
            1 << align
        } else {
            0
        }
    }

    #[inline]
    fn file_range(&self) -> Option<(u64, u64)> {
        self.internal.section.file_range(self.file.endian)
    }

    #[inline]
    fn data(&self) -> Result<&'data [u8]> {
        self.bytes()
    }

    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    // Mach-O sections are never compressed, so the compressed accessors
    // simply wrap the plain data.
    #[inline]
    fn compressed_file_range(&self) -> Result<CompressedFileRange> {
        Ok(CompressedFileRange::none(self.file_range()))
    }

    #[inline]
    fn compressed_data(&self) -> Result<CompressedData<'data>> {
        self.data().map(CompressedData::none)
    }

    #[inline]
    fn name_bytes(&self) -> Result<&[u8]> {
        Ok(self.internal.section.name())
    }

    #[inline]
    fn name(&self) -> Result<&str> {
        str::from_utf8(self.internal.section.name())
            .ok()
            .read_error("Non UTF-8 Mach-O section name")
    }

    #[inline]
    fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
        Ok(Some(self.internal.section.segment_name()))
    }

    #[inline]
    fn segment_name(&self) -> Result<Option<&str>> {
        Ok(Some(
            str::from_utf8(self.internal.section.segment_name())
                .ok()
                .read_error("Non UTF-8 Mach-O segment name")?,
        ))
    }

    fn kind(&self) -> SectionKind {
        self.internal.kind
    }

    fn relocations(&self) -> MachORelocationIterator<'data, 'file, Mach, R> {
        MachORelocationIterator {
            file: self.file,
            // An invalid relocation table is silently treated as empty,
            // since this method cannot report errors.
            relocations: self
                .internal
                .section
                .relocations(self.file.endian, self.file.data)
                .unwrap_or(&[])
                .iter(),
        }
    }

    fn flags(&self) -> SectionFlags {
        SectionFlags::MachO {
            flags: self.internal.section.flags(self.file.endian),
        }
    }
}
|
||||
|
||||
// Pre-parsed per-section state, computed once when the file is opened.
#[derive(Debug, Clone, Copy)]
pub(super) struct MachOSectionInternal<'data, Mach: MachHeader> {
    // 1-based index of this section within the file.
    pub index: SectionIndex,
    // Index of the segment containing this section, used to resolve data.
    pub segment_index: usize,
    // Section kind derived from the segment/section name pair.
    pub kind: SectionKind,
    // Borrowed raw section header.
    pub section: &'data Mach::Section,
}
|
||||
|
||||
impl<'data, Mach: MachHeader> MachOSectionInternal<'data, Mach> {
    // Build the internal section state, classifying the section kind by its
    // well-known (segment name, section name) pair.
    pub(super) fn parse(
        index: SectionIndex,
        segment_index: usize,
        section: &'data Mach::Section,
    ) -> Self {
        // TODO: we don't validate flags, should we?
        let kind = match (section.segment_name(), section.name()) {
            (b"__TEXT", b"__text") => SectionKind::Text,
            (b"__TEXT", b"__const") => SectionKind::ReadOnlyData,
            (b"__TEXT", b"__cstring") => SectionKind::ReadOnlyString,
            (b"__TEXT", b"__literal4") => SectionKind::ReadOnlyData,
            (b"__TEXT", b"__literal8") => SectionKind::ReadOnlyData,
            (b"__TEXT", b"__literal16") => SectionKind::ReadOnlyData,
            (b"__TEXT", b"__eh_frame") => SectionKind::ReadOnlyData,
            (b"__TEXT", b"__gcc_except_tab") => SectionKind::ReadOnlyData,
            (b"__DATA", b"__data") => SectionKind::Data,
            (b"__DATA", b"__const") => SectionKind::ReadOnlyData,
            (b"__DATA", b"__bss") => SectionKind::UninitializedData,
            (b"__DATA", b"__common") => SectionKind::Common,
            (b"__DATA", b"__thread_data") => SectionKind::Tls,
            (b"__DATA", b"__thread_bss") => SectionKind::UninitializedTls,
            (b"__DATA", b"__thread_vars") => SectionKind::TlsVariables,
            // Any section in __DWARF holds debug info.
            (b"__DWARF", _) => SectionKind::Debug,
            _ => SectionKind::Unknown,
        };
        MachOSectionInternal {
            index,
            segment_index,
            kind,
            section,
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`macho::Section32`] and [`macho::Section64`].
#[allow(missing_docs)]
pub trait Section: Debug + Pod {
    // `u32` for 32-bit sections, `u64` for 64-bit sections.
    type Word: Into<u64>;
    type Endian: endian::Endian;

    fn sectname(&self) -> &[u8; 16];
    fn segname(&self) -> &[u8; 16];
    fn addr(&self, endian: Self::Endian) -> Self::Word;
    fn size(&self, endian: Self::Endian) -> Self::Word;
    fn offset(&self, endian: Self::Endian) -> u32;
    fn align(&self, endian: Self::Endian) -> u32;
    fn reloff(&self, endian: Self::Endian) -> u32;
    fn nreloc(&self, endian: Self::Endian) -> u32;
    fn flags(&self, endian: Self::Endian) -> u32;

    /// Return the `sectname` bytes up until the null terminator.
    fn name(&self) -> &[u8] {
        let sectname = &self.sectname()[..];
        // A name that fills all 16 bytes has no terminator.
        match memchr::memchr(b'\0', sectname) {
            Some(end) => &sectname[..end],
            None => sectname,
        }
    }

    /// Return the `segname` bytes up until the null terminator.
    fn segment_name(&self) -> &[u8] {
        let segname = &self.segname()[..];
        match memchr::memchr(b'\0', segname) {
            Some(end) => &segname[..end],
            None => segname,
        }
    }

    /// Return the offset and size of the section in the file.
    ///
    /// Returns `None` for sections that have no data in the file.
    fn file_range(&self, endian: Self::Endian) -> Option<(u64, u64)> {
        // Zero-fill section types occupy no file space.
        match self.flags(endian) & macho::SECTION_TYPE {
            macho::S_ZEROFILL | macho::S_GB_ZEROFILL | macho::S_THREAD_LOCAL_ZEROFILL => None,
            _ => Some((self.offset(endian).into(), self.size(endian).into())),
        }
    }

    /// Return the section data.
    ///
    /// Returns `Ok(&[])` if the section has no data.
    /// Returns `Err` for invalid values.
    fn data<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> result::Result<&'data [u8], ()> {
        if let Some((offset, size)) = self.file_range(endian) {
            data.read_bytes_at(offset, size)
        } else {
            Ok(&[])
        }
    }

    /// Return the relocation array.
    ///
    /// Returns `Err` for invalid values.
    fn relocations<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> Result<&'data [macho::Relocation<Self::Endian>]> {
        data.read_slice_at(self.reloff(endian).into(), self.nreloc(endian) as usize)
            .read_error("Invalid Mach-O relocations offset or number")
    }
}
|
||||
|
||||
// Field accessors for the 32-bit section header; each value is read with
// the given endianness.
impl<Endian: endian::Endian> Section for macho::Section32<Endian> {
    type Word = u32;
    type Endian = Endian;

    fn sectname(&self) -> &[u8; 16] {
        &self.sectname
    }
    fn segname(&self) -> &[u8; 16] {
        &self.segname
    }
    fn addr(&self, endian: Self::Endian) -> Self::Word {
        self.addr.get(endian)
    }
    fn size(&self, endian: Self::Endian) -> Self::Word {
        self.size.get(endian)
    }
    fn offset(&self, endian: Self::Endian) -> u32 {
        self.offset.get(endian)
    }
    fn align(&self, endian: Self::Endian) -> u32 {
        self.align.get(endian)
    }
    fn reloff(&self, endian: Self::Endian) -> u32 {
        self.reloff.get(endian)
    }
    fn nreloc(&self, endian: Self::Endian) -> u32 {
        self.nreloc.get(endian)
    }
    fn flags(&self, endian: Self::Endian) -> u32 {
        self.flags.get(endian)
    }
}
|
||||
|
||||
// Field accessors for the 64-bit section header; identical to the 32-bit
// impl except that `addr`/`size` are 64-bit words.
impl<Endian: endian::Endian> Section for macho::Section64<Endian> {
    type Word = u64;
    type Endian = Endian;

    fn sectname(&self) -> &[u8; 16] {
        &self.sectname
    }
    fn segname(&self) -> &[u8; 16] {
        &self.segname
    }
    fn addr(&self, endian: Self::Endian) -> Self::Word {
        self.addr.get(endian)
    }
    fn size(&self, endian: Self::Endian) -> Self::Word {
        self.size.get(endian)
    }
    fn offset(&self, endian: Self::Endian) -> u32 {
        self.offset.get(endian)
    }
    fn align(&self, endian: Self::Endian) -> u32 {
        self.align.get(endian)
    }
    fn reloff(&self, endian: Self::Endian) -> u32 {
        self.reloff.get(endian)
    }
    fn nreloc(&self, endian: Self::Endian) -> u32 {
        self.nreloc.get(endian)
    }
    fn flags(&self, endian: Self::Endian) -> u32 {
        self.flags.get(endian)
    }
}
|
||||
303
vendor/object/src/read/macho/segment.rs
vendored
Normal file
303
vendor/object/src/read/macho/segment.rs
vendored
Normal file
@@ -0,0 +1,303 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{result, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{self, ObjectSegment, ReadError, ReadRef, Result, SegmentFlags};
|
||||
|
||||
use super::{LoadCommandData, MachHeader, MachOFile, Section};
|
||||
|
||||
/// An iterator for the segments in a [`MachOFile32`](super::MachOFile32).
pub type MachOSegmentIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSegmentIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
/// An iterator for the segments in a [`MachOFile64`](super::MachOFile64).
pub type MachOSegmentIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSegmentIterator<'data, 'file, macho::MachHeader64<Endian>, R>;

/// An iterator for the segments in a [`MachOFile`].
#[derive(Debug)]
pub struct MachOSegmentIterator<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Back-reference to the file, attached to each yielded segment.
    pub(super) file: &'file MachOFile<'data, Mach, R>,
    // Pre-parsed per-segment state stored on the file.
    pub(super) iter: slice::Iter<'file, MachOSegmentInternal<'data, Mach, R>>,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSegmentIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSegment<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|internal| MachOSegment {
|
||||
file: self.file,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in a [`MachOFile32`](super::MachOFile32).
pub type MachOSegment32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSegment<'data, 'file, macho::MachHeader32<Endian>, R>;
/// A segment in a [`MachOFile64`](super::MachOFile64).
pub type MachOSegment64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSegment<'data, 'file, macho::MachHeader64<Endian>, R>;

/// A segment in a [`MachOFile`].
///
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
#[derive(Debug)]
pub struct MachOSegment<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // The file this segment belongs to; supplies endianness and data.
    file: &'file MachOFile<'data, Mach, R>,
    // Borrowed pre-parsed segment state.
    internal: &'file MachOSegmentInternal<'data, Mach, R>,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSegment<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Read this segment's raw bytes from the file data.
    fn bytes(&self) -> Result<&'data [u8]> {
        self.internal
            .segment
            .data(self.file.endian, self.file.data)
            .read_error("Invalid Mach-O segment size or offset")
    }
}
|
||||
|
||||
// Seal the `ObjectSegment` implementation: `read::private::Sealed` cannot be
// implemented outside this crate, so neither can the public trait.
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSegment<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSegment<'data> for MachOSegment<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // Virtual memory address of the segment.
    #[inline]
    fn address(&self) -> u64 {
        self.internal.segment.vmaddr(self.file.endian).into()
    }

    // Virtual memory size (may exceed the size in the file).
    #[inline]
    fn size(&self) -> u64 {
        self.internal.segment.vmsize(self.file.endian).into()
    }

    #[inline]
    fn align(&self) -> u64 {
        // Page size. NOTE(review): hard-coded 4 KiB; the segment command does
        // not record an alignment, so this is an approximation — some targets
        // use larger pages.
        0x1000
    }

    #[inline]
    fn file_range(&self) -> (u64, u64) {
        self.internal.segment.file_range(self.file.endian)
    }

    /// The segment's bytes as stored in the file.
    fn data(&self) -> Result<&'data [u8]> {
        self.bytes()
    }

    /// A sub-range of the segment data addressed by virtual address.
    ///
    /// Returns `Ok(None)` when the requested range is not contained in the
    /// segment's file data.
    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    // The segment name, truncated at the first NUL (see `Segment::name`).
    #[inline]
    fn name_bytes(&self) -> Result<Option<&[u8]>> {
        Ok(Some(self.internal.segment.name()))
    }

    #[inline]
    fn name(&self) -> Result<Option<&str>> {
        Ok(Some(
            str::from_utf8(self.internal.segment.name())
                .ok()
                .read_error("Non UTF-8 Mach-O segment name")?,
        ))
    }

    // Expose the raw Mach-O flags and protection fields unchanged.
    #[inline]
    fn flags(&self) -> SegmentFlags {
        let flags = self.internal.segment.flags(self.file.endian);
        let maxprot = self.internal.segment.maxprot(self.file.endian);
        let initprot = self.internal.segment.initprot(self.file.endian);
        SegmentFlags::MachO {
            flags,
            maxprot,
            initprot,
        }
    }
}
|
||||
|
||||
// Internal per-segment record collected when the file is parsed.
#[derive(Debug, Clone, Copy)]
pub(super) struct MachOSegmentInternal<'data, Mach: MachHeader, R: ReadRef<'data>> {
    // The data source that `segment`'s file range refers into.
    pub data: R,
    // The raw segment load command.
    pub segment: &'data Mach::Segment,
}
|
||||
|
||||
/// A trait for generic access to [`macho::SegmentCommand32`] and [`macho::SegmentCommand64`].
#[allow(missing_docs)]
pub trait Segment: Debug + Pod {
    // Word is u32 for 32-bit segments and u64 for 64-bit segments; the
    // `Into<u64>` bound lets callers work in u64 uniformly.
    type Word: Into<u64>;
    type Endian: endian::Endian;
    type Section: Section<Endian = Self::Endian>;

    /// Try to interpret a load command as this segment type.
    ///
    /// Returns `Ok(None)` if the command is some other kind of load command.
    fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>>;

    // Raw field accessors; each decodes one header field with the given
    // endianness.
    fn cmd(&self, endian: Self::Endian) -> u32;
    fn cmdsize(&self, endian: Self::Endian) -> u32;
    fn segname(&self) -> &[u8; 16];
    fn vmaddr(&self, endian: Self::Endian) -> Self::Word;
    fn vmsize(&self, endian: Self::Endian) -> Self::Word;
    fn fileoff(&self, endian: Self::Endian) -> Self::Word;
    fn filesize(&self, endian: Self::Endian) -> Self::Word;
    fn maxprot(&self, endian: Self::Endian) -> u32;
    fn initprot(&self, endian: Self::Endian) -> u32;
    fn nsects(&self, endian: Self::Endian) -> u32;
    fn flags(&self, endian: Self::Endian) -> u32;

    /// Return the `segname` bytes up until the null terminator.
    ///
    /// If there is no null terminator, the full 16 bytes are returned.
    fn name(&self) -> &[u8] {
        let segname = &self.segname()[..];
        match memchr::memchr(b'\0', segname) {
            Some(end) => &segname[..end],
            None => segname,
        }
    }

    /// Return the offset and size of the segment in the file.
    fn file_range(&self, endian: Self::Endian) -> (u64, u64) {
        (self.fileoff(endian).into(), self.filesize(endian).into())
    }

    /// Get the segment data from the file data.
    ///
    /// Returns `Err` for invalid values.
    fn data<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        data: R,
    ) -> result::Result<&'data [u8], ()> {
        let (offset, size) = self.file_range(endian);
        data.read_bytes_at(offset, size)
    }

    /// Get the array of sections from the data following the segment command.
    ///
    /// `section_data` must start at the first section header, i.e. directly
    /// after the segment command itself.
    ///
    /// Returns `Err` for invalid values.
    fn sections<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        section_data: R,
    ) -> Result<&'data [Self::Section]> {
        section_data
            .read_slice_at(0, self.nsects(endian) as usize)
            .read_error("Invalid Mach-O number of sections")
    }
}
|
||||
|
||||
// 32-bit implementation: every accessor decodes the corresponding raw
// `SegmentCommand32` field with the requested endianness.
impl<Endian: endian::Endian> Segment for macho::SegmentCommand32<Endian> {
    type Word = u32;
    type Endian = Endian;
    type Section = macho::Section32<Self::Endian>;

    fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>> {
        command.segment_32()
    }

    fn cmd(&self, endian: Self::Endian) -> u32 {
        self.cmd.get(endian)
    }
    fn cmdsize(&self, endian: Self::Endian) -> u32 {
        self.cmdsize.get(endian)
    }
    fn segname(&self) -> &[u8; 16] {
        &self.segname
    }
    fn vmaddr(&self, endian: Self::Endian) -> Self::Word {
        self.vmaddr.get(endian)
    }
    fn vmsize(&self, endian: Self::Endian) -> Self::Word {
        self.vmsize.get(endian)
    }
    fn fileoff(&self, endian: Self::Endian) -> Self::Word {
        self.fileoff.get(endian)
    }
    fn filesize(&self, endian: Self::Endian) -> Self::Word {
        self.filesize.get(endian)
    }
    fn maxprot(&self, endian: Self::Endian) -> u32 {
        self.maxprot.get(endian)
    }
    fn initprot(&self, endian: Self::Endian) -> u32 {
        self.initprot.get(endian)
    }
    fn nsects(&self, endian: Self::Endian) -> u32 {
        self.nsects.get(endian)
    }
    fn flags(&self, endian: Self::Endian) -> u32 {
        self.flags.get(endian)
    }
}
|
||||
|
||||
// 64-bit implementation: mirrors the 32-bit impl with `Word = u64`.
impl<Endian: endian::Endian> Segment for macho::SegmentCommand64<Endian> {
    type Word = u64;
    type Endian = Endian;
    type Section = macho::Section64<Self::Endian>;

    fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>> {
        command.segment_64()
    }

    fn cmd(&self, endian: Self::Endian) -> u32 {
        self.cmd.get(endian)
    }
    fn cmdsize(&self, endian: Self::Endian) -> u32 {
        self.cmdsize.get(endian)
    }
    fn segname(&self) -> &[u8; 16] {
        &self.segname
    }
    fn vmaddr(&self, endian: Self::Endian) -> Self::Word {
        self.vmaddr.get(endian)
    }
    fn vmsize(&self, endian: Self::Endian) -> Self::Word {
        self.vmsize.get(endian)
    }
    fn fileoff(&self, endian: Self::Endian) -> Self::Word {
        self.fileoff.get(endian)
    }
    fn filesize(&self, endian: Self::Endian) -> Self::Word {
        self.filesize.get(endian)
    }
    fn maxprot(&self, endian: Self::Endian) -> u32 {
        self.maxprot.get(endian)
    }
    fn initprot(&self, endian: Self::Endian) -> u32 {
        self.initprot.get(endian)
    }
    fn nsects(&self, endian: Self::Endian) -> u32 {
        self.nsects.get(endian)
    }
    fn flags(&self, endian: Self::Endian) -> u32 {
        self.flags.get(endian)
    }
}
|
||||
492
vendor/object/src/read/macho/symbol.rs
vendored
Normal file
492
vendor/object/src/read/macho/symbol.rs
vendored
Normal file
@@ -0,0 +1,492 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::{fmt, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, ObjectMap, ObjectMapEntry, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result,
|
||||
SectionIndex, SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry,
|
||||
SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile};
|
||||
|
||||
/// A table of symbol entries in a Mach-O file.
///
/// Also includes the string table used for the symbol names.
///
/// Returned by [`macho::SymtabCommand::symbols`].
#[derive(Debug, Clone, Copy)]
pub struct SymbolTable<'data, Mach: MachHeader, R = &'data [u8]>
where
    R: ReadRef<'data>,
{
    // The raw nlist entries, in file order.
    symbols: &'data [Mach::Nlist],
    // String table that `n_strx` offsets in the entries refer into.
    strings: StringTable<'data, R>,
}
|
||||
|
||||
impl<'data, Mach: MachHeader, R: ReadRef<'data>> Default for SymbolTable<'data, Mach, R> {
|
||||
fn default() -> Self {
|
||||
SymbolTable {
|
||||
symbols: &[],
|
||||
strings: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Mach: MachHeader, R: ReadRef<'data>> SymbolTable<'data, Mach, R> {
    // Constructor used by the parsing code in this module.
    #[inline]
    pub(super) fn new(symbols: &'data [Mach::Nlist], strings: StringTable<'data, R>) -> Self {
        SymbolTable { symbols, strings }
    }

    /// Return the string table used for the symbol names.
    #[inline]
    pub fn strings(&self) -> StringTable<'data, R> {
        self.strings
    }

    /// Iterate over the symbols.
    #[inline]
    pub fn iter(&self) -> slice::Iter<'data, Mach::Nlist> {
        self.symbols.iter()
    }

    /// Return true if the symbol table is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.symbols.is_empty()
    }

    /// The number of symbols.
    #[inline]
    pub fn len(&self) -> usize {
        self.symbols.len()
    }

    /// Return the symbol at the given index.
    ///
    /// Returns an error if the index is out of bounds.
    pub fn symbol(&self, index: usize) -> Result<&'data Mach::Nlist> {
        self.symbols
            .get(index)
            .read_error("Invalid Mach-O symbol index")
    }

    /// Construct a map from addresses to a user-defined map entry.
    ///
    /// Only definition symbols are considered; `f` may filter further by
    /// returning `None`.
    pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Mach::Nlist) -> Option<Entry>>(
        &self,
        f: F,
    ) -> SymbolMap<Entry> {
        let mut symbols = Vec::new();
        for nlist in self.symbols {
            if !nlist.is_definition() {
                continue;
            }
            if let Some(entry) = f(nlist) {
                symbols.push(entry);
            }
        }
        SymbolMap::new(symbols)
    }

    /// Construct a map from addresses to symbol names and object file names.
    ///
    /// Built from the STAB debugging entries only.
    pub fn object_map(&self, endian: Mach::Endian) -> ObjectMap<'data> {
        let mut symbols = Vec::new();
        let mut objects = Vec::new();
        // Index into `objects` for the current module's N_OSO entry, if any.
        let mut object = None;
        // (name, address) of an N_FUN whose closing (size) entry is pending.
        let mut current_function = None;
        // Each module starts with one or two N_SO symbols (path, or directory + filename)
        // and one N_OSO symbol. The module is terminated by an empty N_SO symbol.
        for nlist in self.symbols {
            let n_type = nlist.n_type();
            if n_type & macho::N_STAB == 0 {
                continue;
            }
            // TODO: includes variables too (N_GSYM, N_STSYM). These may need to get their
            // address from regular symbols though.
            match n_type {
                macho::N_SO => {
                    // Module boundary: reset the current object file.
                    object = None;
                }
                macho::N_OSO => {
                    // Records the path of the object file for this module.
                    object = None;
                    if let Ok(name) = nlist.name(endian, self.strings) {
                        if !name.is_empty() {
                            object = Some(objects.len());
                            objects.push(name);
                        }
                    }
                }
                macho::N_FUN => {
                    // A named N_FUN opens a function; the following unnamed
                    // N_FUN carries its size in `n_value` and closes it.
                    if let Ok(name) = nlist.name(endian, self.strings) {
                        if !name.is_empty() {
                            current_function = Some((name, nlist.n_value(endian).into()))
                        } else if let Some((name, address)) = current_function.take() {
                            if let Some(object) = object {
                                symbols.push(ObjectMapEntry {
                                    address,
                                    size: nlist.n_value(endian).into(),
                                    name,
                                    object,
                                });
                            }
                        }
                    }
                }
                _ => {}
            }
        }
        ObjectMap {
            symbols: SymbolMap::new(symbols),
            objects,
        }
    }
}
|
||||
|
||||
/// A symbol table in a [`MachOFile32`](super::MachOFile32).
///
/// 32-bit specialization of [`MachOSymbolTable`].
pub type MachOSymbolTable32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbolTable<'data, 'file, macho::MachHeader32<Endian>, R>;
/// A symbol table in a [`MachOFile64`](super::MachOFile64).
///
/// 64-bit specialization of [`MachOSymbolTable`].
pub type MachOSymbolTable64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbolTable<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol table in a [`MachOFile`].
///
/// Thin wrapper around the file; the actual entries live in
/// `file.symbols` (a [`SymbolTable`]).
#[derive(Debug, Clone, Copy)]
pub struct MachOSymbolTable<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file MachOFile<'data, Mach, R>,
}
|
||||
|
||||
// Seal the `ObjectSymbolTable` implementation (see `read::private::Sealed`).
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSymbolTable<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSymbolTable<'data> for MachOSymbolTable<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    type Symbol = MachOSymbol<'data, 'file, Mach, R>;
    type SymbolIterator = MachOSymbolIterator<'data, 'file, Mach, R>;

    /// Iterate over all non-STAB symbols, starting at index 0.
    fn symbols(&self) -> Self::SymbolIterator {
        MachOSymbolIterator {
            file: self.file,
            index: 0,
        }
    }

    /// Look up a symbol by index.
    ///
    /// Errors if the index is out of bounds, or refers to a STAB entry
    /// (`MachOSymbol::new` rejects those).
    fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
        let nlist = self.file.symbols.symbol(index.0)?;
        MachOSymbol::new(self.file, index, nlist).read_error("Unsupported Mach-O symbol index")
    }
}
|
||||
|
||||
/// An iterator for the symbols in a [`MachOFile32`](super::MachOFile32).
///
/// 32-bit specialization of [`MachOSymbolIterator`].
pub type MachOSymbolIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbolIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
/// An iterator for the symbols in a [`MachOFile64`](super::MachOFile64).
///
/// 64-bit specialization of [`MachOSymbolIterator`].
pub type MachOSymbolIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbolIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the symbols in a [`MachOFile`].
///
/// Skips STAB (debugging) entries; see the `Iterator` impl.
pub struct MachOSymbolIterator<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file MachOFile<'data, Mach, R>,
    // Index of the next nlist entry to examine.
    pub(super) index: usize,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachOSymbolIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("MachOSymbolIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSymbolIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSymbol<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let index = self.index;
|
||||
let nlist = self.file.symbols.symbols.get(index)?;
|
||||
self.index += 1;
|
||||
if let Some(symbol) = MachOSymbol::new(self.file, SymbolIndex(index), nlist) {
|
||||
return Some(symbol);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`MachOFile32`](super::MachOFile32).
///
/// 32-bit specialization of [`MachOSymbol`].
pub type MachOSymbol32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbol<'data, 'file, macho::MachHeader32<Endian>, R>;
/// A symbol in a [`MachOFile64`](super::MachOFile64).
///
/// 64-bit specialization of [`MachOSymbol`].
pub type MachOSymbol64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
    MachOSymbol<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol in a [`MachOFile`].
///
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
#[derive(Debug, Clone, Copy)]
pub struct MachOSymbol<'data, 'file, Mach, R = &'data [u8]>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    // The containing file; supplies endianness and the string table.
    file: &'file MachOFile<'data, Mach, R>,
    // Index of this entry in the symbol table.
    index: SymbolIndex,
    // The raw nlist entry.
    nlist: &'data Mach::Nlist,
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSymbol<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) fn new(
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
index: SymbolIndex,
|
||||
nlist: &'data Mach::Nlist,
|
||||
) -> Option<Self> {
|
||||
if nlist.n_type() & macho::N_STAB != 0 {
|
||||
return None;
|
||||
}
|
||||
Some(MachOSymbol { file, index, nlist })
|
||||
}
|
||||
}
|
||||
|
||||
// Seal the `ObjectSymbol` implementation (see `read::private::Sealed`).
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSymbol<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSymbol<'data> for MachOSymbol<'data, 'file, Mach, R>
where
    Mach: MachHeader,
    R: ReadRef<'data>,
{
    #[inline]
    fn index(&self) -> SymbolIndex {
        self.index
    }

    /// The symbol name, looked up in the file's string table.
    fn name_bytes(&self) -> Result<&'data [u8]> {
        self.nlist.name(self.file.endian, self.file.symbols.strings)
    }

    fn name(&self) -> Result<&'data str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 Mach-O symbol name")
    }

    #[inline]
    fn address(&self) -> u64 {
        self.nlist.n_value(self.file.endian).into()
    }

    // nlist entries do not record a size.
    #[inline]
    fn size(&self) -> u64 {
        0
    }

    /// Derive the symbol kind from the kind of the section it is defined in;
    /// symbols without a resolvable section are `Unknown`.
    fn kind(&self) -> SymbolKind {
        self.section()
            .index()
            .and_then(|index| self.file.section_internal(index).ok())
            .map(|section| match section.kind {
                SectionKind::Text => SymbolKind::Text,
                SectionKind::Data
                | SectionKind::ReadOnlyData
                | SectionKind::ReadOnlyString
                | SectionKind::UninitializedData
                | SectionKind::Common => SymbolKind::Data,
                SectionKind::Tls | SectionKind::UninitializedTls | SectionKind::TlsVariables => {
                    SymbolKind::Tls
                }
                _ => SymbolKind::Unknown,
            })
            .unwrap_or(SymbolKind::Unknown)
    }

    fn section(&self) -> SymbolSection {
        match self.nlist.n_type() & macho::N_TYPE {
            macho::N_UNDF => SymbolSection::Undefined,
            macho::N_ABS => SymbolSection::Absolute,
            macho::N_SECT => {
                // `n_sect` is 1-based; 0 means no section despite N_SECT.
                let n_sect = self.nlist.n_sect();
                if n_sect != 0 {
                    SymbolSection::Section(SectionIndex(n_sect as usize))
                } else {
                    SymbolSection::Unknown
                }
            }
            _ => SymbolSection::Unknown,
        }
    }

    #[inline]
    fn is_undefined(&self) -> bool {
        self.nlist.n_type() & macho::N_TYPE == macho::N_UNDF
    }

    #[inline]
    fn is_definition(&self) -> bool {
        self.nlist.is_definition()
    }

    #[inline]
    fn is_common(&self) -> bool {
        // Mach-O common symbols are based on section, not symbol
        false
    }

    #[inline]
    fn is_weak(&self) -> bool {
        self.nlist.n_desc(self.file.endian) & (macho::N_WEAK_REF | macho::N_WEAK_DEF) != 0
    }

    /// Scope derived from the N_EXT (external) and N_PEXT (private external)
    /// bits of `n_type`.
    fn scope(&self) -> SymbolScope {
        let n_type = self.nlist.n_type();
        if n_type & macho::N_TYPE == macho::N_UNDF {
            SymbolScope::Unknown
        } else if n_type & macho::N_EXT == 0 {
            SymbolScope::Compilation
        } else if n_type & macho::N_PEXT != 0 {
            SymbolScope::Linkage
        } else {
            SymbolScope::Dynamic
        }
    }

    #[inline]
    fn is_global(&self) -> bool {
        self.scope() != SymbolScope::Compilation
    }

    #[inline]
    fn is_local(&self) -> bool {
        self.scope() == SymbolScope::Compilation
    }

    // Expose the raw `n_desc` field for format-specific inspection.
    #[inline]
    fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
        let n_desc = self.nlist.n_desc(self.file.endian);
        SymbolFlags::MachO { n_desc }
    }
}
|
||||
|
||||
/// A trait for generic access to [`macho::Nlist32`] and [`macho::Nlist64`].
#[allow(missing_docs)]
pub trait Nlist: Debug + Pod {
    // Word is u32 for Nlist32 and u64 for Nlist64.
    type Word: Into<u64>;
    type Endian: endian::Endian;

    // Raw field accessors.
    fn n_strx(&self, endian: Self::Endian) -> u32;
    fn n_type(&self) -> u8;
    fn n_sect(&self) -> u8;
    fn n_desc(&self, endian: Self::Endian) -> u16;
    fn n_value(&self, endian: Self::Endian) -> Self::Word;

    /// Look up the symbol name in the given string table using `n_strx`.
    fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Self::Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.n_strx(endian))
            .read_error("Invalid Mach-O symbol name offset")
    }

    /// Return true if this is a STAB symbol.
    ///
    /// This determines the meaning of the `n_type` field.
    fn is_stab(&self) -> bool {
        self.n_type() & macho::N_STAB != 0
    }

    /// Return true if this is an undefined symbol.
    fn is_undefined(&self) -> bool {
        let n_type = self.n_type();
        n_type & macho::N_STAB == 0 && n_type & macho::N_TYPE == macho::N_UNDF
    }

    /// Return true if the symbol is a definition of a function or data object.
    fn is_definition(&self) -> bool {
        let n_type = self.n_type();
        n_type & macho::N_STAB == 0 && n_type & macho::N_TYPE == macho::N_SECT
    }

    /// Return the library ordinal.
    ///
    /// This is either a 1-based index into the dylib load commands,
    /// or a special ordinal.
    #[inline]
    fn library_ordinal(&self, endian: Self::Endian) -> u8 {
        // The ordinal is stored in the high byte of n_desc.
        (self.n_desc(endian) >> 8) as u8
    }
}
|
||||
|
||||
// 32-bit implementation: each accessor decodes the corresponding raw field.
impl<Endian: endian::Endian> Nlist for macho::Nlist32<Endian> {
    type Word = u32;
    type Endian = Endian;

    fn n_strx(&self, endian: Self::Endian) -> u32 {
        self.n_strx.get(endian)
    }
    fn n_type(&self) -> u8 {
        self.n_type
    }
    fn n_sect(&self) -> u8 {
        self.n_sect
    }
    fn n_desc(&self, endian: Self::Endian) -> u16 {
        self.n_desc.get(endian)
    }
    fn n_value(&self, endian: Self::Endian) -> Self::Word {
        self.n_value.get(endian)
    }
}
|
||||
|
||||
// 64-bit implementation: mirrors the 32-bit impl with `Word = u64`.
impl<Endian: endian::Endian> Nlist for macho::Nlist64<Endian> {
    type Word = u64;
    type Endian = Endian;

    fn n_strx(&self, endian: Self::Endian) -> u32 {
        self.n_strx.get(endian)
    }
    fn n_type(&self) -> u8 {
        self.n_type
    }
    fn n_sect(&self) -> u8 {
        self.n_sect
    }
    fn n_desc(&self, endian: Self::Endian) -> u16 {
        self.n_desc.get(endian)
    }
    fn n_value(&self, endian: Self::Endian) -> Self::Word {
        self.n_value.get(endian)
    }
}
|
||||
860
vendor/object/src/read/mod.rs
vendored
Normal file
860
vendor/object/src/read/mod.rs
vendored
Normal file
@@ -0,0 +1,860 @@
|
||||
//! Interface for reading object files.
|
||||
//!
|
||||
//! ## Unified read API
|
||||
//!
|
||||
//! The [`Object`] trait provides a unified read API for accessing common features of
|
||||
//! object files, such as sections and symbols. There is an implementation of this
|
||||
//! trait for [`File`], which allows reading any file format, as well as implementations
|
||||
//! for each file format:
|
||||
//! [`ElfFile`](elf::ElfFile), [`MachOFile`](macho::MachOFile), [`CoffFile`](coff::CoffFile),
|
||||
//! [`PeFile`](pe::PeFile), [`WasmFile`](wasm::WasmFile), [`XcoffFile`](xcoff::XcoffFile).
|
||||
//!
|
||||
//! ## Low level read API
|
||||
//!
|
||||
//! The submodules for each file format define helpers that operate on the raw structs.
|
||||
//! These can be used instead of the unified API, or in conjunction with it to access
|
||||
//! details that are not available via the unified API.
|
||||
//!
|
||||
//! See the [submodules](#modules) for examples of the low level read API.
|
||||
//!
|
||||
//! ## Naming Convention
|
||||
//!
|
||||
//! Types that form part of the unified API for a file format are prefixed with the
|
||||
//! name of the file format.
|
||||
//!
|
||||
//! ## Example for unified read API
|
||||
//! ```no_run
|
||||
//! use object::{Object, ObjectSection};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let file = object::File::parse(&*data)?;
|
||||
//! for section in file.sections() {
|
||||
//! println!("{}", section.name()?);
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::vec::Vec;
|
||||
use core::{fmt, result};
|
||||
|
||||
use crate::common::*;
|
||||
|
||||
mod read_ref;
|
||||
pub use read_ref::*;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod read_cache;
|
||||
#[cfg(feature = "std")]
|
||||
pub use read_cache::*;
|
||||
|
||||
mod util;
|
||||
pub use util::*;
|
||||
|
||||
#[cfg(any(
|
||||
feature = "coff",
|
||||
feature = "elf",
|
||||
feature = "macho",
|
||||
feature = "pe",
|
||||
feature = "wasm",
|
||||
feature = "xcoff"
|
||||
))]
|
||||
mod any;
|
||||
#[cfg(any(
|
||||
feature = "coff",
|
||||
feature = "elf",
|
||||
feature = "macho",
|
||||
feature = "pe",
|
||||
feature = "wasm",
|
||||
feature = "xcoff"
|
||||
))]
|
||||
pub use any::*;
|
||||
|
||||
#[cfg(feature = "archive")]
|
||||
pub mod archive;
|
||||
|
||||
#[cfg(feature = "coff")]
|
||||
pub mod coff;
|
||||
|
||||
#[cfg(feature = "elf")]
|
||||
pub mod elf;
|
||||
|
||||
#[cfg(feature = "macho")]
|
||||
pub mod macho;
|
||||
|
||||
#[cfg(feature = "pe")]
|
||||
pub mod pe;
|
||||
|
||||
#[cfg(feature = "wasm")]
|
||||
pub mod wasm;
|
||||
|
||||
#[cfg(feature = "xcoff")]
|
||||
pub mod xcoff;
|
||||
|
||||
mod traits;
|
||||
pub use traits::*;
|
||||
|
||||
mod private {
    /// Sealing trait: public traits in this module list it as a supertrait,
    /// and since it is only implemented inside this crate, downstream crates
    /// cannot implement those public traits.
    pub trait Sealed {}
}
|
||||
|
||||
/// The error type used within the read module.
///
/// Wraps a static, human-readable message; no further structure is recorded.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Error(&'static str);
||||
|
||||
impl fmt::Display for Error {
    /// Write the static error message unchanged.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
|
||||
|
||||
// `Error` holds only a static message, so the default trait methods suffice.
#[cfg(feature = "std")]
impl std::error::Error for Error {}
|
||||
|
||||
/// The result type used within the read module.
///
/// Shorthand for `result::Result<T, read::Error>`.
pub type Result<T> = result::Result<T, Error>;
|
||||
|
||||
// Internal convenience trait: convert lower-level failures into `Error` with
// a static context message. Implemented for `Result<T, ()>`,
// `Result<T, Error>`, and `Option<T>` below.
trait ReadError<T> {
    fn read_error(self, error: &'static str) -> Result<T>;
}
|
||||
|
||||
impl<T> ReadError<T> for result::Result<T, ()> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.map_err(|()| Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for result::Result<T, Error> {
    fn read_error(self, error: &'static str) -> Result<T> {
        // Discard the original error message in favor of the caller's context.
        self.map_err(|_| Error(error))
    }
}
|
||||
|
||||
impl<T> ReadError<T> for Option<T> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.ok_or(Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
/// The native executable file for the target platform.
///
/// Selected at compile time from the target OS, pointer width, and enabled
/// format features; exactly one definition applies per target.
#[cfg(all(
    unix,
    not(target_os = "macos"),
    target_pointer_width = "32",
    feature = "elf"
))]
pub type NativeFile<'data, R = &'data [u8]> = elf::ElfFile32<'data, crate::Endianness, R>;

/// The native executable file for the target platform.
#[cfg(all(
    unix,
    not(target_os = "macos"),
    target_pointer_width = "64",
    feature = "elf"
))]
pub type NativeFile<'data, R = &'data [u8]> = elf::ElfFile64<'data, crate::Endianness, R>;

/// The native executable file for the target platform.
#[cfg(all(target_os = "macos", target_pointer_width = "32", feature = "macho"))]
pub type NativeFile<'data, R = &'data [u8]> = macho::MachOFile32<'data, crate::Endianness, R>;

/// The native executable file for the target platform.
#[cfg(all(target_os = "macos", target_pointer_width = "64", feature = "macho"))]
pub type NativeFile<'data, R = &'data [u8]> = macho::MachOFile64<'data, crate::Endianness, R>;

/// The native executable file for the target platform.
#[cfg(all(target_os = "windows", target_pointer_width = "32", feature = "pe"))]
pub type NativeFile<'data, R = &'data [u8]> = pe::PeFile32<'data, R>;

/// The native executable file for the target platform.
#[cfg(all(target_os = "windows", target_pointer_width = "64", feature = "pe"))]
pub type NativeFile<'data, R = &'data [u8]> = pe::PeFile64<'data, R>;

/// The native executable file for the target platform.
// Fixed: the predicate previously repeated `feature = "wasm"` twice inside
// `all(...)`; the duplicate is redundant and has been removed (no behavior
// change — `all` is conjunction, so a repeated predicate is a no-op).
#[cfg(all(target_arch = "wasm32", feature = "wasm"))]
pub type NativeFile<'data, R = &'data [u8]> = wasm::WasmFile<'data, R>;
|
||||
|
||||
/// A file format kind.
///
/// Returned by [`FileKind::parse`]; variants are gated on the corresponding
/// format feature.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum FileKind {
    /// A Unix archive.
    ///
    /// See [`archive::ArchiveFile`].
    #[cfg(feature = "archive")]
    Archive,
    /// A COFF object file.
    ///
    /// See [`coff::CoffFile`].
    #[cfg(feature = "coff")]
    Coff,
    /// A COFF bigobj object file.
    ///
    /// This supports a larger number of sections.
    ///
    /// See [`coff::CoffBigFile`].
    #[cfg(feature = "coff")]
    CoffBig,
    /// A Windows short import file.
    ///
    /// See [`coff::ImportFile`].
    #[cfg(feature = "coff")]
    CoffImport,
    /// A dyld cache file containing Mach-O images.
    ///
    /// See [`macho::DyldCache`]
    #[cfg(feature = "macho")]
    DyldCache,
    /// A 32-bit ELF file.
    ///
    /// See [`elf::ElfFile32`].
    #[cfg(feature = "elf")]
    Elf32,
    /// A 64-bit ELF file.
    ///
    /// See [`elf::ElfFile64`].
    #[cfg(feature = "elf")]
    Elf64,
    /// A 32-bit Mach-O file.
    ///
    /// See [`macho::MachOFile32`].
    #[cfg(feature = "macho")]
    MachO32,
    /// A 64-bit Mach-O file.
    ///
    /// See [`macho::MachOFile64`].
    #[cfg(feature = "macho")]
    MachO64,
    /// A 32-bit Mach-O fat binary.
    ///
    /// See [`macho::FatHeader::parse_arch32`].
    #[cfg(feature = "macho")]
    MachOFat32,
    /// A 64-bit Mach-O fat binary.
    ///
    /// See [`macho::FatHeader::parse_arch64`].
    #[cfg(feature = "macho")]
    MachOFat64,
    /// A 32-bit PE file.
    ///
    /// See [`pe::PeFile32`].
    #[cfg(feature = "pe")]
    Pe32,
    /// A 64-bit PE file.
    ///
    /// See [`pe::PeFile64`].
    #[cfg(feature = "pe")]
    Pe64,
    /// A Wasm file.
    ///
    /// See [`wasm::WasmFile`].
    #[cfg(feature = "wasm")]
    Wasm,
    /// A 32-bit XCOFF file.
    ///
    /// See [`xcoff::XcoffFile32`].
    #[cfg(feature = "xcoff")]
    Xcoff32,
    /// A 64-bit XCOFF file.
    ///
    /// See [`xcoff::XcoffFile64`].
    #[cfg(feature = "xcoff")]
    Xcoff64,
}
|
||||
|
||||
impl FileKind {
    /// Determine a file kind by parsing the start of the file.
    pub fn parse<'data, R: ReadRef<'data>>(data: R) -> Result<FileKind> {
        Self::parse_at(data, 0)
    }

    /// Determine a file kind by parsing at the given offset.
    ///
    /// Reads 16 bytes of magic at `offset` and matches against the magic
    /// numbers of every format enabled by the crate's cargo features.
    pub fn parse_at<'data, R: ReadRef<'data>>(data: R, offset: u64) -> Result<FileKind> {
        let magic = data
            .read_bytes_at(offset, 16)
            .read_error("Could not read file magic")?;
        // Defensive: `read_bytes_at` should return exactly 16 bytes on success.
        if magic.len() < 16 {
            return Err(Error("File too short"));
        }

        // Dispatch on the first 8 magic bytes; patterns for shorter magics
        // ignore the trailing bytes with `..`.
        let kind = match [
            magic[0], magic[1], magic[2], magic[3], magic[4], magic[5], magic[6], magic[7],
        ] {
            #[cfg(feature = "archive")]
            [b'!', b'<', b'a', b'r', b'c', b'h', b'>', b'\n'] => FileKind::Archive,
            #[cfg(feature = "macho")]
            [b'd', b'y', b'l', b'd', b'_', b'v', b'1', b' '] => FileKind::DyldCache,
            // ELF: the class byte at offset 4 distinguishes 32-bit (1) from 64-bit (2).
            #[cfg(feature = "elf")]
            [0x7f, b'E', b'L', b'F', 1, ..] => FileKind::Elf32,
            #[cfg(feature = "elf")]
            [0x7f, b'E', b'L', b'F', 2, ..] => FileKind::Elf64,
            // Mach-O magics are accepted in both byte orders.
            #[cfg(feature = "macho")]
            [0xfe, 0xed, 0xfa, 0xce, ..]
            | [0xce, 0xfa, 0xed, 0xfe, ..] => FileKind::MachO32,
            #[cfg(feature = "macho")]
            | [0xfe, 0xed, 0xfa, 0xcf, ..]
            | [0xcf, 0xfa, 0xed, 0xfe, ..] => FileKind::MachO64,
            #[cfg(feature = "macho")]
            [0xca, 0xfe, 0xba, 0xbe, ..] => FileKind::MachOFat32,
            #[cfg(feature = "macho")]
            [0xca, 0xfe, 0xba, 0xbf, ..] => FileKind::MachOFat64,
            #[cfg(feature = "wasm")]
            [0x00, b'a', b's', b'm', ..] => FileKind::Wasm,
            #[cfg(feature = "pe")]
            [b'M', b'Z', ..] if offset == 0 => {
                // offset == 0 restriction is because optional_header_magic only looks at offset 0
                match pe::optional_header_magic(data) {
                    Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR32_MAGIC) => {
                        FileKind::Pe32
                    }
                    Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR64_MAGIC) => {
                        FileKind::Pe64
                    }
                    _ => return Err(Error("Unknown MS-DOS file")),
                }
            }
            // TODO: more COFF machines
            // COFF magics are the machine field in little-endian byte order.
            #[cfg(feature = "coff")]
            // COFF arm
            [0xc4, 0x01, ..]
            // COFF arm64
            | [0x64, 0xaa, ..]
            // COFF arm64ec
            | [0x41, 0xa6, ..]
            // COFF x86
            | [0x4c, 0x01, ..]
            // COFF x86-64
            | [0x64, 0x86, ..] => FileKind::Coff,
            #[cfg(feature = "coff")]
            [0x00, 0x00, 0xff, 0xff, 0x00, 0x00, ..] => FileKind::CoffImport,
            #[cfg(feature = "coff")]
            [0x00, 0x00, 0xff, 0xff, 0x02, 0x00, ..] if offset == 0 => {
                // offset == 0 restriction is because anon_object_class_id only looks at offset 0
                match coff::anon_object_class_id(data) {
                    Ok(crate::pe::ANON_OBJECT_HEADER_BIGOBJ_CLASS_ID) => FileKind::CoffBig,
                    _ => return Err(Error("Unknown anon object file")),
                }
            }
            #[cfg(feature = "xcoff")]
            [0x01, 0xdf, ..] => FileKind::Xcoff32,
            #[cfg(feature = "xcoff")]
            [0x01, 0xf7, ..] => FileKind::Xcoff64,
            _ => return Err(Error("Unknown file magic")),
        };
        Ok(kind)
    }
}
|
||||
|
||||
/// An object kind.
///
/// Returned by [`Object::kind`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum ObjectKind {
    /// The object kind is unknown.
    Unknown,
    /// Relocatable object.
    Relocatable,
    /// Executable.
    Executable,
    /// Dynamic shared object.
    Dynamic,
    /// Core.
    Core,
}
|
||||
|
||||
/// The index used to identify a section in a file.
///
/// Newtype wrapper so section indices are not confused with other `usize` values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SectionIndex(pub usize);

/// The index used to identify a symbol in a symbol table.
///
/// Newtype wrapper so symbol indices are not confused with other `usize` values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SymbolIndex(pub usize);
|
||||
|
||||
/// The section where an [`ObjectSymbol`] is defined.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum SymbolSection {
    /// The section is unknown.
    Unknown,
    /// The section is not applicable for this symbol (such as file symbols).
    None,
    /// The symbol is undefined.
    Undefined,
    /// The symbol has an absolute value.
    Absolute,
    /// The symbol is a zero-initialized symbol that will be combined with duplicate definitions.
    Common,
    /// The symbol is defined in the given section.
    Section(SectionIndex),
}
|
||||
|
||||
impl SymbolSection {
|
||||
/// Returns the section index for the section where the symbol is defined.
|
||||
///
|
||||
/// May return `None` if the symbol is not defined in a section.
|
||||
#[inline]
|
||||
pub fn index(self) -> Option<SectionIndex> {
|
||||
if let SymbolSection::Section(index) = self {
|
||||
Some(index)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An entry in a [`SymbolMap`].
pub trait SymbolMapEntry {
    /// The symbol address.
    fn address(&self) -> u64;
}

/// A map from addresses to symbol information.
///
/// The symbol information depends on the chosen entry type, such as [`SymbolMapName`].
///
/// Returned by [`Object::symbol_map`].
#[derive(Debug, Default, Clone)]
pub struct SymbolMap<T: SymbolMapEntry> {
    symbols: Vec<T>,
}

impl<T: SymbolMapEntry> SymbolMap<T> {
    /// Construct a new symbol map.
    ///
    /// This function will sort the symbols by address.
    pub fn new(mut symbols: Vec<T>) -> Self {
        symbols.sort_by_key(|entry| entry.address());
        SymbolMap { symbols }
    }

    /// Get the symbol before the given address.
    pub fn get(&self, address: u64) -> Option<&T> {
        let search = self
            .symbols
            .binary_search_by_key(&address, |entry| entry.address());
        let index = match search {
            // Exact match on a symbol's start address.
            Ok(i) => i,
            // `address` falls between entries: take the preceding one,
            // or `None` when it is before the first symbol.
            Err(i) => i.checked_sub(1)?,
        };
        self.symbols.get(index)
    }

    /// Get all symbols in the map.
    #[inline]
    pub fn symbols(&self) -> &[T] {
        &self.symbols
    }
}
|
||||
|
||||
/// The type used for entries in a [`SymbolMap`] that maps from addresses to names.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SymbolMapName<'data> {
    address: u64,
    name: &'data str,
}

impl<'data> SymbolMapName<'data> {
    /// Construct a `SymbolMapName`.
    pub fn new(address: u64, name: &'data str) -> Self {
        Self { address, name }
    }

    /// The symbol address.
    #[inline]
    pub fn address(&self) -> u64 {
        self.address
    }

    /// The symbol name.
    #[inline]
    pub fn name(&self) -> &'data str {
        self.name
    }
}
|
||||
|
||||
impl<'data> SymbolMapEntry for SymbolMapName<'data> {
    /// The symbol address.
    #[inline]
    fn address(&self) -> u64 {
        self.address
    }
}
|
||||
|
||||
/// A map from addresses to symbol names and object files.
|
||||
///
|
||||
/// This is derived from STAB entries in Mach-O files.
|
||||
///
|
||||
/// Returned by [`Object::object_map`].
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct ObjectMap<'data> {
|
||||
symbols: SymbolMap<ObjectMapEntry<'data>>,
|
||||
objects: Vec<&'data [u8]>,
|
||||
}
|
||||
|
||||
impl<'data> ObjectMap<'data> {
|
||||
/// Get the entry containing the given address.
|
||||
pub fn get(&self, address: u64) -> Option<&ObjectMapEntry<'data>> {
|
||||
self.symbols
|
||||
.get(address)
|
||||
.filter(|entry| entry.size == 0 || address.wrapping_sub(entry.address) < entry.size)
|
||||
}
|
||||
|
||||
/// Get all symbols in the map.
|
||||
#[inline]
|
||||
pub fn symbols(&self) -> &[ObjectMapEntry<'data>] {
|
||||
self.symbols.symbols()
|
||||
}
|
||||
|
||||
/// Get all objects in the map.
|
||||
#[inline]
|
||||
pub fn objects(&self) -> &[&'data [u8]] {
|
||||
&self.objects
|
||||
}
|
||||
}
|
||||
|
||||
/// An [`ObjectMap`] entry.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ObjectMapEntry<'data> {
    // Start address of the symbol.
    address: u64,
    // Size of the symbol; 0 if unknown.
    size: u64,
    // Raw symbol name bytes.
    name: &'data [u8],
    // Index into `ObjectMap::objects`.
    object: usize,
}

impl<'data> ObjectMapEntry<'data> {
    /// Get the symbol address.
    #[inline]
    pub fn address(&self) -> u64 {
        self.address
    }

    /// Get the symbol size.
    ///
    /// This may be 0 if the size is unknown.
    #[inline]
    pub fn size(&self) -> u64 {
        self.size
    }

    /// Get the symbol name.
    #[inline]
    pub fn name(&self) -> &'data [u8] {
        self.name
    }

    /// Get the index of the object file name.
    #[inline]
    pub fn object_index(&self) -> usize {
        self.object
    }

    /// Get the object file name.
    ///
    /// Panics if `self.object_index()` is out of bounds for `map.objects`;
    /// this holds when `map` is the map this entry came from.
    #[inline]
    pub fn object(&self, map: &ObjectMap<'data>) -> &'data [u8] {
        map.objects[self.object]
    }
}
|
||||
|
||||
impl<'data> SymbolMapEntry for ObjectMapEntry<'data> {
    /// The symbol address.
    #[inline]
    fn address(&self) -> u64 {
        self.address
    }
}
|
||||
|
||||
/// An imported symbol.
///
/// Returned by [`Object::imports`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Import<'data> {
    library: ByteString<'data>,
    // TODO: or ordinal
    name: ByteString<'data>,
}

impl<'data> Import<'data> {
    /// The symbol name.
    #[inline]
    pub fn name(&self) -> &'data [u8] {
        self.name.0
    }

    /// The name of the library to import the symbol from.
    #[inline]
    pub fn library(&self) -> &'data [u8] {
        self.library.0
    }
}
|
||||
|
||||
/// An exported symbol.
///
/// Returned by [`Object::exports`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Export<'data> {
    // TODO: and ordinal?
    name: ByteString<'data>,
    address: u64,
}

impl<'data> Export<'data> {
    /// The symbol name.
    #[inline]
    pub fn name(&self) -> &'data [u8] {
        self.name.0
    }

    /// The virtual address of the symbol.
    #[inline]
    pub fn address(&self) -> u64 {
        self.address
    }
}
|
||||
|
||||
/// PDB information from the debug directory in a PE file.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CodeView<'data> {
    // PDB GUID, stored as raw bytes.
    guid: [u8; 16],
    // PDB path bytes as stored in the CodeView record.
    path: ByteString<'data>,
    // PDB age (incremented on each PDB update).
    age: u32,
}

impl<'data> CodeView<'data> {
    /// The path to the PDB as stored in CodeView.
    #[inline]
    pub fn path(&self) -> &'data [u8] {
        self.path.0
    }

    /// The age of the PDB.
    #[inline]
    pub fn age(&self) -> u32 {
        self.age
    }

    /// The GUID of the PDB.
    #[inline]
    pub fn guid(&self) -> [u8; 16] {
        self.guid
    }
}
|
||||
|
||||
/// The target referenced by a [`Relocation`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum RelocationTarget {
    /// The target is a symbol.
    Symbol(SymbolIndex),
    /// The target is a section.
    Section(SectionIndex),
    /// The offset is an absolute address.
    Absolute,
}
|
||||
|
||||
/// A relocation entry.
///
/// Returned by [`Object::dynamic_relocations`] or [`ObjectSection::relocations`].
#[derive(Debug)]
pub struct Relocation {
    // The relocation operation.
    kind: RelocationKind,
    // How the computed value is encoded in the place.
    encoding: RelocationEncoding,
    // Size in bits of the place; 0 means determined by `kind`.
    size: u8,
    // What the relocation refers to.
    target: RelocationTarget,
    // The addend used in the relocation calculation.
    addend: i64,
    // Whether an addend is also stored in the data at the relocated offset.
    implicit_addend: bool,
}

impl Relocation {
    /// The operation used to calculate the result of the relocation.
    #[inline]
    pub fn kind(&self) -> RelocationKind {
        self.kind
    }

    /// Information about how the result of the relocation operation is encoded in the place.
    #[inline]
    pub fn encoding(&self) -> RelocationEncoding {
        self.encoding
    }

    /// The size in bits of the place of the relocation.
    ///
    /// If 0, then the size is determined by the relocation kind.
    #[inline]
    pub fn size(&self) -> u8 {
        self.size
    }

    /// The target of the relocation.
    #[inline]
    pub fn target(&self) -> RelocationTarget {
        self.target
    }

    /// The addend to use in the relocation calculation.
    #[inline]
    pub fn addend(&self) -> i64 {
        self.addend
    }

    /// Set the addend to use in the relocation calculation.
    #[inline]
    pub fn set_addend(&mut self, addend: i64) {
        self.addend = addend
    }

    /// Returns true if there is an implicit addend stored in the data at the offset
    /// to be relocated.
    #[inline]
    pub fn has_implicit_addend(&self) -> bool {
        self.implicit_addend
    }
}
|
||||
|
||||
/// A data compression format.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum CompressionFormat {
    /// The data is uncompressed.
    None,
    /// The data is compressed, but the compression format is unknown.
    Unknown,
    /// ZLIB/DEFLATE.
    ///
    /// Used for ELF compression and GNU compressed debug information.
    Zlib,
    /// Zstandard.
    ///
    /// Used for ELF compression.
    Zstandard,
}
|
||||
|
||||
/// A range in a file that may be compressed.
|
||||
///
|
||||
/// Returned by [`ObjectSection::compressed_file_range`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct CompressedFileRange {
|
||||
/// The data compression format.
|
||||
pub format: CompressionFormat,
|
||||
/// The file offset of the compressed data.
|
||||
pub offset: u64,
|
||||
/// The compressed data size.
|
||||
pub compressed_size: u64,
|
||||
/// The uncompressed data size.
|
||||
pub uncompressed_size: u64,
|
||||
}
|
||||
|
||||
impl CompressedFileRange {
|
||||
/// Data that is uncompressed.
|
||||
#[inline]
|
||||
pub fn none(range: Option<(u64, u64)>) -> Self {
|
||||
if let Some((offset, size)) = range {
|
||||
CompressedFileRange {
|
||||
format: CompressionFormat::None,
|
||||
offset,
|
||||
compressed_size: size,
|
||||
uncompressed_size: size,
|
||||
}
|
||||
} else {
|
||||
CompressedFileRange {
|
||||
format: CompressionFormat::None,
|
||||
offset: 0,
|
||||
compressed_size: 0,
|
||||
uncompressed_size: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert to [`CompressedData`] by reading from the file.
|
||||
pub fn data<'data, R: ReadRef<'data>>(self, file: R) -> Result<CompressedData<'data>> {
|
||||
let data = file
|
||||
.read_bytes_at(self.offset, self.compressed_size)
|
||||
.read_error("Invalid compressed data size or offset")?;
|
||||
Ok(CompressedData {
|
||||
format: self.format,
|
||||
data,
|
||||
uncompressed_size: self.uncompressed_size,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Data that may be compressed.
///
/// Returned by [`ObjectSection::compressed_data`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct CompressedData<'data> {
    /// The data compression format.
    pub format: CompressionFormat,
    /// The compressed data.
    pub data: &'data [u8],
    /// The uncompressed data size.
    pub uncompressed_size: u64,
}

impl<'data> CompressedData<'data> {
    /// Data that is uncompressed.
    #[inline]
    pub fn none(data: &'data [u8]) -> Self {
        CompressedData {
            format: CompressionFormat::None,
            data,
            uncompressed_size: data.len() as u64,
        }
    }

    /// Return the uncompressed data.
    ///
    /// Returns an error for invalid data or unsupported compression.
    /// This includes if the data is compressed but the `compression` feature
    /// for this crate is disabled.
    pub fn decompress(self) -> Result<Cow<'data, [u8]>> {
        match self.format {
            // Already uncompressed; borrow without copying.
            CompressionFormat::None => Ok(Cow::Borrowed(self.data)),
            #[cfg(feature = "compression")]
            CompressionFormat::Zlib => {
                use core::convert::TryInto;
                // `uncompressed_size` is a u64 from the file; reject sizes that
                // do not fit in usize rather than truncating.
                let size = self
                    .uncompressed_size
                    .try_into()
                    .ok()
                    .read_error("Uncompressed data size is too large.")?;
                let mut decompressed = Vec::with_capacity(size);
                let mut decompress = flate2::Decompress::new(true);
                // NOTE(review): `decompress_vec` writes at most the vector's
                // capacity, so `size` also bounds the output length.
                decompress
                    .decompress_vec(
                        self.data,
                        &mut decompressed,
                        flate2::FlushDecompress::Finish,
                    )
                    .ok()
                    .read_error("Invalid zlib compressed data")?;
                Ok(Cow::Owned(decompressed))
            }
            #[cfg(feature = "compression")]
            CompressionFormat::Zstandard => {
                use core::convert::TryInto;
                use std::io::Read;
                let size = self
                    .uncompressed_size
                    .try_into()
                    .ok()
                    .read_error("Uncompressed data size is too large.")?;
                let mut decompressed = Vec::with_capacity(size);
                let mut decoder = ruzstd::StreamingDecoder::new(self.data)
                    .ok()
                    .read_error("Invalid zstd compressed data")?;
                decoder
                    .read_to_end(&mut decompressed)
                    .ok()
                    .read_error("Invalid zstd compressed data")?;
                Ok(Cow::Owned(decompressed))
            }
            // Reached for `Unknown`, and for compressed formats when the
            // `compression` feature is disabled.
            _ => Err(Error("Unsupported compressed data.")),
        }
    }
}
|
||||
213
vendor/object/src/read/pe/data_directory.rs
vendored
Normal file
213
vendor/object/src/read/pe/data_directory.rs
vendored
Normal file
@@ -0,0 +1,213 @@
|
||||
use core::slice;
|
||||
|
||||
use crate::read::{Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE};
|
||||
|
||||
use super::{
|
||||
DelayLoadImportTable, ExportTable, ImportTable, RelocationBlockIterator, ResourceDirectory,
|
||||
SectionTable,
|
||||
};
|
||||
|
||||
/// The table of data directories in a PE file.
///
/// Returned by [`ImageNtHeaders::parse`](super::ImageNtHeaders::parse).
#[derive(Debug, Clone, Copy)]
pub struct DataDirectories<'data> {
    // One entry per directory, indexed by the `IMAGE_DIRECTORY_ENTRY_*` constants.
    entries: &'data [pe::ImageDataDirectory],
}
|
||||
|
||||
impl<'data> DataDirectories<'data> {
|
||||
/// Parse the data directory table.
|
||||
///
|
||||
/// `data` must be the remaining optional data following the
|
||||
/// [optional header](pe::ImageOptionalHeader64). `number` must be from the
|
||||
/// [`number_of_rva_and_sizes`](pe::ImageOptionalHeader64::number_of_rva_and_sizes)
|
||||
/// field of the optional header.
|
||||
pub fn parse(data: &'data [u8], number: u32) -> Result<Self> {
|
||||
let entries = data
|
||||
.read_slice_at(0, number as usize)
|
||||
.read_error("Invalid PE number of RVA and sizes")?;
|
||||
Ok(DataDirectories { entries })
|
||||
}
|
||||
|
||||
/// The number of data directories.
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.entries.len()
|
||||
}
|
||||
|
||||
/// Iterator over the data directories.
|
||||
pub fn iter(&self) -> slice::Iter<'data, pe::ImageDataDirectory> {
|
||||
self.entries.iter()
|
||||
}
|
||||
|
||||
/// Iterator which gives the directories as well as their index (one of the IMAGE_DIRECTORY_ENTRY_* constants).
|
||||
pub fn enumerate(&self) -> core::iter::Enumerate<slice::Iter<'data, pe::ImageDataDirectory>> {
|
||||
self.entries.iter().enumerate()
|
||||
}
|
||||
|
||||
/// Returns the data directory at the given index.
|
||||
///
|
||||
/// Index should be one of the `IMAGE_DIRECTORY_ENTRY_*` constants.
|
||||
///
|
||||
/// Returns `None` if the index is larger than the table size,
|
||||
/// or if the entry at the index has a zero virtual address.
|
||||
pub fn get(&self, index: usize) -> Option<&'data pe::ImageDataDirectory> {
|
||||
self.entries
|
||||
.get(index)
|
||||
.filter(|d| d.virtual_address.get(LE) != 0)
|
||||
}
|
||||
|
||||
/// Returns the unparsed export directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn export_directory<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<&'data pe::ImageExportDirectory>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_EXPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let export_data = data_dir.data(data, sections)?;
|
||||
ExportTable::parse_directory(export_data).map(Some)
|
||||
}
|
||||
|
||||
/// Returns the partially parsed export directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn export_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ExportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_EXPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let export_va = data_dir.virtual_address.get(LE);
|
||||
let export_data = data_dir.data(data, sections)?;
|
||||
ExportTable::parse(export_data, export_va).map(Some)
|
||||
}
|
||||
|
||||
/// Returns the partially parsed import directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn import_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ImportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_IMPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let import_va = data_dir.virtual_address.get(LE);
|
||||
let (section_data, section_va) = sections
|
||||
.pe_data_containing(data, import_va)
|
||||
.read_error("Invalid import data dir virtual address")?;
|
||||
Ok(Some(ImportTable::new(section_data, section_va, import_va)))
|
||||
}
|
||||
|
||||
/// Returns the partially parsed delay-load import directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn delay_load_import_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<DelayLoadImportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let import_va = data_dir.virtual_address.get(LE);
|
||||
let (section_data, section_va) = sections
|
||||
.pe_data_containing(data, import_va)
|
||||
.read_error("Invalid import data dir virtual address")?;
|
||||
Ok(Some(DelayLoadImportTable::new(
|
||||
section_data,
|
||||
section_va,
|
||||
import_va,
|
||||
)))
|
||||
}
|
||||
|
||||
/// Returns the blocks in the base relocation directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn relocation_blocks<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<RelocationBlockIterator<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_BASERELOC) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let reloc_data = data_dir.data(data, sections)?;
|
||||
Ok(Some(RelocationBlockIterator::new(reloc_data)))
|
||||
}
|
||||
|
||||
/// Returns the resource directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn resource_directory<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ResourceDirectory<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_RESOURCE) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let rsrc_data = data_dir.data(data, sections)?;
|
||||
Ok(Some(ResourceDirectory::new(rsrc_data)))
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageDataDirectory {
|
||||
/// Return the virtual address range of this directory entry.
|
||||
pub fn address_range(&self) -> (u32, u32) {
|
||||
(self.virtual_address.get(LE), self.size.get(LE))
|
||||
}
|
||||
|
||||
/// Return the file offset and size of this directory entry.
|
||||
///
|
||||
/// This function has some limitations:
|
||||
/// - It requires that the data is contained in a single section.
|
||||
/// - It uses the size field of the directory entry, which is
|
||||
/// not desirable for all data directories.
|
||||
/// - It uses the `virtual_address` of the directory entry as an address,
|
||||
/// which is not valid for `IMAGE_DIRECTORY_ENTRY_SECURITY`.
|
||||
pub fn file_range(&self, sections: &SectionTable<'_>) -> Result<(u32, u32)> {
|
||||
let (offset, section_size) = sections
|
||||
.pe_file_range_at(self.virtual_address.get(LE))
|
||||
.read_error("Invalid data dir virtual address")?;
|
||||
let size = self.size.get(LE);
|
||||
if size > section_size {
|
||||
return Err(Error("Invalid data dir size"));
|
||||
}
|
||||
Ok((offset, size))
|
||||
}
|
||||
|
||||
/// Get the data referenced by this directory entry.
|
||||
///
|
||||
/// This function has some limitations:
|
||||
/// - It requires that the data is contained in a single section.
|
||||
/// - It uses the size field of the directory entry, which is
|
||||
/// not desirable for all data directories.
|
||||
/// - It uses the `virtual_address` of the directory entry as an address,
|
||||
/// which is not valid for `IMAGE_DIRECTORY_ENTRY_SECURITY`.
|
||||
pub fn data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<&'data [u8]> {
|
||||
sections
|
||||
.pe_data_at(data, self.virtual_address.get(LE))
|
||||
.read_error("Invalid data dir virtual address")?
|
||||
.get(..self.size.get(LE) as usize)
|
||||
.read_error("Invalid data dir size")
|
||||
}
|
||||
}
|
||||
333
vendor/object/src/read/pe/export.rs
vendored
Normal file
333
vendor/object/src/read/pe/export.rs
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::read::{ByteString, Bytes, Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE, U16Bytes, U32Bytes};
|
||||
|
||||
/// Where an export is pointing to.
#[derive(Clone, Copy)]
pub enum ExportTarget<'data> {
    /// The address of the export, relative to the image base.
    Address(u32),
    /// Forwarded to an export ordinal in another DLL.
    ///
    /// This gives the name of the DLL, and the ordinal.
    ForwardByOrdinal(&'data [u8], u32),
    /// Forwarded to an export name in another DLL.
    ///
    /// This gives the name of the DLL, and the export name.
    ForwardByName(&'data [u8], &'data [u8]),
}

impl<'data> ExportTarget<'data> {
    /// Returns true if the target is an address.
    pub fn is_address(&self) -> bool {
        // `matches!` replaces the manual match-returning-bool
        // (clippy::match_like_matches_macro).
        matches!(self, ExportTarget::Address(_))
    }

    /// Returns true if the export is forwarded to another DLL.
    pub fn is_forward(&self) -> bool {
        !self.is_address()
    }
}
|
||||
|
||||
/// An export from a PE file.
///
/// There are multiple kinds of PE exports (with or without a name, and local or forwarded).
#[derive(Clone, Copy)]
pub struct Export<'data> {
    /// The ordinal of the export.
    ///
    /// These are sequential, starting at a base specified in the DLL.
    pub ordinal: u32,
    /// The name of the export, if known.
    pub name: Option<&'data [u8]>,
    /// The target of this export.
    pub target: ExportTarget<'data>,
}

impl<'a> Debug for Export<'a> {
    // Manual impl so the name bytes are wrapped in `ByteString` for output.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::result::Result<(), core::fmt::Error> {
        f.debug_struct("Export")
            .field("ordinal", &self.ordinal)
            .field("name", &self.name.map(ByteString))
            .field("target", &self.target)
            .finish()
    }
}

impl<'a> Debug for ExportTarget<'a> {
    // Manual impl so library and export names are wrapped in `ByteString` for output.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::result::Result<(), core::fmt::Error> {
        match self {
            ExportTarget::Address(address) => write!(f, "Address({:#x})", address),
            ExportTarget::ForwardByOrdinal(library, ordinal) => write!(
                f,
                "ForwardByOrdinal({:?}.#{})",
                ByteString(library),
                ordinal
            ),
            ExportTarget::ForwardByName(library, name) => write!(
                f,
                "ForwardByName({:?}.{:?})",
                ByteString(library),
                ByteString(name)
            ),
        }
    }
}
|
||||
|
||||
/// A partially parsed PE export table.
///
/// Returned by [`DataDirectories::export_table`](super::DataDirectories::export_table).
#[derive(Debug, Clone)]
pub struct ExportTable<'data> {
    // Section data containing the export table.
    data: Bytes<'data>,
    // RVA corresponding to the start of `data`.
    virtual_address: u32,
    // The export directory header.
    directory: &'data pe::ImageExportDirectory,
    // Export address table.
    addresses: &'data [U32Bytes<LE>],
    // Name pointer table; parallel with `name_ordinals`.
    names: &'data [U32Bytes<LE>],
    // Ordinal table; parallel with `names`.
    name_ordinals: &'data [U16Bytes<LE>],
}
|
||||
|
||||
impl<'data> ExportTable<'data> {
|
||||
    /// Parse the export table given its section data and address.
    pub fn parse(data: &'data [u8], virtual_address: u32) -> Result<Self> {
        let directory = Self::parse_directory(data)?;
        let data = Bytes(data);

        // The address table is optional; an RVA of 0 means it is absent.
        let mut addresses = &[][..];
        let address_of_functions = directory.address_of_functions.get(LE);
        if address_of_functions != 0 {
            addresses = data
                .read_slice_at::<U32Bytes<_>>(
                    // Convert the table RVA into an offset within the section data;
                    // out-of-range offsets are rejected by `read_slice_at`.
                    address_of_functions.wrapping_sub(virtual_address) as usize,
                    directory.number_of_functions.get(LE) as usize,
                )
                .read_error("Invalid PE export address table")?;
        }

        // The name pointer table and the ordinal table are parallel arrays
        // of the same length; both must be present together.
        let mut names = &[][..];
        let mut name_ordinals = &[][..];
        let address_of_names = directory.address_of_names.get(LE);
        let address_of_name_ordinals = directory.address_of_name_ordinals.get(LE);
        if address_of_names != 0 {
            if address_of_name_ordinals == 0 {
                return Err(Error("Missing PE export ordinal table"));
            }

            let number = directory.number_of_names.get(LE) as usize;
            names = data
                .read_slice_at::<U32Bytes<_>>(
                    address_of_names.wrapping_sub(virtual_address) as usize,
                    number,
                )
                .read_error("Invalid PE export name pointer table")?;
            name_ordinals = data
                .read_slice_at::<U16Bytes<_>>(
                    address_of_name_ordinals.wrapping_sub(virtual_address) as usize,
                    number,
                )
                .read_error("Invalid PE export ordinal table")?;
        }

        Ok(ExportTable {
            data,
            virtual_address,
            directory,
            addresses,
            names,
            name_ordinals,
        })
    }
|
||||
|
||||
    /// Parse the export directory given its section data.
    pub fn parse_directory(data: &'data [u8]) -> Result<&'data pe::ImageExportDirectory> {
        data.read_at::<pe::ImageExportDirectory>(0)
            .read_error("Invalid PE export dir size")
    }

    /// Returns the header of the export table.
    pub fn directory(&self) -> &'data pe::ImageExportDirectory {
        self.directory
    }

    /// Returns the base value of ordinals.
    ///
    /// Adding this to an address index will give an ordinal.
    pub fn ordinal_base(&self) -> u32 {
        self.directory.base.get(LE)
    }

    /// Returns the unparsed address table.
    ///
    /// An address table entry may be a local address, or the address of a forwarded export entry.
    /// See [`Self::is_forward`] and [`Self::target_from_address`].
    pub fn addresses(&self) -> &'data [U32Bytes<LE>] {
        self.addresses
    }

    /// Returns the unparsed name pointer table.
    ///
    /// A name pointer table entry can be used with [`Self::name_from_pointer`].
    pub fn name_pointers(&self) -> &'data [U32Bytes<LE>] {
        self.names
    }

    /// Returns the unparsed ordinal table.
    ///
    /// An ordinal table entry is a 0-based index into the address table.
    /// See [`Self::address_by_index`] and [`Self::target_by_index`].
    pub fn name_ordinals(&self) -> &'data [U16Bytes<LE>] {
        self.name_ordinals
    }
|
||||
|
||||
/// Returns an iterator for the entries in the name pointer table and ordinal table.
|
||||
///
|
||||
/// A name pointer table entry can be used with [`Self::name_from_pointer`].
|
||||
///
|
||||
/// An ordinal table entry is a 0-based index into the address table.
|
||||
/// See [`Self::address_by_index`] and [`Self::target_by_index`].
|
||||
pub fn name_iter(&self) -> impl Iterator<Item = (u32, u16)> + 'data {
|
||||
self.names
|
||||
.iter()
|
||||
.map(|x| x.get(LE))
|
||||
.zip(self.name_ordinals.iter().map(|x| x.get(LE)))
|
||||
}
|
||||
|
||||
/// Returns the export address table entry at the given address index.
|
||||
///
|
||||
/// This may be a local address, or the address of a forwarded export entry.
|
||||
/// See [`Self::is_forward`] and [`Self::target_from_address`].
|
||||
///
|
||||
/// `index` is a 0-based index into the export address table.
|
||||
pub fn address_by_index(&self, index: u32) -> Result<u32> {
|
||||
Ok(self
|
||||
.addresses
|
||||
.get(index as usize)
|
||||
.read_error("Invalid PE export address index")?
|
||||
.get(LE))
|
||||
}
|
||||
|
||||
/// Returns the export address table entry at the given ordinal.
|
||||
///
|
||||
/// This may be a local address, or the address of a forwarded export entry.
|
||||
/// See [`Self::is_forward`] and [`Self::target_from_address`].
|
||||
pub fn address_by_ordinal(&self, ordinal: u32) -> Result<u32> {
|
||||
self.address_by_index(ordinal.wrapping_sub(self.ordinal_base()))
|
||||
}
|
||||
|
||||
/// Returns the target of the export at the given address index.
///
/// `index` is a 0-based index into the export address table.
pub fn target_by_index(&self, index: u32) -> Result<ExportTarget<'data>> {
    self.target_from_address(self.address_by_index(index)?)
}

/// Returns the target of the export at the given ordinal.
///
/// The ordinal is converted to an address index via [`Self::address_by_ordinal`].
pub fn target_by_ordinal(&self, ordinal: u32) -> Result<ExportTarget<'data>> {
    self.target_from_address(self.address_by_ordinal(ordinal)?)
}
|
||||
|
||||
/// Convert an export address table entry into a target.
|
||||
pub fn target_from_address(&self, address: u32) -> Result<ExportTarget<'data>> {
|
||||
Ok(if let Some(forward) = self.forward_string(address)? {
|
||||
let i = forward
|
||||
.iter()
|
||||
.position(|x| *x == b'.')
|
||||
.read_error("Missing PE forwarded export separator")?;
|
||||
let library = &forward[..i];
|
||||
match &forward[i + 1..] {
|
||||
[b'#', digits @ ..] => {
|
||||
let ordinal =
|
||||
parse_ordinal(digits).read_error("Invalid PE forwarded export ordinal")?;
|
||||
ExportTarget::ForwardByOrdinal(library, ordinal)
|
||||
}
|
||||
[] => {
|
||||
return Err(Error("Missing PE forwarded export name"));
|
||||
}
|
||||
name => ExportTarget::ForwardByName(library, name),
|
||||
}
|
||||
} else {
|
||||
ExportTarget::Address(address)
|
||||
})
|
||||
}
|
||||
|
||||
fn forward_offset(&self, address: u32) -> Option<usize> {
|
||||
let offset = address.wrapping_sub(self.virtual_address) as usize;
|
||||
if offset < self.data.len() {
|
||||
Some(offset)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if the export address table entry is a forward.
|
||||
pub fn is_forward(&self, address: u32) -> bool {
|
||||
self.forward_offset(address).is_some()
|
||||
}
|
||||
|
||||
/// Return the forward string if the export address table entry is a forward.
|
||||
pub fn forward_string(&self, address: u32) -> Result<Option<&'data [u8]>> {
|
||||
if let Some(offset) = self.forward_offset(address) {
|
||||
self.data
|
||||
.read_string_at(offset)
|
||||
.read_error("Invalid PE forwarded export address")
|
||||
.map(Some)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert an export name pointer table entry into a name.
pub fn name_from_pointer(&self, name_pointer: u32) -> Result<&'data [u8]> {
    // The pointer is a virtual address; convert it to an offset within
    // the export directory data before reading the NUL-terminated name.
    let offset = name_pointer.wrapping_sub(self.virtual_address);
    self.data
        .read_string_at(offset as usize)
        .read_error("Invalid PE export name pointer")
}
|
||||
|
||||
/// Returns the parsed exports in this table.
|
||||
pub fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
// First, let's list all exports.
|
||||
let mut exports = Vec::new();
|
||||
let ordinal_base = self.ordinal_base();
|
||||
for (i, address) in self.addresses.iter().enumerate() {
|
||||
// Convert from an array index to an ordinal.
|
||||
let ordinal = ordinal_base.wrapping_add(i as u32);
|
||||
let target = self.target_from_address(address.get(LE))?;
|
||||
exports.push(Export {
|
||||
ordinal,
|
||||
target,
|
||||
// Might be populated later.
|
||||
name: None,
|
||||
});
|
||||
}
|
||||
|
||||
// Now, check whether some (or all) of them have an associated name.
|
||||
// `ordinal_index` is a 0-based index into `addresses`.
|
||||
for (name_pointer, ordinal_index) in self.name_iter() {
|
||||
let name = self.name_from_pointer(name_pointer)?;
|
||||
exports
|
||||
.get_mut(ordinal_index as usize)
|
||||
.read_error("Invalid PE export ordinal")?
|
||||
.name = Some(name);
|
||||
}
|
||||
|
||||
Ok(exports)
|
||||
}
|
||||
}
|
||||
|
||||
// Parse a decimal ordinal from ASCII digits.
//
// Returns `None` for an empty slice, a non-digit byte, or a value that
// does not fit in `u32`.
fn parse_ordinal(digits: &[u8]) -> Option<u32> {
    if digits.is_empty() {
        return None;
    }
    digits.iter().try_fold(0u32, |total, &byte| {
        let digit = (byte as char).to_digit(10)?;
        total.checked_mul(10)?.checked_add(digit)
    })
}
|
||||
1050
vendor/object/src/read/pe/file.rs
vendored
Normal file
1050
vendor/object/src/read/pe/file.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
337
vendor/object/src/read/pe/import.rs
vendored
Normal file
337
vendor/object/src/read/pe/import.rs
vendored
Normal file
@@ -0,0 +1,337 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::read::{Bytes, ReadError, Result};
|
||||
use crate::{pe, LittleEndian as LE, Pod, U16Bytes};
|
||||
|
||||
use super::ImageNtHeaders;
|
||||
|
||||
/// Information for parsing a PE import table.
///
/// Returned by [`DataDirectories::import_table`](super::DataDirectories::import_table).
#[derive(Debug, Clone)]
pub struct ImportTable<'data> {
    // Data of the section that contains the import table.
    section_data: Bytes<'data>,
    // Virtual address of that section.
    section_address: u32,
    // Virtual address at which the import descriptors start.
    import_address: u32,
}
|
||||
|
||||
impl<'data> ImportTable<'data> {
|
||||
/// Create a new import table parser.
|
||||
///
|
||||
/// The import descriptors start at `import_address`.
|
||||
/// The size declared in the `IMAGE_DIRECTORY_ENTRY_IMPORT` data directory is
|
||||
/// ignored by the Windows loader, and so descriptors will be parsed until a null entry.
|
||||
///
|
||||
/// `section_data` should be from the section containing `import_address`, and
|
||||
/// `section_address` should be the address of that section. Pointers within the
|
||||
/// descriptors and thunks may point to anywhere within the section data.
|
||||
pub fn new(section_data: &'data [u8], section_address: u32, import_address: u32) -> Self {
|
||||
ImportTable {
|
||||
section_data: Bytes(section_data),
|
||||
section_address,
|
||||
import_address,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator for the import descriptors.
|
||||
pub fn descriptors(&self) -> Result<ImportDescriptorIterator<'data>> {
|
||||
let offset = self.import_address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import descriptor address")?;
|
||||
Ok(ImportDescriptorIterator { data })
|
||||
}
|
||||
|
||||
/// Return a library name given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageImportDescriptor::name`].
|
||||
pub fn name(&self, address: u32) -> Result<&'data [u8]> {
|
||||
self.section_data
|
||||
.read_string_at(address.wrapping_sub(self.section_address) as usize)
|
||||
.read_error("Invalid PE import descriptor name")
|
||||
}
|
||||
|
||||
/// Return a list of thunks given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageImportDescriptor::original_first_thunk`]
|
||||
/// or [`pe::ImageImportDescriptor::first_thunk`].
|
||||
pub fn thunks(&self, address: u32) -> Result<ImportThunkList<'data>> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import thunk table address")?;
|
||||
Ok(ImportThunkList { data })
|
||||
}
|
||||
|
||||
/// Parse a thunk.
|
||||
pub fn import<Pe: ImageNtHeaders>(&self, thunk: Pe::ImageThunkData) -> Result<Import<'data>> {
|
||||
if thunk.is_ordinal() {
|
||||
Ok(Import::Ordinal(thunk.ordinal()))
|
||||
} else {
|
||||
let (hint, name) = self.hint_name(thunk.address())?;
|
||||
Ok(Import::Name(hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the hint and name at the given address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageThunkData32`] or [`pe::ImageThunkData64`].
|
||||
///
|
||||
/// The hint is an index into the export name pointer table in the target library.
|
||||
pub fn hint_name(&self, address: u32) -> Result<(u16, &'data [u8])> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import thunk address")?;
|
||||
let hint = data
|
||||
.read::<U16Bytes<LE>>()
|
||||
.read_error("Missing PE import thunk hint")?
|
||||
.get(LE);
|
||||
let name = data
|
||||
.read_string()
|
||||
.read_error("Missing PE import thunk name")?;
|
||||
Ok((hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// A fallible iterator for the descriptors in the import data directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportDescriptorIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> ImportDescriptorIterator<'data> {
|
||||
/// Return the next descriptor.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null descriptor is found.
|
||||
pub fn next(&mut self) -> Result<Option<&'data pe::ImageImportDescriptor>> {
|
||||
let import_desc = self
|
||||
.data
|
||||
.read::<pe::ImageImportDescriptor>()
|
||||
.read_error("Missing PE null import descriptor")?;
|
||||
if import_desc.is_null() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(import_desc))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A list of import thunks.
|
||||
///
|
||||
/// These may be in the import lookup table, or the import address table.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportThunkList<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> ImportThunkList<'data> {
|
||||
/// Get the thunk at the given index.
|
||||
pub fn get<Pe: ImageNtHeaders>(&self, index: usize) -> Result<Pe::ImageThunkData> {
|
||||
let thunk = self
|
||||
.data
|
||||
.read_at(index * mem::size_of::<Pe::ImageThunkData>())
|
||||
.read_error("Invalid PE import thunk index")?;
|
||||
Ok(*thunk)
|
||||
}
|
||||
|
||||
/// Return the first thunk in the list, and update `self` to point after it.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null thunk is found.
|
||||
pub fn next<Pe: ImageNtHeaders>(&mut self) -> Result<Option<Pe::ImageThunkData>> {
|
||||
let thunk = self
|
||||
.data
|
||||
.read::<Pe::ImageThunkData>()
|
||||
.read_error("Missing PE null import thunk")?;
|
||||
if thunk.address() == 0 {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(*thunk))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed import thunk.
///
/// Returned by [`ImportTable::import`] and [`DelayLoadImportTable::import`].
#[derive(Debug, Clone, Copy)]
pub enum Import<'data> {
    /// Import by ordinal.
    Ordinal(u16),
    /// Import by name.
    ///
    /// Includes a hint for the index into the export name pointer table in the target library.
    Name(u16, &'data [u8]),
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageThunkData32`] and [`pe::ImageThunkData64`].
#[allow(missing_docs)]
pub trait ImageThunkData: Debug + Pod {
    /// Return the raw thunk value.
    fn raw(self) -> u64;

    /// Returns true if the ordinal flag is set.
    ///
    /// When set, use [`Self::ordinal`]; otherwise [`Self::address`] gives the
    /// address of a hint/name entry.
    fn is_ordinal(self) -> bool;

    /// Return the ordinal portion of the thunk.
    ///
    /// Does not check the ordinal flag.
    fn ordinal(self) -> u16;

    /// Return the RVA portion of the thunk.
    ///
    /// Does not check the ordinal flag.
    fn address(self) -> u32;
}
|
||||
|
||||
impl ImageThunkData for pe::ImageThunkData64 {
|
||||
fn raw(self) -> u64 {
|
||||
self.0.get(LE)
|
||||
}
|
||||
|
||||
fn is_ordinal(self) -> bool {
|
||||
self.0.get(LE) & pe::IMAGE_ORDINAL_FLAG64 != 0
|
||||
}
|
||||
|
||||
fn ordinal(self) -> u16 {
|
||||
self.0.get(LE) as u16
|
||||
}
|
||||
|
||||
fn address(self) -> u32 {
|
||||
self.0.get(LE) as u32 & 0x7fff_ffff
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageThunkData for pe::ImageThunkData32 {
|
||||
fn raw(self) -> u64 {
|
||||
self.0.get(LE).into()
|
||||
}
|
||||
|
||||
fn is_ordinal(self) -> bool {
|
||||
self.0.get(LE) & pe::IMAGE_ORDINAL_FLAG32 != 0
|
||||
}
|
||||
|
||||
fn ordinal(self) -> u16 {
|
||||
self.0.get(LE) as u16
|
||||
}
|
||||
|
||||
fn address(self) -> u32 {
|
||||
self.0.get(LE) & 0x7fff_ffff
|
||||
}
|
||||
}
|
||||
|
||||
/// Information for parsing a PE delay-load import table.
|
||||
///
|
||||
/// Returned by
|
||||
/// [`DataDirectories::delay_load_import_table`](super::DataDirectories::delay_load_import_table).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DelayLoadImportTable<'data> {
|
||||
section_data: Bytes<'data>,
|
||||
section_address: u32,
|
||||
import_address: u32,
|
||||
}
|
||||
|
||||
impl<'data> DelayLoadImportTable<'data> {
|
||||
/// Create a new delay load import table parser.
|
||||
///
|
||||
/// The import descriptors start at `import_address`.
|
||||
/// This table works in the same way the import table does: descriptors will be
|
||||
/// parsed until a null entry.
|
||||
///
|
||||
/// `section_data` should be from the section containing `import_address`, and
|
||||
/// `section_address` should be the address of that section. Pointers within the
|
||||
/// descriptors and thunks may point to anywhere within the section data.
|
||||
pub fn new(section_data: &'data [u8], section_address: u32, import_address: u32) -> Self {
|
||||
DelayLoadImportTable {
|
||||
section_data: Bytes(section_data),
|
||||
section_address,
|
||||
import_address,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator for the import descriptors.
|
||||
pub fn descriptors(&self) -> Result<DelayLoadDescriptorIterator<'data>> {
|
||||
let offset = self.import_address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay-load import descriptor address")?;
|
||||
Ok(DelayLoadDescriptorIterator { data })
|
||||
}
|
||||
|
||||
/// Return a library name given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageDelayloadDescriptor::dll_name_rva`].
|
||||
pub fn name(&self, address: u32) -> Result<&'data [u8]> {
|
||||
self.section_data
|
||||
.read_string_at(address.wrapping_sub(self.section_address) as usize)
|
||||
.read_error("Invalid PE import descriptor name")
|
||||
}
|
||||
|
||||
/// Return a list of thunks given its address.
|
||||
///
|
||||
/// This address may be from the INT, i.e. from
|
||||
/// [`pe::ImageDelayloadDescriptor::import_name_table_rva`].
|
||||
///
|
||||
/// Please note that others RVA values from [`pe::ImageDelayloadDescriptor`] are used
|
||||
/// by the delay loader at runtime to store values, and thus do not point inside the same
|
||||
/// section as the INT. Calling this function on those addresses will fail.
|
||||
pub fn thunks(&self, address: u32) -> Result<ImportThunkList<'data>> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay load import thunk table address")?;
|
||||
Ok(ImportThunkList { data })
|
||||
}
|
||||
|
||||
/// Parse a thunk.
|
||||
pub fn import<Pe: ImageNtHeaders>(&self, thunk: Pe::ImageThunkData) -> Result<Import<'data>> {
|
||||
if thunk.is_ordinal() {
|
||||
Ok(Import::Ordinal(thunk.ordinal()))
|
||||
} else {
|
||||
let (hint, name) = self.hint_name(thunk.address())?;
|
||||
Ok(Import::Name(hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the hint and name at the given address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageThunkData32`] or [`pe::ImageThunkData64`].
|
||||
///
|
||||
/// The hint is an index into the export name pointer table in the target library.
|
||||
pub fn hint_name(&self, address: u32) -> Result<(u16, &'data [u8])> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay load import thunk address")?;
|
||||
let hint = data
|
||||
.read::<U16Bytes<LE>>()
|
||||
.read_error("Missing PE delay load import thunk hint")?
|
||||
.get(LE);
|
||||
let name = data
|
||||
.read_string()
|
||||
.read_error("Missing PE delay load import thunk name")?;
|
||||
Ok((hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// A fallible iterator for the descriptors in the delay-load data directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DelayLoadDescriptorIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> DelayLoadDescriptorIterator<'data> {
|
||||
/// Return the next descriptor.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null descriptor is found.
|
||||
pub fn next(&mut self) -> Result<Option<&'data pe::ImageDelayloadDescriptor>> {
|
||||
let import_desc = self
|
||||
.data
|
||||
.read::<pe::ImageDelayloadDescriptor>()
|
||||
.read_error("Missing PE null delay-load import descriptor")?;
|
||||
if import_desc.is_null() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(import_desc))
|
||||
}
|
||||
}
|
||||
}
|
||||
68
vendor/object/src/read/pe/mod.rs
vendored
Normal file
68
vendor/object/src/read/pe/mod.rs
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
//! Support for reading PE files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between PE32 and PE32+.
|
||||
//! The primary trait for this is [`ImageNtHeaders`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`PeFile`] implements the [`Object`](crate::read::Object) trait for
|
||||
//! PE files. [`PeFile`] is parameterised by [`ImageNtHeaders`] to allow
|
||||
//! reading both PE32 and PE32+. There are type aliases for these parameters
|
||||
//! ([`PeFile32`] and [`PeFile64`]).
|
||||
//!
|
||||
//! ## Low level API
|
||||
//!
|
||||
//! The [`ImageNtHeaders`] trait can be directly used to parse both
|
||||
//! [`pe::ImageNtHeaders32`] and [`pe::ImageNtHeaders64`].
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::pe;
|
||||
//! use object::read::pe::ImageNtHeaders;
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let dos_header = pe::ImageDosHeader::parse(&*data)?;
|
||||
//! let mut offset = dos_header.nt_headers_offset().into();
|
||||
//! let (nt_headers, data_directories) = pe::ImageNtHeaders64::parse(&*data, &mut offset)?;
|
||||
//! let sections = nt_headers.sections(&*data, offset)?;
|
||||
//! let symbols = nt_headers.symbols(&*data)?;
|
||||
//! for section in sections.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(section.name(symbols.strings())?));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::pe;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod data_directory;
|
||||
pub use data_directory::*;
|
||||
|
||||
mod export;
|
||||
pub use export::*;
|
||||
|
||||
mod import;
|
||||
pub use import::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
|
||||
mod resource;
|
||||
pub use resource::*;
|
||||
|
||||
mod rich;
|
||||
pub use rich::*;
|
||||
|
||||
pub use super::coff::{SectionTable, SymbolTable};
|
||||
92
vendor/object/src/read/pe/relocation.rs
vendored
Normal file
92
vendor/object/src/read/pe/relocation.rs
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
use core::slice;
|
||||
|
||||
use crate::endian::{LittleEndian as LE, U16};
|
||||
use crate::pe;
|
||||
use crate::read::{Bytes, Error, ReadError, Result};
|
||||
|
||||
/// An iterator over the relocation blocks in the `.reloc` section of a PE file.
|
||||
///
|
||||
/// Returned by [`DataDirectories::relocation_blocks`](super::DataDirectories::relocation_blocks).
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct RelocationBlockIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> RelocationBlockIterator<'data> {
|
||||
/// Construct a new iterator from the data of the `.reloc` section.
|
||||
pub fn new(data: &'data [u8]) -> Self {
|
||||
RelocationBlockIterator { data: Bytes(data) }
|
||||
}
|
||||
|
||||
/// Read the next relocation page.
|
||||
pub fn next(&mut self) -> Result<Option<RelocationIterator<'data>>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let header = self
|
||||
.data
|
||||
.read::<pe::ImageBaseRelocation>()
|
||||
.read_error("Invalid PE reloc section size")?;
|
||||
let virtual_address = header.virtual_address.get(LE);
|
||||
let size = header.size_of_block.get(LE);
|
||||
if size <= 8 || size & 3 != 0 {
|
||||
return Err(Error("Invalid PE reloc block size"));
|
||||
}
|
||||
let count = (size - 8) / 2;
|
||||
let relocs = self
|
||||
.data
|
||||
.read_slice::<U16<LE>>(count as usize)
|
||||
.read_error("Invalid PE reloc block size")?
|
||||
.iter();
|
||||
Ok(Some(RelocationIterator {
|
||||
virtual_address,
|
||||
size,
|
||||
relocs,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator of the relocations in a block in the `.reloc` section of a PE file.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RelocationIterator<'data> {
|
||||
virtual_address: u32,
|
||||
size: u32,
|
||||
relocs: slice::Iter<'data, U16<LE>>,
|
||||
}
|
||||
|
||||
impl<'data> RelocationIterator<'data> {
|
||||
/// Return the virtual address of the page that this block of relocations applies to.
|
||||
pub fn virtual_address(&self) -> u32 {
|
||||
self.virtual_address
|
||||
}
|
||||
|
||||
/// Return the size in bytes of this block of relocations.
|
||||
pub fn size(&self) -> u32 {
|
||||
self.size
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> Iterator for RelocationIterator<'data> {
|
||||
type Item = Relocation;
|
||||
|
||||
fn next(&mut self) -> Option<Relocation> {
|
||||
loop {
|
||||
let reloc = self.relocs.next()?.get(LE);
|
||||
if reloc != 0 {
|
||||
return Some(Relocation {
|
||||
virtual_address: self.virtual_address.wrapping_add((reloc & 0xfff) as u32),
|
||||
typ: reloc >> 12,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A relocation in the `.reloc` section of a PE file.
///
/// Yielded by [`RelocationIterator`].
#[derive(Debug, Default, Clone, Copy)]
pub struct Relocation {
    /// The virtual address of the relocation.
    pub virtual_address: u32,
    /// One of the `pe::IMAGE_REL_BASED_*` constants.
    pub typ: u16,
}
|
||||
209
vendor/object/src/read/pe/resource.rs
vendored
Normal file
209
vendor/object/src/read/pe/resource.rs
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
use alloc::string::String;
|
||||
use core::char;
|
||||
|
||||
use crate::read::{ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE, U16Bytes};
|
||||
|
||||
/// The `.rsrc` section of a PE file.
|
||||
///
|
||||
/// Returned by [`DataDirectories::resource_directory`](super::DataDirectories::resource_directory).
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ResourceDirectory<'data> {
|
||||
data: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectory<'data> {
|
||||
/// Construct from the data of the `.rsrc` section.
|
||||
pub fn new(data: &'data [u8]) -> Self {
|
||||
ResourceDirectory { data }
|
||||
}
|
||||
|
||||
/// Parses the root resource directory.
|
||||
pub fn root(&self) -> Result<ResourceDirectoryTable<'data>> {
|
||||
ResourceDirectoryTable::parse(self.data, 0)
|
||||
}
|
||||
}
|
||||
|
||||
/// A table of resource entries.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ResourceDirectoryTable<'data> {
|
||||
/// The table header.
|
||||
pub header: &'data pe::ImageResourceDirectory,
|
||||
/// The table entries.
|
||||
pub entries: &'data [pe::ImageResourceDirectoryEntry],
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectoryTable<'data> {
|
||||
fn parse(data: &'data [u8], offset: u32) -> Result<Self> {
|
||||
let mut offset = u64::from(offset);
|
||||
let header = data
|
||||
.read::<pe::ImageResourceDirectory>(&mut offset)
|
||||
.read_error("Invalid resource table header")?;
|
||||
let entries_count = header.number_of_id_entries.get(LE) as usize
|
||||
+ header.number_of_named_entries.get(LE) as usize;
|
||||
let entries = data
|
||||
.read_slice::<pe::ImageResourceDirectoryEntry>(&mut offset, entries_count)
|
||||
.read_error("Invalid resource table entries")?;
|
||||
Ok(Self { header, entries })
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageResourceDirectoryEntry {
|
||||
/// Returns true if the entry has a name, rather than an ID.
|
||||
pub fn has_name(&self) -> bool {
|
||||
self.name_or_id.get(LE) & pe::IMAGE_RESOURCE_NAME_IS_STRING != 0
|
||||
}
|
||||
|
||||
/// Returns the section offset of the name.
|
||||
///
|
||||
/// Valid if `has_name()` returns true.
|
||||
fn name(&self) -> ResourceName {
|
||||
let offset = self.name_or_id.get(LE) & !pe::IMAGE_RESOURCE_NAME_IS_STRING;
|
||||
ResourceName { offset }
|
||||
}
|
||||
|
||||
/// Returns the ID.
|
||||
///
|
||||
/// Valid if `has_string_name()` returns false.
|
||||
fn id(&self) -> u16 {
|
||||
(self.name_or_id.get(LE) & 0x0000_FFFF) as u16
|
||||
}
|
||||
|
||||
/// Returns the entry name
|
||||
pub fn name_or_id(&self) -> ResourceNameOrId {
|
||||
if self.has_name() {
|
||||
ResourceNameOrId::Name(self.name())
|
||||
} else {
|
||||
ResourceNameOrId::Id(self.id())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the entry is a subtable.
|
||||
pub fn is_table(&self) -> bool {
|
||||
self.offset_to_data_or_directory.get(LE) & pe::IMAGE_RESOURCE_DATA_IS_DIRECTORY != 0
|
||||
}
|
||||
|
||||
/// Returns the section offset of the associated table or data.
|
||||
pub fn data_offset(&self) -> u32 {
|
||||
self.offset_to_data_or_directory.get(LE) & !pe::IMAGE_RESOURCE_DATA_IS_DIRECTORY
|
||||
}
|
||||
|
||||
/// Returns the data associated to this directory entry.
|
||||
pub fn data<'data>(
|
||||
&self,
|
||||
section: ResourceDirectory<'data>,
|
||||
) -> Result<ResourceDirectoryEntryData<'data>> {
|
||||
if self.is_table() {
|
||||
ResourceDirectoryTable::parse(section.data, self.data_offset())
|
||||
.map(ResourceDirectoryEntryData::Table)
|
||||
} else {
|
||||
section
|
||||
.data
|
||||
.read_at::<pe::ImageResourceDataEntry>(self.data_offset().into())
|
||||
.read_error("Invalid resource entry")
|
||||
.map(ResourceDirectoryEntryData::Data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Data associated with a resource directory entry.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ResourceDirectoryEntryData<'data> {
|
||||
/// A subtable entry.
|
||||
Table(ResourceDirectoryTable<'data>),
|
||||
/// A resource data entry.
|
||||
Data(&'data pe::ImageResourceDataEntry),
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectoryEntryData<'data> {
|
||||
/// Converts to an option of table.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn table(self) -> Option<ResourceDirectoryTable<'data>> {
|
||||
match self {
|
||||
Self::Table(dir) => Some(dir),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts to an option of data entry.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn data(self) -> Option<&'data pe::ImageResourceDataEntry> {
|
||||
match self {
|
||||
Self::Data(rsc) => Some(rsc),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource name.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ResourceName {
|
||||
offset: u32,
|
||||
}
|
||||
|
||||
impl ResourceName {
|
||||
/// Converts to a `String`.
|
||||
pub fn to_string_lossy(&self, directory: ResourceDirectory<'_>) -> Result<String> {
|
||||
let d = self.data(directory)?.iter().map(|c| c.get(LE));
|
||||
|
||||
Ok(char::decode_utf16(d)
|
||||
.map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER))
|
||||
.collect::<String>())
|
||||
}
|
||||
|
||||
/// Returns the string unicode buffer.
|
||||
pub fn data<'data>(
|
||||
&self,
|
||||
directory: ResourceDirectory<'data>,
|
||||
) -> Result<&'data [U16Bytes<LE>]> {
|
||||
let mut offset = u64::from(self.offset);
|
||||
let len = directory
|
||||
.data
|
||||
.read::<U16Bytes<LE>>(&mut offset)
|
||||
.read_error("Invalid resource name offset")?;
|
||||
directory
|
||||
.data
|
||||
.read_slice::<U16Bytes<LE>>(&mut offset, len.get(LE).into())
|
||||
.read_error("Invalid resource name length")
|
||||
}
|
||||
|
||||
/// Returns the string buffer as raw bytes.
|
||||
pub fn raw_data<'data>(&self, directory: ResourceDirectory<'data>) -> Result<&'data [u8]> {
|
||||
self.data(directory).map(crate::pod::bytes_of_slice)
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource name or ID.
|
||||
///
|
||||
/// Can be either a string or a numeric ID.
|
||||
#[derive(Debug)]
|
||||
pub enum ResourceNameOrId {
|
||||
/// A resource name.
|
||||
Name(ResourceName),
|
||||
/// A resource ID.
|
||||
Id(u16),
|
||||
}
|
||||
|
||||
impl ResourceNameOrId {
|
||||
/// Converts to an option of name.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn name(self) -> Option<ResourceName> {
|
||||
match self {
|
||||
Self::Name(name) => Some(name),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts to an option of ID.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn id(self) -> Option<u16> {
|
||||
match self {
|
||||
Self::Id(id) => Some(id),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
91
vendor/object/src/read/pe/rich.rs
vendored
Normal file
91
vendor/object/src/read/pe/rich.rs
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
//! PE rich header handling
|
||||
|
||||
use core::mem;
|
||||
|
||||
use crate::pod::bytes_of_slice;
|
||||
use crate::read::Bytes;
|
||||
use crate::{pe, LittleEndian as LE, ReadRef, U32};
|
||||
|
||||
/// Parsed information about a Rich Header.
#[derive(Debug, Clone, Copy)]
pub struct RichHeaderInfo<'data> {
    /// The offset at which the rich header starts.
    pub offset: usize,
    /// The length (in bytes) of the rich header.
    ///
    /// This includes the payload, but also the 16-byte start sequence and the
    /// 8-byte final "Rich" and XOR key.
    pub length: usize,
    /// The XOR key used to mask the rich header.
    ///
    /// Unless the file has been tampered with, it should be equal to a checksum
    /// of the file header.
    pub xor_key: u32,
    // Entries as stored in the file, still masked with `xor_key`.
    masked_entries: &'data [pe::MaskedRichHeaderEntry],
}

/// A PE rich header entry after it has been unmasked.
///
/// See [`pe::MaskedRichHeaderEntry`].
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct RichHeaderEntry {
    /// ID of the component.
    pub comp_id: u32,
    /// Number of times this component has been used when building this PE.
    pub count: u32,
}
|
||||
|
||||
impl<'data> RichHeaderInfo<'data> {
    /// Try to locate a rich header and its entries in the current PE file.
    ///
    /// Searches the bytes before the NT header; returns `None` if no
    /// well-formed rich header is found there.
    pub fn parse<R: ReadRef<'data>>(data: R, nt_header_offset: u64) -> Option<Self> {
        // Locate the rich header, if any.
        // It ends with the "Rich" string and an XOR key, before the NT header.
        // Both marker searches are restricted to 4-byte-aligned offsets.
        let data = data.read_bytes_at(0, nt_header_offset).map(Bytes).ok()?;
        let end_marker_offset = memmem(data.0, b"Rich", 4)?;
        let xor_key = *data.read_at::<U32<LE>>(end_marker_offset + 4).ok()?;

        // It starts at the masked "DanS" string and 3 masked zeroes.
        // 0x536e_6144 is "DanS" when stored little-endian; masking a zero
        // dword with the key yields the key itself, hence three key copies.
        let masked_start_marker = U32::new(LE, 0x536e_6144 ^ xor_key.get(LE));
        let start_header = [masked_start_marker, xor_key, xor_key, xor_key];
        let start_sequence = bytes_of_slice(&start_header);
        let start_marker_offset = memmem(&data.0[..end_marker_offset], start_sequence, 4)?;

        // Extract the items between the markers.
        // Any trailing partial entry is ignored by the integer division.
        let items_offset = start_marker_offset + start_sequence.len();
        let items_len = end_marker_offset - items_offset;
        let item_count = items_len / mem::size_of::<pe::MaskedRichHeaderEntry>();
        let items = data.read_slice_at(items_offset, item_count).ok()?;
        Some(RichHeaderInfo {
            offset: start_marker_offset,
            // Includes "Rich" marker and the XOR key.
            length: end_marker_offset - start_marker_offset + 8,
            xor_key: xor_key.get(LE),
            masked_entries: items,
        })
    }

    /// Returns an iterator over the unmasked entries.
    ///
    /// Each entry's fields are unmasked by XORing with `xor_key`.
    pub fn unmasked_entries(&self) -> impl Iterator<Item = RichHeaderEntry> + 'data {
        let xor_key = self.xor_key;
        self.masked_entries
            .iter()
            .map(move |entry| RichHeaderEntry {
                comp_id: entry.masked_comp_id.get(LE) ^ xor_key,
                count: entry.masked_count.get(LE) ^ xor_key,
            })
    }
}
|
||||
|
||||
/// Find the offset of the first occurrence of needle in the data.
///
/// The offset must have the given alignment.
///
/// Returns `None` if the needle is not found at any aligned offset,
/// or if `align` is zero (which could otherwise never terminate).
fn memmem(data: &[u8], needle: &[u8], align: usize) -> Option<usize> {
    // A zero step would leave `offset` unchanged forever; reject it up front.
    if align == 0 {
        return None;
    }
    let mut offset = 0;
    loop {
        // `get` returns `None` once `offset` or the needle window runs past
        // the end of `data`, which terminates the search.
        if data.get(offset..)?.get(..needle.len())? == needle {
            return Some(offset);
        }
        offset += align;
    }
}
|
||||
440
vendor/object/src/read/pe/section.rs
vendored
Normal file
440
vendor/object/src/read/pe/section.rs
vendored
Normal file
@@ -0,0 +1,440 @@
|
||||
use core::marker::PhantomData;
|
||||
use core::{cmp, iter, slice, str};
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::pe::ImageSectionHeader;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, ObjectSection, ObjectSegment, ReadError, ReadRef,
|
||||
Relocation, Result, SectionFlags, SectionIndex, SectionKind, SegmentFlags,
|
||||
};
|
||||
|
||||
use super::{ImageNtHeaders, PeFile, SectionTable};
|
||||
|
||||
/// An iterator for the loadable sections in a [`PeFile32`](super::PeFile32).
pub type PeSegmentIterator32<'data, 'file, R = &'data [u8]> =
    PeSegmentIterator<'data, 'file, pe::ImageNtHeaders32, R>;
/// An iterator for the loadable sections in a [`PeFile64`](super::PeFile64).
pub type PeSegmentIterator64<'data, 'file, R = &'data [u8]> =
    PeSegmentIterator<'data, 'file, pe::ImageNtHeaders64, R>;

/// An iterator for the loadable sections in a [`PeFile`].
#[derive(Debug)]
pub struct PeSegmentIterator<'data, 'file, Pe, R = &'data [u8]>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    // The file the yielded segments borrow from.
    pub(super) file: &'file PeFile<'data, Pe, R>,
    // Remaining section headers to yield.
    pub(super) iter: slice::Iter<'data, pe::ImageSectionHeader>,
}
|
||||
|
||||
impl<'data, 'file, Pe, R> Iterator for PeSegmentIterator<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = PeSegment<'data, 'file, Pe, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|section| PeSegment {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A loadable section in a [`PeFile32`](super::PeFile32).
pub type PeSegment32<'data, 'file, R = &'data [u8]> =
    PeSegment<'data, 'file, pe::ImageNtHeaders32, R>;
/// A loadable section in a [`PeFile64`](super::PeFile64).
pub type PeSegment64<'data, 'file, R = &'data [u8]> =
    PeSegment<'data, 'file, pe::ImageNtHeaders64, R>;

/// A loadable section in a [`PeFile`].
///
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
#[derive(Debug)]
pub struct PeSegment<'data, 'file, Pe, R = &'data [u8]>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    // The containing file; provides image base, alignment and string table.
    file: &'file PeFile<'data, Pe, R>,
    // The section header this segment view wraps.
    section: &'data pe::ImageSectionHeader,
}
|
||||
|
||||
// Empty marker impl for the crate's private `Sealed` trait, which keeps the
// unified read traits from being implemented outside this crate.
impl<'data, 'file, Pe, R> read::private::Sealed for PeSegment<'data, 'file, Pe, R>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Pe, R> ObjectSegment<'data> for PeSegment<'data, 'file, Pe, R>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    // Virtual address of the segment: section RVA plus the image base.
    // `wrapping_add` avoids a panic on pathological header values.
    #[inline]
    fn address(&self) -> u64 {
        u64::from(self.section.virtual_address.get(LE)).wrapping_add(self.file.common.image_base)
    }

    // In-memory size of the segment (the header's virtual size).
    #[inline]
    fn size(&self) -> u64 {
        u64::from(self.section.virtual_size.get(LE))
    }

    // All PE sections share the optional header's section alignment.
    #[inline]
    fn align(&self) -> u64 {
        self.file.section_alignment()
    }

    // File offset and size of the raw section data (size is the minimum of
    // the file size and virtual size; see `ImageSectionHeader::pe_file_range`).
    #[inline]
    fn file_range(&self) -> (u64, u64) {
        let (offset, size) = self.section.pe_file_range();
        (u64::from(offset), u64::from(size))
    }

    fn data(&self) -> Result<&'data [u8]> {
        self.section.pe_data(self.file.data)
    }

    // Returns `Ok(None)` when the requested address range is not contained
    // in this segment's data.
    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.data()?,
            self.address(),
            address,
            size,
        ))
    }

    // Section name, resolved through the COFF string table for long names.
    #[inline]
    fn name_bytes(&self) -> Result<Option<&[u8]>> {
        self.section
            .name(self.file.common.symbols.strings())
            .map(Some)
    }

    #[inline]
    fn name(&self) -> Result<Option<&str>> {
        let name = self.section.name(self.file.common.symbols.strings())?;
        Ok(Some(
            str::from_utf8(name)
                .ok()
                .read_error("Non UTF-8 PE section name")?,
        ))
    }

    // Expose the raw COFF section characteristics as segment flags.
    #[inline]
    fn flags(&self) -> SegmentFlags {
        let characteristics = self.section.characteristics.get(LE);
        SegmentFlags::Coff { characteristics }
    }
}
|
||||
|
||||
/// An iterator for the sections in a [`PeFile32`](super::PeFile32).
pub type PeSectionIterator32<'data, 'file, R = &'data [u8]> =
    PeSectionIterator<'data, 'file, pe::ImageNtHeaders32, R>;
/// An iterator for the sections in a [`PeFile64`](super::PeFile64).
pub type PeSectionIterator64<'data, 'file, R = &'data [u8]> =
    PeSectionIterator<'data, 'file, pe::ImageNtHeaders64, R>;

/// An iterator for the sections in a [`PeFile`].
#[derive(Debug)]
pub struct PeSectionIterator<'data, 'file, Pe, R = &'data [u8]>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    // The file the yielded sections borrow from.
    pub(super) file: &'file PeFile<'data, Pe, R>,
    // Remaining section headers, enumerated so indices can be assigned.
    pub(super) iter: iter::Enumerate<slice::Iter<'data, pe::ImageSectionHeader>>,
}
|
||||
|
||||
impl<'data, 'file, Pe, R> Iterator for PeSectionIterator<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = PeSection<'data, 'file, Pe, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|(index, section)| PeSection {
|
||||
file: self.file,
|
||||
index: SectionIndex(index + 1),
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`PeFile32`](super::PeFile32).
pub type PeSection32<'data, 'file, R = &'data [u8]> =
    PeSection<'data, 'file, pe::ImageNtHeaders32, R>;
/// A section in a [`PeFile64`](super::PeFile64).
pub type PeSection64<'data, 'file, R = &'data [u8]> =
    PeSection<'data, 'file, pe::ImageNtHeaders64, R>;

/// A section in a [`PeFile`].
///
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
#[derive(Debug)]
pub struct PeSection<'data, 'file, Pe, R = &'data [u8]>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    // The containing file; provides image base, alignment and string table.
    pub(super) file: &'file PeFile<'data, Pe, R>,
    // 1-based index of this section in the section table.
    pub(super) index: SectionIndex,
    // The section header this view wraps.
    pub(super) section: &'data pe::ImageSectionHeader,
}
|
||||
|
||||
// Empty marker impl for the crate's private `Sealed` trait, which keeps the
// unified read traits from being implemented outside this crate.
impl<'data, 'file, Pe, R> read::private::Sealed for PeSection<'data, 'file, Pe, R>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Pe, R> ObjectSection<'data> for PeSection<'data, 'file, Pe, R>
where
    Pe: ImageNtHeaders,
    R: ReadRef<'data>,
{
    type RelocationIterator = PeRelocationIterator<'data, 'file, R>;

    #[inline]
    fn index(&self) -> SectionIndex {
        self.index
    }

    // Virtual address: section RVA plus the image base.
    // `wrapping_add` avoids a panic on pathological header values.
    #[inline]
    fn address(&self) -> u64 {
        u64::from(self.section.virtual_address.get(LE)).wrapping_add(self.file.common.image_base)
    }

    #[inline]
    fn size(&self) -> u64 {
        u64::from(self.section.virtual_size.get(LE))
    }

    #[inline]
    fn align(&self) -> u64 {
        self.file.section_alignment()
    }

    // `None` for sections with no raw data in the file (e.g. uninitialized
    // data, where the computed size is zero).
    #[inline]
    fn file_range(&self) -> Option<(u64, u64)> {
        let (offset, size) = self.section.pe_file_range();
        if size == 0 {
            None
        } else {
            Some((u64::from(offset), u64::from(size)))
        }
    }

    fn data(&self) -> Result<&'data [u8]> {
        self.section.pe_data(self.file.data)
    }

    // Returns `Ok(None)` when the requested address range is not contained
    // in this section's data.
    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.data()?,
            self.address(),
            address,
            size,
        ))
    }

    // PE sections are never compressed, so these are pass-throughs.
    #[inline]
    fn compressed_file_range(&self) -> Result<CompressedFileRange> {
        Ok(CompressedFileRange::none(self.file_range()))
    }

    #[inline]
    fn compressed_data(&self) -> Result<CompressedData<'data>> {
        self.data().map(CompressedData::none)
    }

    // Section name, resolved through the COFF string table for long names.
    #[inline]
    fn name_bytes(&self) -> Result<&[u8]> {
        self.section.name(self.file.common.symbols.strings())
    }

    #[inline]
    fn name(&self) -> Result<&str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 PE section name")
    }

    // PE has no segment names; these always return `Ok(None)`.
    #[inline]
    fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
        Ok(None)
    }

    #[inline]
    fn segment_name(&self) -> Result<Option<&str>> {
        Ok(None)
    }

    #[inline]
    fn kind(&self) -> SectionKind {
        self.section.kind()
    }

    // Relocation iteration is not implemented for PE; the returned iterator
    // is an always-empty stub.
    fn relocations(&self) -> PeRelocationIterator<'data, 'file, R> {
        PeRelocationIterator(PhantomData)
    }

    // Expose the raw COFF section characteristics as section flags.
    fn flags(&self) -> SectionFlags {
        SectionFlags::Coff {
            characteristics: self.section.characteristics.get(LE),
        }
    }
}
|
||||
|
||||
impl<'data> SectionTable<'data> {
    /// Return the file offset of the given virtual address, and the size up
    /// to the end of the section containing it.
    ///
    /// Returns `None` if no section contains the address.
    ///
    /// The first section (in header order) containing the address is used.
    pub fn pe_file_range_at(&self, va: u32) -> Option<(u32, u32)> {
        self.iter().find_map(|section| section.pe_file_range_at(va))
    }

    /// Return the data starting at the given virtual address, up to the end of the
    /// section containing it.
    ///
    /// Ignores sections with invalid data.
    ///
    /// Returns `None` if no section contains the address.
    pub fn pe_data_at<R: ReadRef<'data>>(&self, data: R, va: u32) -> Option<&'data [u8]> {
        self.iter().find_map(|section| section.pe_data_at(data, va))
    }

    /// Return the data of the section that contains the given virtual address in a PE file.
    ///
    /// Also returns the virtual address of that section.
    ///
    /// Ignores sections with invalid data.
    pub fn pe_data_containing<R: ReadRef<'data>>(
        &self,
        data: R,
        va: u32,
    ) -> Option<(&'data [u8], u32)> {
        self.iter()
            .find_map(|section| section.pe_data_containing(data, va))
    }

    /// Return the section that contains a given virtual address.
    pub fn section_containing(&self, va: u32) -> Option<&'data ImageSectionHeader> {
        self.iter().find(|section| section.contains_rva(va))
    }
}
|
||||
|
||||
impl pe::ImageSectionHeader {
    /// Return the offset and size of the section in a PE file.
    ///
    /// The size of the range will be the minimum of the file size and virtual size.
    pub fn pe_file_range(&self) -> (u32, u32) {
        // Pointer and size will be zero for uninitialized data; we don't need to validate this.
        let offset = self.pointer_to_raw_data.get(LE);
        // Taking the minimum ignores file padding beyond the virtual size.
        let size = cmp::min(self.virtual_size.get(LE), self.size_of_raw_data.get(LE));
        (offset, size)
    }

    /// Return the file offset of the given virtual address, and the remaining size up
    /// to the end of the section.
    ///
    /// Returns `None` if the section does not contain the address.
    pub fn pe_file_range_at(&self, va: u32) -> Option<(u32, u32)> {
        let section_va = self.virtual_address.get(LE);
        // `checked_sub` rejects addresses below the section start.
        let offset = va.checked_sub(section_va)?;
        let (section_offset, section_size) = self.pe_file_range();
        // Address must be within section (and not at its end).
        if offset < section_size {
            Some((section_offset.checked_add(offset)?, section_size - offset))
        } else {
            None
        }
    }

    /// Return the virtual address and size of the section.
    pub fn pe_address_range(&self) -> (u32, u32) {
        (self.virtual_address.get(LE), self.virtual_size.get(LE))
    }

    /// Return the section data in a PE file.
    ///
    /// The length of the data will be the minimum of the file size and virtual size.
    pub fn pe_data<'data, R: ReadRef<'data>>(&self, data: R) -> Result<&'data [u8]> {
        let (offset, size) = self.pe_file_range();
        data.read_bytes_at(offset.into(), size.into())
            .read_error("Invalid PE section offset or size")
    }

    /// Return the data starting at the given virtual address, up to the end of the
    /// section.
    ///
    /// Ignores sections with invalid data.
    ///
    /// Returns `None` if the section does not contain the address.
    pub fn pe_data_at<'data, R: ReadRef<'data>>(&self, data: R, va: u32) -> Option<&'data [u8]> {
        let (offset, size) = self.pe_file_range_at(va)?;
        data.read_bytes_at(offset.into(), size.into()).ok()
    }

    /// Tests whether a given RVA is part of this section
    pub fn contains_rva(&self, va: u32) -> bool {
        let section_va = self.virtual_address.get(LE);
        match va.checked_sub(section_va) {
            // Below the section start.
            None => false,
            Some(offset) => {
                // Address must be within section (and not at its end).
                offset < self.virtual_size.get(LE)
            }
        }
    }

    /// Return the section data if it contains the given virtual address.
    ///
    /// Also returns the virtual address of that section.
    ///
    /// Ignores sections with invalid data.
    pub fn pe_data_containing<'data, R: ReadRef<'data>>(
        &self,
        data: R,
        va: u32,
    ) -> Option<(&'data [u8], u32)> {
        let section_va = self.virtual_address.get(LE);
        let offset = va.checked_sub(section_va)?;
        let (section_offset, section_size) = self.pe_file_range();
        // Address must be within section (and not at its end).
        // Note this is bounded by the file-range size, not the virtual size.
        if offset < section_size {
            let section_data = data
                .read_bytes_at(section_offset.into(), section_size.into())
                .ok()?;
            Some((section_data, section_va))
        } else {
            None
        }
    }
}
|
||||
|
||||
/// An iterator for the relocations in an [`PeSection`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct PeRelocationIterator<'data, 'file, R = &'data [u8]>(
    // Carries the lifetimes and reader type without storing any data.
    PhantomData<(&'data (), &'file (), R)>,
);

impl<'data, 'file, R> Iterator for PeRelocationIterator<'data, 'file, R> {
    type Item = (u64, Relocation);

    // Always empty: this iterator never yields an item.
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}
|
||||
178
vendor/object/src/read/read_cache.rs
vendored
Normal file
178
vendor/object/src/read/read_cache.rs
vendored
Normal file
@@ -0,0 +1,178 @@
|
||||
use core::ops::Range;
|
||||
use std::boxed::Box;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryInto;
|
||||
use std::io::{Read, Seek, SeekFrom};
|
||||
use std::mem;
|
||||
use std::vec::Vec;
|
||||
|
||||
use crate::read::ReadRef;
|
||||
|
||||
/// An implementation of [`ReadRef`] for data in a stream that implements
/// `Read + Seek`.
///
/// Contains a cache of read-only blocks of data, allowing references to
/// them to be returned. Entries in the cache are never removed.
/// Entries are keyed on the offset and size of the read.
/// Currently overlapping reads are considered separate reads.
#[derive(Debug)]
pub struct ReadCache<R: Read + Seek> {
    // Interior mutability: `ReadRef` methods take `self` by shared reference,
    // but reading from the stream and filling the cache require mutation.
    cache: RefCell<ReadCacheInternal<R>>,
}

#[derive(Debug)]
struct ReadCacheInternal<R: Read + Seek> {
    // The underlying stream.
    read: R,
    // Fixed-size reads, keyed by (offset, size).
    bufs: HashMap<(u64, u64), Box<[u8]>>,
    // Delimited reads, keyed by (start offset, delimiter byte).
    strings: HashMap<(u64, u8), Box<[u8]>>,
}
|
||||
|
||||
impl<R: Read + Seek> ReadCache<R> {
|
||||
/// Create an empty `ReadCache` for the given stream.
|
||||
pub fn new(read: R) -> Self {
|
||||
ReadCache {
|
||||
cache: RefCell::new(ReadCacheInternal {
|
||||
read,
|
||||
bufs: HashMap::new(),
|
||||
strings: HashMap::new(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an implementation of `ReadRef` that restricts reads
|
||||
/// to the given range of the stream.
|
||||
pub fn range(&self, offset: u64, size: u64) -> ReadCacheRange<'_, R> {
|
||||
ReadCacheRange {
|
||||
r: self,
|
||||
offset,
|
||||
size,
|
||||
}
|
||||
}
|
||||
|
||||
/// Free buffers used by the cache.
|
||||
pub fn clear(&mut self) {
|
||||
self.cache.borrow_mut().bufs.clear();
|
||||
}
|
||||
|
||||
/// Unwrap this `ReadCache<R>`, returning the underlying reader.
|
||||
pub fn into_inner(self) -> R {
|
||||
self.cache.into_inner().read
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> ReadRef<'a> for &'a ReadCache<R> {
    // Determined by seeking to the end of the stream; not cached, so this
    // performs a seek on every call.
    fn len(self) -> Result<u64, ()> {
        let cache = &mut *self.cache.borrow_mut();
        cache.read.seek(SeekFrom::End(0)).map_err(|_| ())
    }

    fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8], ()> {
        if size == 0 {
            return Ok(&[]);
        }
        let cache = &mut *self.cache.borrow_mut();
        // One cache entry per exact (offset, size) pair; overlapping reads
        // are stored separately.
        let buf = match cache.bufs.entry((offset, size)) {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                let size = size.try_into().map_err(|_| ())?;
                cache.read.seek(SeekFrom::Start(offset)).map_err(|_| ())?;
                let mut bytes = vec![0; size].into_boxed_slice();
                cache.read.read_exact(&mut bytes).map_err(|_| ())?;
                entry.insert(bytes)
            }
        };
        // SAFETY: extend the lifetime from the `RefCell` borrow to `'a`
        // (the borrow of `self`). This is OK because cache entries are only
        // inserted while shared borrows exist, never mutated or removed, and
        // the boxed slices are heap allocations that do not move on rehash.
        Ok(unsafe { mem::transmute::<&[u8], &[u8]>(buf) })
    }

    fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8], ()> {
        let cache = &mut *self.cache.borrow_mut();
        let buf = match cache.strings.entry((range.start, delimiter)) {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                cache
                    .read
                    .seek(SeekFrom::Start(range.start))
                    .map_err(|_| ())?;

                let max_check: usize = (range.end - range.start).try_into().map_err(|_| ())?;
                // Strings should be relatively small.
                // TODO: make this configurable?
                let max_check = max_check.min(4096);

                // Read forward in 256-byte steps until the delimiter is
                // found, EOF is hit, or `max_check` bytes have been scanned.
                let mut bytes = Vec::new();
                let mut checked = 0;
                loop {
                    bytes.resize((checked + 256).min(max_check), 0);
                    let read = cache.read.read(&mut bytes[checked..]).map_err(|_| ())?;
                    if read == 0 {
                        return Err(());
                    }
                    if let Some(len) = memchr::memchr(delimiter, &bytes[checked..][..read]) {
                        // Truncate to exclude the delimiter and everything after.
                        bytes.truncate(checked + len);
                        break entry.insert(bytes.into_boxed_slice());
                    }
                    checked += read;
                    if checked >= max_check {
                        return Err(());
                    }
                }
            }
        };
        // SAFETY: same argument as in `read_bytes_at`: entries are only
        // inserted, never mutated or removed, while shared borrows exist.
        Ok(unsafe { mem::transmute::<&[u8], &[u8]>(buf) })
    }
}
|
||||
|
||||
/// An implementation of [`ReadRef`] for a range of data in a stream that
/// implements `Read + Seek`.
///
/// Shares an underlying `ReadCache` with a lifetime of `'a`.
#[derive(Debug)]
pub struct ReadCacheRange<'a, R: Read + Seek> {
    // The shared cache all reads go through.
    r: &'a ReadCache<R>,
    // Start of the range within the underlying stream.
    offset: u64,
    // Length of the range.
    size: u64,
}

// Manual `Clone`/`Copy` impls: a derive would add an unnecessary
// `R: Clone`/`R: Copy` bound, but only the reference and two integers
// are copied.
impl<'a, R: Read + Seek> Clone for ReadCacheRange<'a, R> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<'a, R: Read + Seek> Copy for ReadCacheRange<'a, R> {}
|
||||
|
||||
impl<'a, R: Read + Seek> ReadRef<'a> for ReadCacheRange<'a, R> {
    // The range's own size, independent of the underlying stream length.
    fn len(self) -> Result<u64, ()> {
        Ok(self.size)
    }

    fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8], ()> {
        if size == 0 {
            return Ok(&[]);
        }
        // Reject reads that overflow or extend past the end of the range.
        let end = offset.checked_add(size).ok_or(())?;
        if end > self.size {
            return Err(());
        }
        // Translate the range-relative offset to a stream offset.
        let r_offset = self.offset.checked_add(offset).ok_or(())?;
        self.r.read_bytes_at(r_offset, size)
    }

    fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8], ()> {
        // Translate the range-relative bounds to stream offsets.
        let r_start = self.offset.checked_add(range.start).ok_or(())?;
        let r_end = self.offset.checked_add(range.end).ok_or(())?;
        let bytes = self.r.read_bytes_at_until(r_start..r_end, delimiter)?;
        // The delimiter search may have run past this range's end in the
        // underlying stream; reject the result if the found slice does.
        let size = bytes.len().try_into().map_err(|_| ())?;
        let end = range.start.checked_add(size).ok_or(())?;
        if end > self.size {
            return Err(());
        }
        Ok(bytes)
    }
}
|
||||
137
vendor/object/src/read/read_ref.rs
vendored
Normal file
137
vendor/object/src/read/read_ref.rs
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
#![allow(clippy::len_without_is_empty)]
|
||||
|
||||
use core::convert::TryInto;
|
||||
use core::ops::Range;
|
||||
use core::{mem, result};
|
||||
|
||||
use crate::pod::{from_bytes, slice_from_bytes, Pod};
|
||||
|
||||
// Local alias: `ReadRef` operations report failure as the unit error `()`.
type Result<T> = result::Result<T, ()>;
|
||||
|
||||
/// A trait for reading references to [`Pod`] types from a block of data.
///
/// This allows parsers to handle both of these cases:
/// - the block of data exists in memory, and it is desirable
///   to use references to this block instead of copying it,
/// - the block of data exists in storage, and it is desirable
///   to read on demand to minimize I/O and memory usage.
///
/// The methods accept `self` by value because `Self` is expected to behave
/// similar to a reference: it may be a reference with a lifetime of `'a`,
/// or it may be a wrapper of a reference.
///
/// The `Clone` and `Copy` bounds are for convenience, and since `Self` is
/// expected to be similar to a reference, these are easily satisfied.
///
/// Object file parsers typically use offsets to locate the structures
/// in the block, and will most commonly use the `*_at` methods to
/// read a structure at a known offset.
///
/// Occasionally file parsers will need to treat the block as a stream,
/// and so convenience methods are provided that update an offset with
/// the size that was read.
//
// An alternative would be for methods to accept `&mut self` and use a
// `seek` method instead of the `offset` parameters, but this is less
// convenient for implementers.
pub trait ReadRef<'a>: Clone + Copy {
    /// The total size of the block of data.
    fn len(self) -> Result<u64>;

    /// Get a reference to a `u8` slice at the given offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8]>;

    /// Get a reference to a delimited `u8` slice which starts at range.start.
    ///
    /// Does not include the delimiter.
    ///
    /// Returns an error if the range is out of bounds or the delimiter is
    /// not found in the range.
    fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8]>;

    /// Get a reference to a `u8` slice at the given offset, and update the offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    fn read_bytes(self, offset: &mut u64, size: u64) -> Result<&'a [u8]> {
        let bytes = self.read_bytes_at(*offset, size)?;
        // `read_bytes_at` succeeded, so the advance cannot actually wrap;
        // `wrapping_add` just avoids an overflow check.
        *offset = offset.wrapping_add(size);
        Ok(bytes)
    }

    /// Get a reference to a `Pod` type at the given offset, and update the offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    ///
    /// The default implementation uses `read_bytes`, and returns an error if
    /// `read_bytes` does not return bytes with the correct alignment for `T`.
    /// Implementors may want to provide their own implementation that ensures
    /// the alignment can be satisfied. Alternatively, only use this method with
    /// types that do not need alignment (see the `unaligned` feature of this crate).
    fn read<T: Pod>(self, offset: &mut u64) -> Result<&'a T> {
        let size = mem::size_of::<T>().try_into().map_err(|_| ())?;
        let bytes = self.read_bytes(offset, size)?;
        let (t, _) = from_bytes(bytes)?;
        Ok(t)
    }

    /// Get a reference to a `Pod` type at the given offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    ///
    /// Also see the `read` method for information regarding alignment of `T`.
    fn read_at<T: Pod>(self, mut offset: u64) -> Result<&'a T> {
        self.read(&mut offset)
    }

    /// Get a reference to a slice of a `Pod` type at the given offset, and update the offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    ///
    /// Also see the `read` method for information regarding alignment of `T`.
    fn read_slice<T: Pod>(self, offset: &mut u64, count: usize) -> Result<&'a [T]> {
        // `checked_mul` guards against overflow of `count * size_of::<T>()`.
        let size = count
            .checked_mul(mem::size_of::<T>())
            .ok_or(())?
            .try_into()
            .map_err(|_| ())?;
        let bytes = self.read_bytes(offset, size)?;
        let (t, _) = slice_from_bytes(bytes, count)?;
        Ok(t)
    }

    /// Get a reference to a slice of a `Pod` type at the given offset.
    ///
    /// Returns an error if offset or size are out of bounds.
    ///
    /// Also see the `read` method for information regarding alignment of `T`.
    fn read_slice_at<T: Pod>(self, mut offset: u64, count: usize) -> Result<&'a [T]> {
        self.read_slice(&mut offset, count)
    }
}
|
||||
|
||||
impl<'a> ReadRef<'a> for &'a [u8] {
|
||||
fn len(self) -> Result<u64> {
|
||||
self.len().try_into().map_err(|_| ())
|
||||
}
|
||||
|
||||
fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8]> {
|
||||
let offset: usize = offset.try_into().map_err(|_| ())?;
|
||||
let size: usize = size.try_into().map_err(|_| ())?;
|
||||
self.get(offset..).ok_or(())?.get(..size).ok_or(())
|
||||
}
|
||||
|
||||
fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8]> {
|
||||
let start: usize = range.start.try_into().map_err(|_| ())?;
|
||||
let end: usize = range.end.try_into().map_err(|_| ())?;
|
||||
let bytes = self.get(start..end).ok_or(())?;
|
||||
match memchr::memchr(delimiter, bytes) {
|
||||
Some(len) => {
|
||||
// This will never fail.
|
||||
bytes.get(..len).ok_or(())
|
||||
}
|
||||
None => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
551
vendor/object/src/read/traits.rs
vendored
Normal file
551
vendor/object/src/read/traits.rs
vendored
Normal file
@@ -0,0 +1,551 @@
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, CodeView, ComdatKind, CompressedData, CompressedFileRange, Export,
|
||||
FileFlags, Import, ObjectKind, ObjectMap, Relocation, Result, SectionFlags, SectionIndex,
|
||||
SectionKind, SegmentFlags, SubArchitecture, SymbolFlags, SymbolIndex, SymbolKind, SymbolMap,
|
||||
SymbolMapName, SymbolScope, SymbolSection,
|
||||
};
|
||||
use crate::Endianness;
|
||||
|
||||
/// An object file.
///
/// This is the primary trait for the unified read API.
pub trait Object<'data: 'file, 'file>: read::private::Sealed {
    /// A loadable segment in the object file.
    type Segment: ObjectSegment<'data>;

    /// An iterator for the loadable segments in the object file.
    type SegmentIterator: Iterator<Item = Self::Segment>;

    /// A section in the object file.
    type Section: ObjectSection<'data>;

    /// An iterator for the sections in the object file.
    type SectionIterator: Iterator<Item = Self::Section>;

    /// A COMDAT section group in the object file.
    type Comdat: ObjectComdat<'data>;

    /// An iterator for the COMDAT section groups in the object file.
    type ComdatIterator: Iterator<Item = Self::Comdat>;

    /// A symbol in the object file.
    type Symbol: ObjectSymbol<'data>;

    /// An iterator for symbols in the object file.
    type SymbolIterator: Iterator<Item = Self::Symbol>;

    /// A symbol table in the object file.
    type SymbolTable: ObjectSymbolTable<
        'data,
        Symbol = Self::Symbol,
        SymbolIterator = Self::SymbolIterator,
    >;

    /// An iterator for the dynamic relocations in the file.
    ///
    /// The first field in the item tuple is the address
    /// that the relocation applies to.
    type DynamicRelocationIterator: Iterator<Item = (u64, Relocation)>;

    /// Get the architecture type of the file.
    fn architecture(&self) -> Architecture;

    /// Get the sub-architecture type of the file if known.
    ///
    /// A value of `None` has a range of meanings: the file supports all
    /// sub-architectures, the file does not explicitly specify a
    /// sub-architecture, or the sub-architecture is currently unrecognized.
    fn sub_architecture(&self) -> Option<SubArchitecture> {
        None
    }

    /// Get the endianness of the file.
    ///
    /// Default implementation derives this from [`Self::is_little_endian`].
    #[inline]
    fn endianness(&self) -> Endianness {
        if self.is_little_endian() {
            Endianness::Little
        } else {
            Endianness::Big
        }
    }

    /// Return true if the file is little endian, false if it is big endian.
    fn is_little_endian(&self) -> bool;

    /// Return true if the file can contain 64-bit addresses.
    fn is_64(&self) -> bool;

    /// Return the kind of this object.
    fn kind(&self) -> ObjectKind;

    /// Get an iterator for the loadable segments in the file.
    ///
    /// For ELF, this is program headers with type [`PT_LOAD`](crate::elf::PT_LOAD).
    /// For Mach-O, this is load commands with type [`LC_SEGMENT`](crate::macho::LC_SEGMENT)
    /// or [`LC_SEGMENT_64`](crate::macho::LC_SEGMENT_64).
    /// For PE, this is all sections.
    fn segments(&'file self) -> Self::SegmentIterator;

    /// Get the section named `section_name`, if such a section exists.
    ///
    /// If `section_name` starts with a '.' then it is treated as a system section name,
    /// and is compared using the conventions specific to the object file format. This
    /// includes:
    /// - if ".debug_str_offsets" is requested for a Mach-O object file, then the actual
    /// section name that is searched for is "__debug_str_offs".
    /// - if ".debug_info" is requested for an ELF object file, then
    /// ".zdebug_info" may be returned (and similarly for other debug sections).
    ///
    /// For some object files, multiple segments may contain sections with the same
    /// name. In this case, the first matching section will be used.
    ///
    /// This method skips over sections with invalid names.
    fn section_by_name(&'file self, section_name: &str) -> Option<Self::Section> {
        self.section_by_name_bytes(section_name.as_bytes())
    }

    /// Like [`Self::section_by_name`], but allows names that are not UTF-8.
    fn section_by_name_bytes(&'file self, section_name: &[u8]) -> Option<Self::Section>;

    /// Get the section at the given index.
    ///
    /// The meaning of the index depends on the object file.
    ///
    /// For some object files, this requires iterating through all sections.
    ///
    /// Returns an error if the index is invalid.
    fn section_by_index(&'file self, index: SectionIndex) -> Result<Self::Section>;

    /// Get an iterator for the sections in the file.
    fn sections(&'file self) -> Self::SectionIterator;

    /// Get an iterator for the COMDAT section groups in the file.
    fn comdats(&'file self) -> Self::ComdatIterator;

    /// Get the debugging symbol table, if any.
    fn symbol_table(&'file self) -> Option<Self::SymbolTable>;

    /// Get the debugging symbol at the given index.
    ///
    /// The meaning of the index depends on the object file.
    ///
    /// Returns an error if the index is invalid.
    fn symbol_by_index(&'file self, index: SymbolIndex) -> Result<Self::Symbol>;

    /// Get an iterator for the debugging symbols in the file.
    ///
    /// This may skip over symbols that are malformed or unsupported.
    ///
    /// For Mach-O files, this does not include STAB entries.
    fn symbols(&'file self) -> Self::SymbolIterator;

    /// Get the symbol named `symbol_name`, if the symbol exists.
    fn symbol_by_name(&'file self, symbol_name: &str) -> Option<Self::Symbol> {
        self.symbol_by_name_bytes(symbol_name.as_bytes())
    }

    /// Like [`Self::symbol_by_name`], but allows names that are not UTF-8.
    ///
    /// Default implementation is a linear scan of [`Self::symbols`].
    fn symbol_by_name_bytes(&'file self, symbol_name: &[u8]) -> Option<Self::Symbol> {
        self.symbols()
            .find(|sym| sym.name_bytes() == Ok(symbol_name))
    }

    /// Get the dynamic linking symbol table, if any.
    ///
    /// Only ELF has a separate dynamic linking symbol table.
    /// Consider using [`Self::exports`] or [`Self::imports`] instead.
    fn dynamic_symbol_table(&'file self) -> Option<Self::SymbolTable>;

    /// Get an iterator for the dynamic linking symbols in the file.
    ///
    /// This may skip over symbols that are malformed or unsupported.
    ///
    /// Only ELF has dynamic linking symbols.
    /// Other file formats will return an empty iterator.
    /// Consider using [`Self::exports`] or [`Self::imports`] instead.
    fn dynamic_symbols(&'file self) -> Self::SymbolIterator;

    /// Get the dynamic relocations for this file.
    ///
    /// Symbol indices in these relocations refer to the dynamic symbol table.
    ///
    /// Only ELF has dynamic relocations.
    fn dynamic_relocations(&'file self) -> Option<Self::DynamicRelocationIterator>;

    /// Construct a map from addresses to symbol names.
    ///
    /// The map will only contain defined text and data symbols.
    /// The dynamic symbol table will only be used if there are no debugging symbols.
    ///
    /// When several symbols share an address, only the one with the best
    /// (lowest) priority tuple is kept after sorting.
    fn symbol_map(&'file self) -> SymbolMap<SymbolMapName<'data>> {
        let mut symbols = Vec::new();
        if let Some(table) = self.symbol_table().or_else(|| self.dynamic_symbol_table()) {
            // Sometimes symbols share addresses. Collect them all then choose the "best".
            let mut all_symbols = Vec::new();
            for symbol in table.symbols() {
                // Must have an address.
                if !symbol.is_definition() {
                    continue;
                }
                // Must have a name.
                let name = match symbol.name() {
                    Ok(name) => name,
                    _ => continue,
                };
                if name.is_empty() {
                    continue;
                }

                // Lower is better.
                let mut priority = 0u32;

                // Prefer known kind.
                match symbol.kind() {
                    SymbolKind::Text | SymbolKind::Data => {}
                    SymbolKind::Unknown => priority += 1,
                    // Other kinds (e.g. files, sections) are excluded entirely.
                    _ => continue,
                }
                priority *= 2;

                // Prefer global visibility.
                priority += match symbol.scope() {
                    SymbolScope::Unknown => 3,
                    SymbolScope::Compilation => 2,
                    SymbolScope::Linkage => 1,
                    SymbolScope::Dynamic => 0,
                };
                priority *= 4;

                // Prefer later entries (earlier symbol is likely to be less specific).
                let index = !0 - symbol.index().0;

                // Tuple is ordered for sort.
                all_symbols.push((symbol.address(), priority, index, name));
            }
            // Unstable sort is okay because tuple includes index.
            all_symbols.sort_unstable();

            // Keep only the first (best) entry for each distinct address.
            let mut previous_address = !0;
            for (address, _priority, _index, name) in all_symbols {
                if address != previous_address {
                    symbols.push(SymbolMapName::new(address, name));
                    previous_address = address;
                }
            }
        }
        SymbolMap::new(symbols)
    }

    /// Construct a map from addresses to symbol names and object file names.
    ///
    /// This is derived from Mach-O STAB entries.
    ///
    /// Default implementation returns an empty map.
    fn object_map(&'file self) -> ObjectMap<'data> {
        ObjectMap::default()
    }

    /// Get the imported symbols.
    fn imports(&self) -> Result<Vec<Import<'data>>>;

    /// Get the exported symbols that expose both a name and an address.
    ///
    /// Some file formats may provide other kinds of symbols that can be retrieved using
    /// the low level API.
    fn exports(&self) -> Result<Vec<Export<'data>>>;

    /// Return true if the file contains DWARF debug information sections, false if not.
    fn has_debug_symbols(&self) -> bool;

    /// The UUID from a Mach-O [`LC_UUID`](crate::macho::LC_UUID) load command.
    #[inline]
    fn mach_uuid(&self) -> Result<Option<[u8; 16]>> {
        Ok(None)
    }

    /// The build ID from an ELF [`NT_GNU_BUILD_ID`](crate::elf::NT_GNU_BUILD_ID) note.
    #[inline]
    fn build_id(&self) -> Result<Option<&'data [u8]>> {
        Ok(None)
    }

    /// The filename and CRC from a `.gnu_debuglink` section.
    #[inline]
    fn gnu_debuglink(&self) -> Result<Option<(&'data [u8], u32)>> {
        Ok(None)
    }

    /// The filename and build ID from a `.gnu_debugaltlink` section.
    #[inline]
    fn gnu_debugaltlink(&self) -> Result<Option<(&'data [u8], &'data [u8])>> {
        Ok(None)
    }

    /// The filename and GUID from the PE CodeView section.
    #[inline]
    fn pdb_info(&self) -> Result<Option<CodeView<'_>>> {
        Ok(None)
    }

    /// Get the base address used for relative virtual addresses.
    ///
    /// Currently this is only non-zero for PE.
    fn relative_address_base(&'file self) -> u64;

    /// Get the virtual address of the entry point of the binary.
    fn entry(&'file self) -> u64;

    /// File flags that are specific to each file format.
    fn flags(&self) -> FileFlags;
}
|
||||
|
||||
/// A loadable segment in an [`Object`].
///
/// This trait is part of the unified read API.
pub trait ObjectSegment<'data>: read::private::Sealed {
    /// Returns the virtual address of the segment.
    fn address(&self) -> u64;

    /// Returns the size of the segment in memory.
    fn size(&self) -> u64;

    /// Returns the alignment of the segment in memory.
    fn align(&self) -> u64;

    /// Returns the offset and size of the segment in the file.
    ///
    /// The tuple is `(file offset, size in bytes)`.
    fn file_range(&self) -> (u64, u64);

    /// Returns a reference to the file contents of the segment.
    ///
    /// The length of this data may be different from the size of the
    /// segment in memory.
    fn data(&self) -> Result<&'data [u8]>;

    /// Return the segment data in the given range.
    ///
    /// `address` and `size` are in terms of the segment's virtual addresses.
    ///
    /// Returns `Ok(None)` if the segment does not contain the given range.
    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>>;

    /// Returns the name of the segment.
    fn name_bytes(&self) -> Result<Option<&[u8]>>;

    /// Returns the name of the segment.
    ///
    /// Returns an error if the name is not UTF-8.
    fn name(&self) -> Result<Option<&str>>;

    /// Return the flags of segment.
    fn flags(&self) -> SegmentFlags;
}
|
||||
|
||||
/// A section in an [`Object`].
///
/// This trait is part of the unified read API.
pub trait ObjectSection<'data>: read::private::Sealed {
    /// An iterator for the relocations for a section.
    ///
    /// The first field in the item tuple is the section offset
    /// that the relocation applies to.
    type RelocationIterator: Iterator<Item = (u64, Relocation)>;

    /// Returns the section index.
    fn index(&self) -> SectionIndex;

    /// Returns the address of the section.
    fn address(&self) -> u64;

    /// Returns the size of the section in memory.
    fn size(&self) -> u64;

    /// Returns the alignment of the section in memory.
    fn align(&self) -> u64;

    /// Returns offset and size of on-disk segment (if any).
    fn file_range(&self) -> Option<(u64, u64)>;

    /// Returns the raw contents of the section.
    ///
    /// The length of this data may be different from the size of the
    /// section in memory.
    ///
    /// This does not do any decompression.
    fn data(&self) -> Result<&'data [u8]>;

    /// Return the raw contents of the section data in the given range.
    ///
    /// This does not do any decompression.
    ///
    /// Returns `Ok(None)` if the section does not contain the given range.
    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>>;

    /// Returns the potentially compressed file range of the section,
    /// along with information about the compression.
    fn compressed_file_range(&self) -> Result<CompressedFileRange>;

    /// Returns the potentially compressed contents of the section,
    /// along with information about the compression.
    fn compressed_data(&self) -> Result<CompressedData<'data>>;

    /// Returns the uncompressed contents of the section.
    ///
    /// The length of this data may be different from the size of the
    /// section in memory.
    ///
    /// If no compression is detected, then returns the data unchanged.
    /// Returns `Err` if decompression fails.
    ///
    /// Returns a `Cow` because decompression must allocate, while the
    /// uncompressed case can borrow from the file data.
    fn uncompressed_data(&self) -> Result<Cow<'data, [u8]>> {
        self.compressed_data()?.decompress()
    }

    /// Returns the name of the section.
    fn name_bytes(&self) -> Result<&[u8]>;

    /// Returns the name of the section.
    ///
    /// Returns an error if the name is not UTF-8.
    fn name(&self) -> Result<&str>;

    /// Returns the name of the segment for this section.
    fn segment_name_bytes(&self) -> Result<Option<&[u8]>>;

    /// Returns the name of the segment for this section.
    ///
    /// Returns an error if the name is not UTF-8.
    fn segment_name(&self) -> Result<Option<&str>>;

    /// Return the kind of this section.
    fn kind(&self) -> SectionKind;

    /// Get the relocations for this section.
    fn relocations(&self) -> Self::RelocationIterator;

    /// Section flags that are specific to each file format.
    fn flags(&self) -> SectionFlags;
}
|
||||
|
||||
/// A COMDAT section group in an [`Object`].
///
/// This trait is part of the unified read API.
pub trait ObjectComdat<'data>: read::private::Sealed {
    /// An iterator for the sections in the section group.
    ///
    /// Yields section indices, not sections; resolve them via
    /// [`Object::section_by_index`].
    type SectionIterator: Iterator<Item = SectionIndex>;

    /// Returns the COMDAT selection kind.
    fn kind(&self) -> ComdatKind;

    /// Returns the index of the symbol used for the name of COMDAT section group.
    fn symbol(&self) -> SymbolIndex;

    /// Returns the name of the COMDAT section group.
    fn name_bytes(&self) -> Result<&[u8]>;

    /// Returns the name of the COMDAT section group.
    ///
    /// Returns an error if the name is not UTF-8.
    fn name(&self) -> Result<&str>;

    /// Get the sections in this section group.
    fn sections(&self) -> Self::SectionIterator;
}
|
||||
|
||||
/// A symbol table in an [`Object`].
///
/// This trait is part of the unified read API.
pub trait ObjectSymbolTable<'data>: read::private::Sealed {
    /// A symbol table entry.
    type Symbol: ObjectSymbol<'data>;

    /// An iterator for the symbols in a symbol table.
    type SymbolIterator: Iterator<Item = Self::Symbol>;

    /// Get an iterator for the symbols in the table.
    ///
    /// This may skip over symbols that are malformed or unsupported.
    fn symbols(&self) -> Self::SymbolIterator;

    /// Get the symbol at the given index.
    ///
    /// The meaning of the index depends on the object file.
    ///
    /// Returns an error if the index is invalid.
    fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol>;
}
|
||||
|
||||
/// A symbol table entry in an [`Object`].
///
/// This trait is part of the unified read API.
pub trait ObjectSymbol<'data>: read::private::Sealed {
    /// The index of the symbol.
    fn index(&self) -> SymbolIndex;

    /// The name of the symbol.
    fn name_bytes(&self) -> Result<&'data [u8]>;

    /// The name of the symbol.
    ///
    /// Returns an error if the name is not UTF-8.
    fn name(&self) -> Result<&'data str>;

    /// The address of the symbol. May be zero if the address is unknown.
    fn address(&self) -> u64;

    /// The size of the symbol. May be zero if the size is unknown.
    fn size(&self) -> u64;

    /// Return the kind of this symbol.
    fn kind(&self) -> SymbolKind;

    /// Returns the section where the symbol is defined.
    fn section(&self) -> SymbolSection;

    /// Returns the section index for the section containing this symbol.
    ///
    /// May return `None` if the symbol is not defined in a section.
    ///
    /// Default implementation delegates to [`SymbolSection::index`].
    fn section_index(&self) -> Option<SectionIndex> {
        self.section().index()
    }

    /// Return true if the symbol is undefined.
    fn is_undefined(&self) -> bool;

    /// Return true if the symbol is a definition of a function or data object
    /// that has a known address.
    ///
    /// This is primarily used to implement [`Object::symbol_map`].
    fn is_definition(&self) -> bool;

    /// Return true if the symbol is common data.
    ///
    /// Note: does not check for [`SymbolSection::Section`] with [`SectionKind::Common`].
    fn is_common(&self) -> bool;

    /// Return true if the symbol is weak.
    fn is_weak(&self) -> bool;

    /// Returns the symbol scope.
    fn scope(&self) -> SymbolScope;

    /// Return true if the symbol visible outside of the compilation unit.
    ///
    /// This treats [`SymbolScope::Unknown`] as global.
    fn is_global(&self) -> bool;

    /// Return true if the symbol is only visible within the compilation unit.
    fn is_local(&self) -> bool;

    /// Symbol flags that are specific to each file format.
    fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex>;
}
|
||||
|
||||
/// An iterator for files that don't have dynamic relocations.
///
/// Used as the [`Object::DynamicRelocationIterator`] type for file formats
/// that never have dynamic relocations; it is always empty.
#[derive(Debug)]
pub struct NoDynamicRelocationIterator;

impl Iterator for NoDynamicRelocationIterator {
    type Item = (u64, Relocation);

    // Always exhausted.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}
|
||||
425
vendor/object/src/read/util.rs
vendored
Normal file
425
vendor/object/src/read/util.rs
vendored
Normal file
@@ -0,0 +1,425 @@
|
||||
use alloc::string::String;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt;
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use crate::pod::{from_bytes, slice_from_bytes, Pod};
|
||||
use crate::ReadRef;
|
||||
|
||||
/// A newtype for byte slices.
///
/// It has these important features:
/// - no methods that can panic, such as `Index`
/// - convenience methods for `Pod` types
/// - a useful `Debug` implementation
///
/// The inner slice is public; reader methods consume from the front of it.
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Bytes<'data>(pub &'data [u8]);
|
||||
|
||||
impl<'data> fmt::Debug for Bytes<'data> {
    // Delegates to the shared helper so `Bytes` and raw slices format identically.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        debug_list_bytes(self.0, fmt)
    }
}
|
||||
|
||||
impl<'data> Bytes<'data> {
    /// Return the length of the byte slice.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Return true if the byte slice is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Skip over the given number of bytes at the start of the byte slice.
    ///
    /// Modifies the byte slice to start after the bytes.
    ///
    /// Returns an error if there are too few bytes.
    /// On error the slice is left empty.
    #[inline]
    pub fn skip(&mut self, offset: usize) -> Result<(), ()> {
        match self.0.get(offset..) {
            Some(tail) => {
                self.0 = tail;
                Ok(())
            }
            None => {
                // Out of range: poison the reader so later reads also fail.
                self.0 = &[];
                Err(())
            }
        }
    }

    /// Return a reference to the given number of bytes at the start of the byte slice.
    ///
    /// Modifies the byte slice to start after the bytes.
    ///
    /// Returns an error if there are too few bytes.
    /// On error the slice is left empty.
    #[inline]
    pub fn read_bytes(&mut self, count: usize) -> Result<Bytes<'data>, ()> {
        match (self.0.get(..count), self.0.get(count..)) {
            (Some(head), Some(tail)) => {
                self.0 = tail;
                Ok(Bytes(head))
            }
            _ => {
                self.0 = &[];
                Err(())
            }
        }
    }

    /// Return a reference to the given number of bytes at the given offset of the byte slice.
    ///
    /// Returns an error if the offset is invalid or there are too few bytes.
    // Takes `self` by value (`Bytes` is `Copy`), so the caller's slice is unchanged.
    #[inline]
    pub fn read_bytes_at(mut self, offset: usize, count: usize) -> Result<Bytes<'data>, ()> {
        self.skip(offset)?;
        self.read_bytes(count)
    }

    /// Return a reference to a `Pod` struct at the start of the byte slice.
    ///
    /// Modifies the byte slice to start after the bytes.
    ///
    /// Returns an error if there are too few bytes or the slice is incorrectly aligned.
    /// On error the slice is left empty.
    #[inline]
    pub fn read<T: Pod>(&mut self) -> Result<&'data T, ()> {
        match from_bytes(self.0) {
            Ok((value, tail)) => {
                self.0 = tail;
                Ok(value)
            }
            Err(()) => {
                self.0 = &[];
                Err(())
            }
        }
    }

    /// Return a reference to a `Pod` struct at the given offset of the byte slice.
    ///
    /// Returns an error if there are too few bytes or the offset is incorrectly aligned.
    #[inline]
    pub fn read_at<T: Pod>(mut self, offset: usize) -> Result<&'data T, ()> {
        self.skip(offset)?;
        self.read()
    }

    /// Return a reference to a slice of `Pod` structs at the start of the byte slice.
    ///
    /// Modifies the byte slice to start after the bytes.
    ///
    /// Returns an error if there are too few bytes or the offset is incorrectly aligned.
    /// On error the slice is left empty.
    #[inline]
    pub fn read_slice<T: Pod>(&mut self, count: usize) -> Result<&'data [T], ()> {
        match slice_from_bytes(self.0, count) {
            Ok((value, tail)) => {
                self.0 = tail;
                Ok(value)
            }
            Err(()) => {
                self.0 = &[];
                Err(())
            }
        }
    }

    /// Return a reference to a slice of `Pod` structs at the given offset of the byte slice.
    ///
    /// Returns an error if there are too few bytes or the offset is incorrectly aligned.
    #[inline]
    pub fn read_slice_at<T: Pod>(mut self, offset: usize, count: usize) -> Result<&'data [T], ()> {
        self.skip(offset)?;
        self.read_slice(count)
    }

    /// Read a null terminated string.
    ///
    /// Does not assume any encoding.
    /// Reads past the null byte, but doesn't return it.
    ///
    /// Returns an error (and empties the slice) if no null byte is found.
    #[inline]
    pub fn read_string(&mut self) -> Result<&'data [u8], ()> {
        match memchr::memchr(b'\0', self.0) {
            Some(null) => {
                // These will never fail.
                let bytes = self.read_bytes(null)?;
                self.skip(1)?;
                Ok(bytes.0)
            }
            None => {
                self.0 = &[];
                Err(())
            }
        }
    }

    /// Read a null terminated string at an offset.
    ///
    /// Does not assume any encoding. Does not return the null byte.
    #[inline]
    pub fn read_string_at(mut self, offset: usize) -> Result<&'data [u8], ()> {
        self.skip(offset)?;
        self.read_string()
    }

    /// Read an unsigned LEB128 number.
    ///
    /// Returns an error on encodings that would overflow a `u64`.
    pub fn read_uleb128(&mut self) -> Result<u64, ()> {
        let mut result = 0;
        let mut shift = 0;

        loop {
            let byte = *self.read::<u8>()?;
            // The 10th byte contributes bit 63 only; any higher payload bit
            // would overflow 64 bits, so reject bytes other than 0x00/0x01.
            if shift == 63 && byte != 0x00 && byte != 0x01 {
                return Err(());
            }
            result |= u64::from(byte & 0x7f) << shift;
            shift += 7;

            // High bit clear marks the final byte of the encoding.
            if byte & 0x80 == 0 {
                return Ok(result);
            }
        }
    }

    /// Read a signed LEB128 number.
    ///
    /// Returns an error on encodings that would overflow an `i64`.
    pub fn read_sleb128(&mut self) -> Result<i64, ()> {
        let mut result = 0;
        let mut shift = 0;

        loop {
            let byte = *self.read::<u8>()?;
            // At shift 63 only an all-zeros (0x00) or all-ones (0x7f)
            // payload is representable in an `i64`.
            if shift == 63 && byte != 0x00 && byte != 0x7f {
                return Err(());
            }
            result |= i64::from(byte & 0x7f) << shift;
            shift += 7;

            if byte & 0x80 == 0 {
                if shift < 64 && (byte & 0x40) != 0 {
                    // Sign extend the result.
                    result |= !0 << shift;
                }
                return Ok(result);
            }
        }
    }
}
|
||||
|
||||
// Only for Debug impl of `Bytes`.
|
||||
fn debug_list_bytes(bytes: &[u8], fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let mut list = fmt.debug_list();
|
||||
list.entries(bytes.iter().take(8).copied().map(DebugByte));
|
||||
if bytes.len() > 8 {
|
||||
list.entry(&DebugLen(bytes.len()));
|
||||
}
|
||||
list.finish()
|
||||
}
|
||||
|
||||
// Formats a single byte as `0xNN` inside the `Bytes` debug list.
struct DebugByte(u8);

impl fmt::Debug for DebugByte {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "0x{:02x}", self.0)
    }
}
|
||||
|
||||
// Formats the elided-tail marker (`...; N`, where N is the total length)
// inside the `Bytes` debug list.
struct DebugLen(usize);

impl fmt::Debug for DebugLen {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "...; {}", self.0)
    }
}
|
||||
|
||||
/// A newtype for byte strings.
///
/// For byte slices that are strings of an unknown encoding.
///
/// Provides a `Debug` implementation that interprets the bytes as UTF-8.
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub(crate) struct ByteString<'data>(pub &'data [u8]);

impl<'data> fmt::Debug for ByteString<'data> {
    // Lossy conversion: invalid UTF-8 sequences are shown as replacement characters.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "\"{}\"", String::from_utf8_lossy(self.0))
    }
}
|
||||
|
||||
/// Round `offset` up to the next multiple of `size`.
///
/// The bit-mask trick is only valid when `size` is a power of two;
/// `size == 0` would underflow.
#[allow(dead_code)]
#[inline]
pub(crate) fn align(offset: usize, size: usize) -> usize {
    let mask = size - 1;
    (offset + mask) & !mask
}
|
||||
|
||||
/// Return the subslice of `data` covering `range_address .. range_address + size`,
/// given that `data` itself begins at `data_address`.
///
/// Returns `None` if the range starts before `data_address`, if the offset or
/// size does not fit in `usize`, or if the range extends past the end of `data`.
#[allow(dead_code)]
pub(crate) fn data_range(
    data: &[u8],
    data_address: u64,
    range_address: u64,
    size: u64,
) -> Option<&[u8]> {
    let start: usize = range_address.checked_sub(data_address)?.try_into().ok()?;
    let len: usize = size.try_into().ok()?;
    data.get(start..)?.get(..len)
}
|
||||
|
||||
/// A table of zero-terminated strings.
///
/// This is used by most file formats for strings such as section names and symbol names.
#[derive(Debug, Clone, Copy)]
pub struct StringTable<'data, R = &'data [u8]>
where
    R: ReadRef<'data>,
{
    // `None` for the default (empty) table; lookups then always fail.
    data: Option<R>,
    // Bounds of the table within `data`, as byte offsets.
    start: u64,
    end: u64,
    // Ties the otherwise-unused 'data lifetime to the struct.
    marker: PhantomData<&'data ()>,
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> StringTable<'data, R> {
|
||||
/// Interpret the given data as a string table.
|
||||
pub fn new(data: R, start: u64, end: u64) -> Self {
|
||||
StringTable {
|
||||
data: Some(data),
|
||||
start,
|
||||
end,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the string at the given offset.
|
||||
pub fn get(&self, offset: u32) -> Result<&'data [u8], ()> {
|
||||
match self.data {
|
||||
Some(data) => {
|
||||
let r_start = self.start.checked_add(offset.into()).ok_or(())?;
|
||||
data.read_bytes_at_until(r_start..self.end, 0)
|
||||
}
|
||||
None => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> Default for StringTable<'data, R> {
    // The default table has no backing data, so `get` always returns `Err`.
    fn default() -> Self {
        StringTable {
            data: None,
            start: 0,
            end: 0,
            marker: PhantomData,
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::pod::bytes_of;

    #[test]
    fn bytes() {
        // Big-endian, so the in-memory byte order is 0x01 0x23 0x45 0x67.
        let x = u32::to_be(0x0123_4567);
        let data = Bytes(bytes_of(&x));

        let mut bytes = data;
        assert_eq!(bytes.skip(0), Ok(()));
        assert_eq!(bytes, data);

        let mut bytes = data;
        assert_eq!(bytes.skip(4), Ok(()));
        assert_eq!(bytes, Bytes(&[]));

        // Failed reads must leave the slice empty.
        let mut bytes = data;
        assert_eq!(bytes.skip(5), Err(()));
        assert_eq!(bytes, Bytes(&[]));

        let mut bytes = data;
        assert_eq!(bytes.read_bytes(0), Ok(Bytes(&[])));
        assert_eq!(bytes, data);

        let mut bytes = data;
        assert_eq!(bytes.read_bytes(4), Ok(data));
        assert_eq!(bytes, Bytes(&[]));

        let mut bytes = data;
        assert_eq!(bytes.read_bytes(5), Err(()));
        assert_eq!(bytes, Bytes(&[]));

        assert_eq!(data.read_bytes_at(0, 0), Ok(Bytes(&[])));
        assert_eq!(data.read_bytes_at(4, 0), Ok(Bytes(&[])));
        assert_eq!(data.read_bytes_at(0, 4), Ok(data));
        assert_eq!(data.read_bytes_at(1, 4), Err(()));

        let mut bytes = data;
        assert_eq!(bytes.read::<u16>(), Ok(&u16::to_be(0x0123)));
        assert_eq!(bytes, Bytes(&[0x45, 0x67]));
        assert_eq!(data.read_at::<u16>(2), Ok(&u16::to_be(0x4567)));
        // Misaligned and out-of-range offsets both fail.
        assert_eq!(data.read_at::<u16>(3), Err(()));
        assert_eq!(data.read_at::<u16>(4), Err(()));

        let mut bytes = data;
        assert_eq!(bytes.read::<u32>(), Ok(&x));
        assert_eq!(bytes, Bytes(&[]));

        let mut bytes = data;
        assert_eq!(bytes.read::<u64>(), Err(()));
        assert_eq!(bytes, Bytes(&[]));

        let mut bytes = data;
        assert_eq!(bytes.read_slice::<u8>(0), Ok(&[][..]));
        assert_eq!(bytes, data);

        let mut bytes = data;
        assert_eq!(bytes.read_slice::<u8>(4), Ok(data.0));
        assert_eq!(bytes, Bytes(&[]));

        let mut bytes = data;
        assert_eq!(bytes.read_slice::<u8>(5), Err(()));
        assert_eq!(bytes, Bytes(&[]));

        assert_eq!(data.read_slice_at::<u8>(0, 0), Ok(&[][..]));
        assert_eq!(data.read_slice_at::<u8>(4, 0), Ok(&[][..]));
        assert_eq!(data.read_slice_at::<u8>(0, 4), Ok(data.0));
        assert_eq!(data.read_slice_at::<u8>(1, 4), Err(()));

        let data = Bytes(&[0x01, 0x02, 0x00, 0x04]);

        // The string stops before the null and the reader skips past it.
        let mut bytes = data;
        assert_eq!(bytes.read_string(), Ok(&data.0[..2]));
        assert_eq!(bytes.0, &data.0[3..]);

        let mut bytes = data;
        bytes.skip(3).unwrap();
        assert_eq!(bytes.read_string(), Err(()));
        assert_eq!(bytes.0, &[]);

        assert_eq!(data.read_string_at(0), Ok(&data.0[..2]));
        assert_eq!(data.read_string_at(1), Ok(&data.0[1..2]));
        assert_eq!(data.read_string_at(2), Ok(&[][..]));
        assert_eq!(data.read_string_at(3), Err(()));
    }

    #[test]
    fn bytes_debug() {
        assert_eq!(format!("{:?}", Bytes(&[])), "[]");
        assert_eq!(format!("{:?}", Bytes(&[0x01])), "[0x01]");
        assert_eq!(
            format!(
                "{:?}",
                Bytes(&[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08])
            ),
            "[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]"
        );
        // More than 8 bytes: tail is elided with the total length.
        assert_eq!(
            format!(
                "{:?}",
                Bytes(&[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09])
            ),
            "[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, ...; 9]"
        );
    }
}
|
||||
966
vendor/object/src/read/wasm.rs
vendored
Normal file
966
vendor/object/src/read/wasm.rs
vendored
Normal file
@@ -0,0 +1,966 @@
|
||||
//! Support for reading Wasm files.
|
||||
//!
|
||||
//! [`WasmFile`] implements the [`Object`] trait for Wasm files.
|
||||
use alloc::boxed::Box;
|
||||
use alloc::vec::Vec;
|
||||
use core::marker::PhantomData;
|
||||
use core::ops::Range;
|
||||
use core::{slice, str};
|
||||
use wasmparser as wp;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, ComdatKind, CompressedData, CompressedFileRange, Error, Export, FileFlags,
|
||||
Import, NoDynamicRelocationIterator, Object, ObjectComdat, ObjectKind, ObjectSection,
|
||||
ObjectSegment, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Relocation, Result,
|
||||
SectionFlags, SectionIndex, SectionKind, SegmentFlags, SymbolFlags, SymbolIndex, SymbolKind,
|
||||
SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[repr(usize)]
|
||||
enum SectionId {
|
||||
Custom = 0,
|
||||
Type = 1,
|
||||
Import = 2,
|
||||
Function = 3,
|
||||
Table = 4,
|
||||
Memory = 5,
|
||||
Global = 6,
|
||||
Export = 7,
|
||||
Start = 8,
|
||||
Element = 9,
|
||||
Code = 10,
|
||||
Data = 11,
|
||||
DataCount = 12,
|
||||
}
|
||||
// Update this constant when adding new section id:
|
||||
const MAX_SECTION_ID: usize = SectionId::DataCount as usize;
|
||||
|
||||
/// A WebAssembly object file.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmFile<'data, R = &'data [u8]> {
|
||||
data: &'data [u8],
|
||||
has_memory64: bool,
|
||||
// All sections, including custom sections.
|
||||
sections: Vec<SectionHeader<'data>>,
|
||||
// Indices into `sections` of sections with a non-zero id.
|
||||
id_sections: Box<[Option<usize>; MAX_SECTION_ID + 1]>,
|
||||
// Whether the file has DWARF information.
|
||||
has_debug_symbols: bool,
|
||||
// Symbols collected from imports, exports, code and name sections.
|
||||
symbols: Vec<WasmSymbolInternal<'data>>,
|
||||
// Address of the function body for the entry point.
|
||||
entry: u64,
|
||||
marker: PhantomData<R>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SectionHeader<'data> {
|
||||
id: SectionId,
|
||||
range: Range<usize>,
|
||||
name: &'data str,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum LocalFunctionKind {
|
||||
Unknown,
|
||||
Exported { symbol_ids: Vec<u32> },
|
||||
Local { symbol_id: u32 },
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for wasmparser::Result<T> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.map_err(|_| Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> WasmFile<'data, R> {
|
||||
/// Parse the raw wasm data.
|
||||
pub fn parse(data: R) -> Result<Self> {
|
||||
let len = data.len().read_error("Unknown Wasm file size")?;
|
||||
let data = data.read_bytes_at(0, len).read_error("Wasm read failed")?;
|
||||
let parser = wp::Parser::new(0).parse_all(data);
|
||||
|
||||
let mut file = WasmFile {
|
||||
data,
|
||||
has_memory64: false,
|
||||
sections: Vec::new(),
|
||||
id_sections: Default::default(),
|
||||
has_debug_symbols: false,
|
||||
symbols: Vec::new(),
|
||||
entry: 0,
|
||||
marker: PhantomData,
|
||||
};
|
||||
|
||||
let mut main_file_symbol = Some(WasmSymbolInternal {
|
||||
name: "",
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::File,
|
||||
section: SymbolSection::None,
|
||||
scope: SymbolScope::Compilation,
|
||||
});
|
||||
|
||||
let mut imported_funcs_count = 0;
|
||||
let mut local_func_kinds = Vec::new();
|
||||
let mut entry_func_id = None;
|
||||
let mut code_range_start = 0;
|
||||
let mut code_func_index = 0;
|
||||
// One-to-one mapping of globals to their value (if the global is a constant integer).
|
||||
let mut global_values = Vec::new();
|
||||
|
||||
for payload in parser {
|
||||
let payload = payload.read_error("Invalid Wasm section header")?;
|
||||
|
||||
match payload {
|
||||
wp::Payload::TypeSection(section) => {
|
||||
file.add_section(SectionId::Type, section.range(), "");
|
||||
}
|
||||
wp::Payload::ImportSection(section) => {
|
||||
file.add_section(SectionId::Import, section.range(), "");
|
||||
let mut last_module_name = None;
|
||||
|
||||
for import in section {
|
||||
let import = import.read_error("Couldn't read an import item")?;
|
||||
let module_name = import.module;
|
||||
|
||||
if last_module_name != Some(module_name) {
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: module_name,
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::File,
|
||||
section: SymbolSection::None,
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
last_module_name = Some(module_name);
|
||||
}
|
||||
|
||||
let kind = match import.ty {
|
||||
wp::TypeRef::Func(_) => {
|
||||
imported_funcs_count += 1;
|
||||
SymbolKind::Text
|
||||
}
|
||||
wp::TypeRef::Memory(memory) => {
|
||||
file.has_memory64 |= memory.memory64;
|
||||
SymbolKind::Data
|
||||
}
|
||||
wp::TypeRef::Table(_) | wp::TypeRef::Global(_) => SymbolKind::Data,
|
||||
wp::TypeRef::Tag(_) => SymbolKind::Unknown,
|
||||
};
|
||||
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: import.name,
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind,
|
||||
section: SymbolSection::Undefined,
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
}
|
||||
}
|
||||
wp::Payload::FunctionSection(section) => {
|
||||
file.add_section(SectionId::Function, section.range(), "");
|
||||
local_func_kinds =
|
||||
vec![LocalFunctionKind::Unknown; section.into_iter().count()];
|
||||
}
|
||||
wp::Payload::TableSection(section) => {
|
||||
file.add_section(SectionId::Table, section.range(), "");
|
||||
}
|
||||
wp::Payload::MemorySection(section) => {
|
||||
file.add_section(SectionId::Memory, section.range(), "");
|
||||
for memory in section {
|
||||
let memory = memory.read_error("Couldn't read a memory item")?;
|
||||
file.has_memory64 |= memory.memory64;
|
||||
}
|
||||
}
|
||||
wp::Payload::GlobalSection(section) => {
|
||||
file.add_section(SectionId::Global, section.range(), "");
|
||||
for global in section {
|
||||
let global = global.read_error("Couldn't read a global item")?;
|
||||
let mut address = None;
|
||||
if !global.ty.mutable {
|
||||
// There should be exactly one instruction.
|
||||
let init = global.init_expr.get_operators_reader().read();
|
||||
address = match init.read_error("Couldn't read a global init expr")? {
|
||||
wp::Operator::I32Const { value } => Some(value as u64),
|
||||
wp::Operator::I64Const { value } => Some(value as u64),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
global_values.push(address);
|
||||
}
|
||||
}
|
||||
wp::Payload::ExportSection(section) => {
|
||||
file.add_section(SectionId::Export, section.range(), "");
|
||||
if let Some(main_file_symbol) = main_file_symbol.take() {
|
||||
file.symbols.push(main_file_symbol);
|
||||
}
|
||||
|
||||
for export in section {
|
||||
let export = export.read_error("Couldn't read an export item")?;
|
||||
|
||||
let (kind, section_idx) = match export.kind {
|
||||
wp::ExternalKind::Func => {
|
||||
if let Some(local_func_id) =
|
||||
export.index.checked_sub(imported_funcs_count)
|
||||
{
|
||||
let local_func_kind =
|
||||
&mut local_func_kinds[local_func_id as usize];
|
||||
if let LocalFunctionKind::Unknown = local_func_kind {
|
||||
*local_func_kind = LocalFunctionKind::Exported {
|
||||
symbol_ids: Vec::new(),
|
||||
};
|
||||
}
|
||||
let symbol_ids = match local_func_kind {
|
||||
LocalFunctionKind::Exported { symbol_ids } => symbol_ids,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
symbol_ids.push(file.symbols.len() as u32);
|
||||
}
|
||||
(SymbolKind::Text, SectionId::Code)
|
||||
}
|
||||
wp::ExternalKind::Table
|
||||
| wp::ExternalKind::Memory
|
||||
| wp::ExternalKind::Global => (SymbolKind::Data, SectionId::Data),
|
||||
// TODO
|
||||
wp::ExternalKind::Tag => continue,
|
||||
};
|
||||
|
||||
// Try to guess the symbol address. Rust and C export a global containing
|
||||
// the address in linear memory of the symbol.
|
||||
let mut address = 0;
|
||||
if export.kind == wp::ExternalKind::Global {
|
||||
if let Some(&Some(x)) = global_values.get(export.index as usize) {
|
||||
address = x;
|
||||
}
|
||||
}
|
||||
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: export.name,
|
||||
address,
|
||||
size: 0,
|
||||
kind,
|
||||
section: SymbolSection::Section(SectionIndex(section_idx as usize)),
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
}
|
||||
}
|
||||
wp::Payload::StartSection { func, range, .. } => {
|
||||
file.add_section(SectionId::Start, range, "");
|
||||
entry_func_id = Some(func);
|
||||
}
|
||||
wp::Payload::ElementSection(section) => {
|
||||
file.add_section(SectionId::Element, section.range(), "");
|
||||
}
|
||||
wp::Payload::CodeSectionStart { range, .. } => {
|
||||
code_range_start = range.start;
|
||||
file.add_section(SectionId::Code, range, "");
|
||||
if let Some(main_file_symbol) = main_file_symbol.take() {
|
||||
file.symbols.push(main_file_symbol);
|
||||
}
|
||||
}
|
||||
wp::Payload::CodeSectionEntry(body) => {
|
||||
let i = code_func_index;
|
||||
code_func_index += 1;
|
||||
|
||||
let range = body.range();
|
||||
|
||||
let address = range.start as u64 - code_range_start as u64;
|
||||
let size = (range.end - range.start) as u64;
|
||||
|
||||
if entry_func_id == Some(i as u32) {
|
||||
file.entry = address;
|
||||
}
|
||||
|
||||
let local_func_kind = &mut local_func_kinds[i];
|
||||
match local_func_kind {
|
||||
LocalFunctionKind::Unknown => {
|
||||
*local_func_kind = LocalFunctionKind::Local {
|
||||
symbol_id: file.symbols.len() as u32,
|
||||
};
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: "",
|
||||
address,
|
||||
size,
|
||||
kind: SymbolKind::Text,
|
||||
section: SymbolSection::Section(SectionIndex(
|
||||
SectionId::Code as usize,
|
||||
)),
|
||||
scope: SymbolScope::Compilation,
|
||||
});
|
||||
}
|
||||
LocalFunctionKind::Exported { symbol_ids } => {
|
||||
for symbol_id in core::mem::take(symbol_ids) {
|
||||
let export_symbol = &mut file.symbols[symbol_id as usize];
|
||||
export_symbol.address = address;
|
||||
export_symbol.size = size;
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
wp::Payload::DataSection(section) => {
|
||||
file.add_section(SectionId::Data, section.range(), "");
|
||||
}
|
||||
wp::Payload::DataCountSection { range, .. } => {
|
||||
file.add_section(SectionId::DataCount, range, "");
|
||||
}
|
||||
wp::Payload::CustomSection(section) => {
|
||||
let name = section.name();
|
||||
let size = section.data().len();
|
||||
let mut range = section.range();
|
||||
range.start = range.end - size;
|
||||
file.add_section(SectionId::Custom, range, name);
|
||||
if name == "name" {
|
||||
for name in
|
||||
wp::NameSectionReader::new(section.data(), section.data_offset())
|
||||
{
|
||||
// TODO: Right now, ill-formed name subsections
|
||||
// are silently ignored in order to maintain
|
||||
// compatibility with extended name sections, which
|
||||
// are not yet supported by the version of
|
||||
// `wasmparser` currently used.
|
||||
// A better fix would be to update `wasmparser` to
|
||||
// the newest version, but this requires
|
||||
// a major rewrite of this file.
|
||||
if let Ok(wp::Name::Function(name_map)) = name {
|
||||
for naming in name_map {
|
||||
let naming =
|
||||
naming.read_error("Couldn't read a function name")?;
|
||||
if let Some(local_index) =
|
||||
naming.index.checked_sub(imported_funcs_count)
|
||||
{
|
||||
if let LocalFunctionKind::Local { symbol_id } =
|
||||
local_func_kinds[local_index as usize]
|
||||
{
|
||||
file.symbols[symbol_id as usize].name = naming.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if name.starts_with(".debug_") {
|
||||
file.has_debug_symbols = true;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(file)
|
||||
}
|
||||
|
||||
fn add_section(&mut self, id: SectionId, range: Range<usize>, name: &'data str) {
|
||||
let section = SectionHeader { id, range, name };
|
||||
self.id_sections[id as usize] = Some(self.sections.len());
|
||||
self.sections.push(section);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R> read::private::Sealed for WasmFile<'data, R> {}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>> Object<'data, 'file> for WasmFile<'data, R>
|
||||
where
|
||||
'data: 'file,
|
||||
R: 'file,
|
||||
{
|
||||
type Segment = WasmSegment<'data, 'file, R>;
|
||||
type SegmentIterator = WasmSegmentIterator<'data, 'file, R>;
|
||||
type Section = WasmSection<'data, 'file, R>;
|
||||
type SectionIterator = WasmSectionIterator<'data, 'file, R>;
|
||||
type Comdat = WasmComdat<'data, 'file, R>;
|
||||
type ComdatIterator = WasmComdatIterator<'data, 'file, R>;
|
||||
type Symbol = WasmSymbol<'data, 'file>;
|
||||
type SymbolIterator = WasmSymbolIterator<'data, 'file>;
|
||||
type SymbolTable = WasmSymbolTable<'data, 'file>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
#[inline]
|
||||
fn architecture(&self) -> Architecture {
|
||||
if self.has_memory64 {
|
||||
Architecture::Wasm64
|
||||
} else {
|
||||
Architecture::Wasm32
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
self.has_memory64
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
// TODO: check for `linking` custom section
|
||||
ObjectKind::Unknown
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> Self::SegmentIterator {
|
||||
WasmSegmentIterator { file: self }
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<WasmSection<'data, 'file, R>> {
|
||||
self.sections()
|
||||
.find(|section| section.name_bytes() == Ok(section_name))
|
||||
}
|
||||
|
||||
fn section_by_index(&'file self, index: SectionIndex) -> Result<WasmSection<'data, 'file, R>> {
|
||||
// TODO: Missing sections should return an empty section.
|
||||
let id_section = self
|
||||
.id_sections
|
||||
.get(index.0)
|
||||
.and_then(|x| *x)
|
||||
.read_error("Invalid Wasm section index")?;
|
||||
let section = self.sections.get(id_section).unwrap();
|
||||
Ok(WasmSection {
|
||||
file: self,
|
||||
section,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> Self::SectionIterator {
|
||||
WasmSectionIterator {
|
||||
file: self,
|
||||
sections: self.sections.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> Self::ComdatIterator {
|
||||
WasmComdatIterator { file: self }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol_by_index(&'file self, index: SymbolIndex) -> Result<WasmSymbol<'data, 'file>> {
|
||||
let symbol = self
|
||||
.symbols
|
||||
.get(index.0)
|
||||
.read_error("Invalid Wasm symbol index")?;
|
||||
Ok(WasmSymbol { index, symbol })
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: self.symbols.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_table(&'file self) -> Option<WasmSymbolTable<'data, 'file>> {
|
||||
Some(WasmSymbolTable {
|
||||
symbols: &self.symbols,
|
||||
})
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: [].iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_symbol_table(&'file self) -> Option<WasmSymbolTable<'data, 'file>> {
|
||||
None
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_relocations(&self) -> Option<NoDynamicRelocationIterator> {
|
||||
None
|
||||
}
|
||||
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>> {
|
||||
// TODO: return entries in the import section
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
// TODO: return entries in the export section
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.has_debug_symbols
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn entry(&'file self) -> u64 {
|
||||
self.entry
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the segments in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSegmentIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmSegmentIterator<'data, 'file, R> {
|
||||
type Item = WasmSegment<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSegment<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmSegment<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R> ObjectSegment<'data> for WasmSegment<'data, 'file, R> {
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSectionIterator<'data, 'file, R = &'data [u8]> {
|
||||
file: &'file WasmFile<'data, R>,
|
||||
sections: slice::Iter<'file, SectionHeader<'data>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmSectionIterator<'data, 'file, R> {
|
||||
type Item = WasmSection<'data, 'file, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let section = self.sections.next()?;
|
||||
Some(WasmSection {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`WasmFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSection<'data, 'file, R = &'data [u8]> {
|
||||
file: &'file WasmFile<'data, R>,
|
||||
section: &'file SectionHeader<'data>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmSection<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>> ObjectSection<'data> for WasmSection<'data, 'file, R> {
|
||||
type RelocationIterator = WasmRelocationIterator<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn index(&self) -> SectionIndex {
|
||||
// Note that we treat all custom sections as index 0.
|
||||
// This is ok because they are never looked up by index.
|
||||
SectionIndex(self.section.id as usize)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
let range = &self.section.range;
|
||||
(range.end - range.start) as u64
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
1
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
let range = &self.section.range;
|
||||
Some((range.start as _, range.end as _))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
let range = &self.section.range;
|
||||
self.file
|
||||
.data
|
||||
.read_bytes_at(range.start as u64, range.end as u64 - range.start as u64)
|
||||
.read_error("Invalid Wasm section size or offset")
|
||||
}
|
||||
|
||||
fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
self.name().map(str::as_bytes)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
Ok(match self.section.id {
|
||||
SectionId::Custom => self.section.name,
|
||||
SectionId::Type => "<type>",
|
||||
SectionId::Import => "<import>",
|
||||
SectionId::Function => "<function>",
|
||||
SectionId::Table => "<table>",
|
||||
SectionId::Memory => "<memory>",
|
||||
SectionId::Global => "<global>",
|
||||
SectionId::Export => "<export>",
|
||||
SectionId::Start => "<start>",
|
||||
SectionId::Element => "<element>",
|
||||
SectionId::Code => "<code>",
|
||||
SectionId::Data => "<data>",
|
||||
SectionId::DataCount => "<data_count>",
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SectionKind {
|
||||
match self.section.id {
|
||||
SectionId::Custom => match self.section.name {
|
||||
"reloc." | "linking" => SectionKind::Linker,
|
||||
_ => SectionKind::Other,
|
||||
},
|
||||
SectionId::Type => SectionKind::Metadata,
|
||||
SectionId::Import => SectionKind::Linker,
|
||||
SectionId::Function => SectionKind::Metadata,
|
||||
SectionId::Table => SectionKind::UninitializedData,
|
||||
SectionId::Memory => SectionKind::UninitializedData,
|
||||
SectionId::Global => SectionKind::Data,
|
||||
SectionId::Export => SectionKind::Linker,
|
||||
SectionId::Start => SectionKind::Linker,
|
||||
SectionId::Element => SectionKind::Data,
|
||||
SectionId::Code => SectionKind::Text,
|
||||
SectionId::Data => SectionKind::Data,
|
||||
SectionId::DataCount => SectionKind::UninitializedData,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn relocations(&self) -> WasmRelocationIterator<'data, 'file, R> {
|
||||
WasmRelocationIterator(PhantomData)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdatIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmComdatIterator<'data, 'file, R> {
|
||||
type Item = WasmComdat<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdat<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmComdat<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R> ObjectComdat<'data> for WasmComdat<'data, 'file, R> {
|
||||
type SectionIterator = WasmComdatSectionIterator<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdatSectionIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmComdatSectionIterator<'data, 'file, R> {
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol table in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSymbolTable<'data, 'file> {
|
||||
symbols: &'file [WasmSymbolInternal<'data>],
|
||||
}
|
||||
|
||||
impl<'data, 'file> read::private::Sealed for WasmSymbolTable<'data, 'file> {}
|
||||
|
||||
impl<'data, 'file> ObjectSymbolTable<'data> for WasmSymbolTable<'data, 'file> {
|
||||
type Symbol = WasmSymbol<'data, 'file>;
|
||||
type SymbolIterator = WasmSymbolIterator<'data, 'file>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: self.symbols.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
|
||||
let symbol = self
|
||||
.symbols
|
||||
.get(index.0)
|
||||
.read_error("Invalid Wasm symbol index")?;
|
||||
Ok(WasmSymbol { index, symbol })
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSymbolIterator<'data, 'file> {
|
||||
symbols: core::iter::Enumerate<slice::Iter<'file, WasmSymbolInternal<'data>>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file> Iterator for WasmSymbolIterator<'data, 'file> {
|
||||
type Item = WasmSymbol<'data, 'file>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let (index, symbol) = self.symbols.next()?;
|
||||
Some(WasmSymbol {
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`WasmFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct WasmSymbol<'data, 'file> {
|
||||
index: SymbolIndex,
|
||||
symbol: &'file WasmSymbolInternal<'data>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct WasmSymbolInternal<'data> {
|
||||
name: &'data str,
|
||||
address: u64,
|
||||
size: u64,
|
||||
kind: SymbolKind,
|
||||
section: SymbolSection,
|
||||
scope: SymbolScope,
|
||||
}
|
||||
|
||||
impl<'data, 'file> read::private::Sealed for WasmSymbol<'data, 'file> {}
|
||||
|
||||
impl<'data, 'file> ObjectSymbol<'data> for WasmSymbol<'data, 'file> {
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> read::Result<&'data [u8]> {
|
||||
Ok(self.symbol.name.as_bytes())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> read::Result<&'data str> {
|
||||
Ok(self.symbol.name)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.symbol.address
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.symbol.size
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SymbolKind {
|
||||
self.symbol.kind
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn section(&self) -> SymbolSection {
|
||||
self.symbol.section
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.section == SymbolSection::Undefined
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
(self.symbol.kind == SymbolKind::Text || self.symbol.kind == SymbolKind::Data)
|
||||
&& self.symbol.section != SymbolSection::Undefined
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.section == SymbolSection::Common
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn scope(&self) -> SymbolScope {
|
||||
self.symbol.scope
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
self.symbol.scope != SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
self.symbol.scope == SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
SymbolFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the relocations for a [`WasmSection`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmRelocationIterator<'data, 'file, R = &'data [u8]>(
|
||||
PhantomData<(&'data (), &'file (), R)>,
|
||||
);
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmRelocationIterator<'data, 'file, R> {
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
135
vendor/object/src/read/xcoff/comdat.rs
vendored
Normal file
135
vendor/object/src/read/xcoff/comdat.rs
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
//! XCOFF doesn't support the COMDAT section.
|
||||
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::xcoff;
|
||||
|
||||
use crate::read::{self, ComdatKind, ObjectComdat, ReadRef, Result, SectionIndex, SymbolIndex};
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffComdatIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdatIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the COMDAT section groups in a [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffComdatIterator64<'data, 'file, R = &'data [u8]> =
    XcoffComdatIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the COMDAT section groups in a [`XcoffFile`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct XcoffComdatIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Held only to tie this iterator's lifetimes to the file; the stub
    // never reads it.
    #[allow(unused)]
    pub(crate) file: &'file XcoffFile<'data, Xcoff, R>,
}

impl<'data, 'file, Xcoff, R> Iterator for XcoffComdatIterator<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type Item = XcoffComdat<'data, 'file, Xcoff, R>;

    // Stub: the iterator is always empty.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}

/// A COMDAT section group in a [`XcoffFile32`](super::XcoffFile32).
pub type XcoffComdat32<'data, 'file, R = &'data [u8]> =
    XcoffComdat<'data, 'file, xcoff::FileHeader32, R>;

/// A COMDAT section group in a [`XcoffFile64`](super::XcoffFile64).
pub type XcoffComdat64<'data, 'file, R = &'data [u8]> =
    XcoffComdat<'data, 'file, xcoff::FileHeader64, R>;

/// A COMDAT section group in a [`XcoffFile`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct XcoffComdat<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Held only to tie the lifetimes together; never read by the stub.
    #[allow(unused)]
    file: &'file XcoffFile<'data, Xcoff, R>,
}

impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffComdat<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
}

impl<'data, 'file, Xcoff, R> ObjectComdat<'data> for XcoffComdat<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type SectionIterator = XcoffComdatSectionIterator<'data, 'file, Xcoff, R>;

    // The methods below are never reached via this module: the stub
    // `XcoffComdatIterator::next` above always yields `None`, so no
    // `XcoffComdat` value is ever produced for a caller to use.
    #[inline]
    fn kind(&self) -> ComdatKind {
        unreachable!();
    }

    #[inline]
    fn symbol(&self) -> SymbolIndex {
        unreachable!();
    }

    #[inline]
    fn name_bytes(&self) -> Result<&[u8]> {
        unreachable!();
    }

    #[inline]
    fn name(&self) -> Result<&str> {
        unreachable!();
    }

    #[inline]
    fn sections(&self) -> Self::SectionIterator {
        unreachable!();
    }
}

/// An iterator for the sections in a COMDAT section group in a [`XcoffFile32`](super::XcoffFile32).
pub type XcoffComdatSectionIterator32<'data, 'file, R = &'data [u8]> =
    XcoffComdatSectionIterator<'data, 'file, xcoff::FileHeader32, R>;
/// An iterator for the sections in a COMDAT section group in a [`XcoffFile64`](super::XcoffFile64).
pub type XcoffComdatSectionIterator64<'data, 'file, R = &'data [u8]> =
    XcoffComdatSectionIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the sections in a COMDAT section group in a [`XcoffFile`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct XcoffComdatSectionIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Held only to tie the lifetimes together; never read by the stub.
    #[allow(unused)]
    file: &'file XcoffFile<'data, Xcoff, R>,
}

impl<'data, 'file, Xcoff, R> Iterator for XcoffComdatSectionIterator<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type Item = SectionIndex;

    // Stub: the iterator is always empty.
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}
|
||||
696
vendor/object/src/read/xcoff/file.rs
vendored
Normal file
696
vendor/object/src/read/xcoff/file.rs
vendored
Normal file
@@ -0,0 +1,696 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{self, Error, NoDynamicRelocationIterator, Object, ReadError, ReadRef, Result};
|
||||
|
||||
use crate::{
|
||||
xcoff, Architecture, BigEndian as BE, FileFlags, ObjectKind, ObjectSection, Pod, SectionIndex,
|
||||
SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{
|
||||
CsectAux, FileAux, SectionHeader, SectionTable, Symbol, SymbolTable, XcoffComdat,
|
||||
XcoffComdatIterator, XcoffSection, XcoffSectionIterator, XcoffSegment, XcoffSegmentIterator,
|
||||
XcoffSymbol, XcoffSymbolIterator, XcoffSymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit XCOFF object file.
///
/// This is a file that starts with [`xcoff::FileHeader32`], and corresponds
/// to [`crate::FileKind::Xcoff32`].
pub type XcoffFile32<'data, R = &'data [u8]> = XcoffFile<'data, xcoff::FileHeader32, R>;
/// A 64-bit XCOFF object file.
///
/// This is a file that starts with [`xcoff::FileHeader64`], and corresponds
/// to [`crate::FileKind::Xcoff64`].
pub type XcoffFile64<'data, R = &'data [u8]> = XcoffFile<'data, xcoff::FileHeader64, R>;

/// A partially parsed XCOFF file.
///
/// Most functionality is provided by the [`Object`] trait implementation.
#[derive(Debug)]
pub struct XcoffFile<'data, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // The underlying file data.
    pub(super) data: R,
    // The file header, borrowed from `data`.
    pub(super) header: &'data Xcoff,
    // The auxiliary (a.k.a. optional) header, if present; see
    // `FileHeader::aux_header` for when it is read.
    pub(super) aux_header: Option<&'data Xcoff::AuxHeader>,
    // Parsed section table.
    pub(super) sections: SectionTable<'data, Xcoff>,
    // Parsed symbol table.
    pub(super) symbols: SymbolTable<'data, Xcoff, R>,
}
|
||||
|
||||
impl<'data, Xcoff, R> XcoffFile<'data, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    /// Parse the raw XCOFF file data.
    ///
    /// Reads, in order: the file header, the optional auxiliary header,
    /// the section table, and the symbol table. The running `offset` is
    /// advanced by each step, so the call order matters.
    pub fn parse(data: R) -> Result<Self> {
        let mut offset = 0;
        let header = Xcoff::parse(data, &mut offset)?;
        let aux_header = header.aux_header(data, &mut offset)?;
        let sections = header.sections(data, &mut offset)?;
        let symbols = header.symbols(data)?;

        Ok(XcoffFile {
            data,
            header,
            aux_header,
            sections,
            symbols,
        })
    }

    /// Returns the raw data.
    pub fn data(&self) -> R {
        self.data
    }

    /// Returns the raw XCOFF file header.
    pub fn raw_header(&self) -> &'data Xcoff {
        self.header
    }
}

impl<'data, Xcoff, R> read::private::Sealed for XcoffFile<'data, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Object<'data, 'file> for XcoffFile<'data, Xcoff, R>
where
    'data: 'file,
    Xcoff: FileHeader,
    R: 'file + ReadRef<'data>,
{
    type Segment = XcoffSegment<'data, 'file, Xcoff, R>;
    type SegmentIterator = XcoffSegmentIterator<'data, 'file, Xcoff, R>;
    type Section = XcoffSection<'data, 'file, Xcoff, R>;
    type SectionIterator = XcoffSectionIterator<'data, 'file, Xcoff, R>;
    type Comdat = XcoffComdat<'data, 'file, Xcoff, R>;
    type ComdatIterator = XcoffComdatIterator<'data, 'file, Xcoff, R>;
    type Symbol = XcoffSymbol<'data, 'file, Xcoff, R>;
    type SymbolIterator = XcoffSymbolIterator<'data, 'file, Xcoff, R>;
    type SymbolTable = XcoffSymbolTable<'data, 'file, Xcoff, R>;
    type DynamicRelocationIterator = NoDynamicRelocationIterator;

    // XCOFF is a PowerPC format; the 64-bit variant maps to PowerPC64.
    fn architecture(&self) -> crate::Architecture {
        if self.is_64() {
            Architecture::PowerPc64
        } else {
            Architecture::PowerPc
        }
    }

    // XCOFF is always big-endian.
    fn is_little_endian(&self) -> bool {
        false
    }

    fn is_64(&self) -> bool {
        self.header.is_type_64()
    }

    // Classify the file from the header flags:
    // F_EXEC -> executable, F_SHROBJ -> shared object, and an unset
    // F_RELFLG (relocation info not stripped) -> relocatable object.
    fn kind(&self) -> ObjectKind {
        let flags = self.header.f_flags();
        if flags & xcoff::F_EXEC != 0 {
            ObjectKind::Executable
        } else if flags & xcoff::F_SHROBJ != 0 {
            ObjectKind::Dynamic
        } else if flags & xcoff::F_RELFLG == 0 {
            ObjectKind::Relocatable
        } else {
            ObjectKind::Unknown
        }
    }

    fn segments(&'file self) -> XcoffSegmentIterator<'data, 'file, Xcoff, R> {
        XcoffSegmentIterator { file: self }
    }

    // Linear scan over the section table; XCOFF has no section name index.
    fn section_by_name_bytes(
        &'file self,
        section_name: &[u8],
    ) -> Option<XcoffSection<'data, 'file, Xcoff, R>> {
        self.sections()
            .find(|section| section.name_bytes() == Ok(section_name))
    }

    fn section_by_index(
        &'file self,
        index: SectionIndex,
    ) -> Result<XcoffSection<'data, 'file, Xcoff, R>> {
        let section = self.sections.section(index)?;
        Ok(XcoffSection {
            file: self,
            section,
            index,
        })
    }

    fn sections(&'file self) -> XcoffSectionIterator<'data, 'file, Xcoff, R> {
        XcoffSectionIterator {
            file: self,
            iter: self.sections.iter().enumerate(),
        }
    }

    // Always empty; see the stub `XcoffComdatIterator`.
    fn comdats(&'file self) -> XcoffComdatIterator<'data, 'file, Xcoff, R> {
        XcoffComdatIterator { file: self }
    }

    fn symbol_table(&'file self) -> Option<XcoffSymbolTable<'data, 'file, Xcoff, R>> {
        if self.symbols.is_empty() {
            return None;
        }
        Some(XcoffSymbolTable {
            symbols: &self.symbols,
            file: self,
        })
    }

    fn symbol_by_index(
        &'file self,
        index: SymbolIndex,
    ) -> Result<XcoffSymbol<'data, 'file, Xcoff, R>> {
        let symbol = self.symbols.symbol(index.0)?;
        Ok(XcoffSymbol {
            symbols: &self.symbols,
            index,
            symbol,
            file: self,
        })
    }

    fn symbols(&'file self) -> XcoffSymbolIterator<'data, 'file, Xcoff, R> {
        XcoffSymbolIterator {
            file: self,
            symbols: self.symbols.iter(),
        }
    }

    fn dynamic_symbol_table(&'file self) -> Option<XcoffSymbolTable<'data, 'file, Xcoff, R>> {
        None
    }

    fn dynamic_symbols(&'file self) -> XcoffSymbolIterator<'data, 'file, Xcoff, R> {
        // TODO: return the symbols in the STYP_LOADER section.
        XcoffSymbolIterator {
            file: self,
            symbols: self.symbols.iter_none(),
        }
    }

    fn dynamic_relocations(&'file self) -> Option<Self::DynamicRelocationIterator> {
        // TODO: return the relocations in the STYP_LOADER section.
        None
    }

    fn imports(&self) -> Result<alloc::vec::Vec<crate::Import<'data>>> {
        // TODO: return the imports in the STYP_LOADER section.
        Ok(Vec::new())
    }

    fn exports(&self) -> Result<alloc::vec::Vec<crate::Export<'data>>> {
        // TODO: return the exports in the STYP_LOADER section.
        Ok(Vec::new())
    }

    // Debug info lives in the `.debug` (stabs) or `.dwinfo` (DWARF) section.
    fn has_debug_symbols(&self) -> bool {
        self.section_by_name(".debug").is_some() || self.section_by_name(".dwinfo").is_some()
    }

    fn relative_address_base(&'file self) -> u64 {
        0
    }

    // The entry point is only recorded in the auxiliary header; files
    // without one report 0.
    fn entry(&'file self) -> u64 {
        if let Some(aux_header) = self.aux_header {
            aux_header.o_entry().into()
        } else {
            0
        }
    }

    fn flags(&self) -> FileFlags {
        FileFlags::Xcoff {
            f_flags: self.header.f_flags(),
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::FileHeader32`] and [`xcoff::FileHeader64`].
#[allow(missing_docs)]
pub trait FileHeader: Debug + Pod {
    // Word is u32 for 32-bit XCOFF and u64 for 64-bit XCOFF.
    type Word: Into<u64>;
    type AuxHeader: AuxHeader<Word = Self::Word>;
    type SectionHeader: SectionHeader<Word = Self::Word>;
    type Symbol: Symbol<Word = Self::Word>;
    type FileAux: FileAux;
    type CsectAux: CsectAux;

    /// Return true if this type is a 64-bit header.
    fn is_type_64(&self) -> bool;

    fn f_magic(&self) -> u16;
    fn f_nscns(&self) -> u16;
    fn f_timdat(&self) -> u32;
    fn f_symptr(&self) -> Self::Word;
    fn f_nsyms(&self) -> u32;
    fn f_opthdr(&self) -> u16;
    fn f_flags(&self) -> u16;

    // Provided methods.

    /// Read the file header.
    ///
    /// Also checks that the magic field in the file header is a supported format.
    fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> Result<&'data Self> {
        let header = data
            .read::<Self>(offset)
            .read_error("Invalid XCOFF header size or alignment")?;
        if !header.is_supported() {
            return Err(Error("Unsupported XCOFF header"));
        }
        Ok(header)
    }

    // The magic number must match the bit width of `Self`.
    fn is_supported(&self) -> bool {
        (self.is_type_64() && self.f_magic() == xcoff::MAGIC_64)
            || (!self.is_type_64() && self.f_magic() == xcoff::MAGIC_32)
    }

    /// Read the auxiliary file header.
    ///
    /// Returns `Ok(None)` (after skipping `f_opthdr` bytes) when the file is
    /// not an executable, or when the recorded auxiliary header size does not
    /// match the full structure size.
    fn aux_header<'data, R: ReadRef<'data>>(
        &self,
        data: R,
        offset: &mut u64,
    ) -> Result<Option<&'data Self::AuxHeader>> {
        let aux_header_size = self.f_opthdr();
        if self.f_flags() & xcoff::F_EXEC == 0 {
            // No auxiliary header is required for an object file that is not an executable.
            // TODO: Some AIX programs generate auxiliary headers for 32-bit object files
            // that end after the data_start field.
            *offset += u64::from(aux_header_size);
            return Ok(None);
        }
        // Executables, however, must have auxiliary headers that include the
        // full structure definitions.
        if aux_header_size != mem::size_of::<Self::AuxHeader>() as u16 {
            *offset += u64::from(aux_header_size);
            return Ok(None);
        }
        let aux_header = data
            .read::<Self::AuxHeader>(offset)
            .read_error("Invalid XCOFF auxiliary header size")?;
        Ok(Some(aux_header))
    }

    /// Read the section table.
    #[inline]
    fn sections<'data, R: ReadRef<'data>>(
        &self,
        data: R,
        offset: &mut u64,
    ) -> Result<SectionTable<'data, Self>> {
        SectionTable::parse(self, data, offset)
    }

    /// Return the symbol table.
    #[inline]
    fn symbols<'data, R: ReadRef<'data>>(&self, data: R) -> Result<SymbolTable<'data, Self, R>> {
        SymbolTable::parse(*self, data)
    }
}
|
||||
|
||||
// Field accessors for the 32-bit file header. All on-disk fields are
// big-endian, hence the `.get(BE)` reads.
impl FileHeader for xcoff::FileHeader32 {
    type Word = u32;
    type AuxHeader = xcoff::AuxHeader32;
    type SectionHeader = xcoff::SectionHeader32;
    type Symbol = xcoff::Symbol32;
    type FileAux = xcoff::FileAux32;
    type CsectAux = xcoff::CsectAux32;

    fn is_type_64(&self) -> bool {
        false
    }

    fn f_magic(&self) -> u16 {
        self.f_magic.get(BE)
    }

    fn f_nscns(&self) -> u16 {
        self.f_nscns.get(BE)
    }

    fn f_timdat(&self) -> u32 {
        self.f_timdat.get(BE)
    }

    fn f_symptr(&self) -> Self::Word {
        self.f_symptr.get(BE)
    }

    fn f_nsyms(&self) -> u32 {
        self.f_nsyms.get(BE)
    }

    fn f_opthdr(&self) -> u16 {
        self.f_opthdr.get(BE)
    }

    fn f_flags(&self) -> u16 {
        self.f_flags.get(BE)
    }
}
|
||||
|
||||
// Field accessors for the 64-bit file header. All on-disk fields are
// big-endian, hence the `.get(BE)` reads.
impl FileHeader for xcoff::FileHeader64 {
    type Word = u64;
    type AuxHeader = xcoff::AuxHeader64;
    type SectionHeader = xcoff::SectionHeader64;
    type Symbol = xcoff::Symbol64;
    type FileAux = xcoff::FileAux64;
    type CsectAux = xcoff::CsectAux64;

    fn is_type_64(&self) -> bool {
        true
    }

    fn f_magic(&self) -> u16 {
        self.f_magic.get(BE)
    }

    fn f_nscns(&self) -> u16 {
        self.f_nscns.get(BE)
    }

    fn f_timdat(&self) -> u32 {
        self.f_timdat.get(BE)
    }

    fn f_symptr(&self) -> Self::Word {
        self.f_symptr.get(BE)
    }

    fn f_nsyms(&self) -> u32 {
        self.f_nsyms.get(BE)
    }

    fn f_opthdr(&self) -> u16 {
        self.f_opthdr.get(BE)
    }

    fn f_flags(&self) -> u16 {
        self.f_flags.get(BE)
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::AuxHeader32`] and [`xcoff::AuxHeader64`].
#[allow(missing_docs)]
pub trait AuxHeader: Debug + Pod {
    // Word is u32 for 32-bit XCOFF and u64 for 64-bit XCOFF.
    type Word: Into<u64>;

    fn o_mflag(&self) -> u16;
    fn o_vstamp(&self) -> u16;
    fn o_tsize(&self) -> Self::Word;
    fn o_dsize(&self) -> Self::Word;
    fn o_bsize(&self) -> Self::Word;
    fn o_entry(&self) -> Self::Word;
    fn o_text_start(&self) -> Self::Word;
    fn o_data_start(&self) -> Self::Word;
    fn o_toc(&self) -> Self::Word;
    fn o_snentry(&self) -> u16;
    fn o_sntext(&self) -> u16;
    fn o_sndata(&self) -> u16;
    fn o_sntoc(&self) -> u16;
    fn o_snloader(&self) -> u16;
    fn o_snbss(&self) -> u16;
    fn o_algntext(&self) -> u16;
    fn o_algndata(&self) -> u16;
    fn o_modtype(&self) -> u16;
    fn o_cpuflag(&self) -> u8;
    fn o_cputype(&self) -> u8;
    fn o_maxstack(&self) -> Self::Word;
    fn o_maxdata(&self) -> Self::Word;
    fn o_debugger(&self) -> u32;
    fn o_textpsize(&self) -> u8;
    fn o_datapsize(&self) -> u8;
    fn o_stackpsize(&self) -> u8;
    fn o_flags(&self) -> u8;
    fn o_sntdata(&self) -> u16;
    fn o_sntbss(&self) -> u16;
    // `None` for 32-bit headers, which have no x64flags field.
    fn o_x64flags(&self) -> Option<u16>;
}
|
||||
|
||||
// Field accessors for the 32-bit auxiliary header. Multi-byte fields are
// big-endian (`.get(BE)`); single-byte fields are returned directly.
impl AuxHeader for xcoff::AuxHeader32 {
    type Word = u32;

    fn o_mflag(&self) -> u16 {
        self.o_mflag.get(BE)
    }

    fn o_vstamp(&self) -> u16 {
        self.o_vstamp.get(BE)
    }

    fn o_tsize(&self) -> Self::Word {
        self.o_tsize.get(BE)
    }

    fn o_dsize(&self) -> Self::Word {
        self.o_dsize.get(BE)
    }

    fn o_bsize(&self) -> Self::Word {
        self.o_bsize.get(BE)
    }

    fn o_entry(&self) -> Self::Word {
        self.o_entry.get(BE)
    }

    fn o_text_start(&self) -> Self::Word {
        self.o_text_start.get(BE)
    }

    fn o_data_start(&self) -> Self::Word {
        self.o_data_start.get(BE)
    }

    fn o_toc(&self) -> Self::Word {
        self.o_toc.get(BE)
    }

    fn o_snentry(&self) -> u16 {
        self.o_snentry.get(BE)
    }

    fn o_sntext(&self) -> u16 {
        self.o_sntext.get(BE)
    }

    fn o_sndata(&self) -> u16 {
        self.o_sndata.get(BE)
    }

    fn o_sntoc(&self) -> u16 {
        self.o_sntoc.get(BE)
    }

    fn o_snloader(&self) -> u16 {
        self.o_snloader.get(BE)
    }

    fn o_snbss(&self) -> u16 {
        self.o_snbss.get(BE)
    }

    fn o_algntext(&self) -> u16 {
        self.o_algntext.get(BE)
    }

    fn o_algndata(&self) -> u16 {
        self.o_algndata.get(BE)
    }

    fn o_modtype(&self) -> u16 {
        self.o_modtype.get(BE)
    }

    fn o_cpuflag(&self) -> u8 {
        self.o_cpuflag
    }

    fn o_cputype(&self) -> u8 {
        self.o_cputype
    }

    fn o_maxstack(&self) -> Self::Word {
        self.o_maxstack.get(BE)
    }

    fn o_maxdata(&self) -> Self::Word {
        self.o_maxdata.get(BE)
    }

    fn o_debugger(&self) -> u32 {
        self.o_debugger.get(BE)
    }

    fn o_textpsize(&self) -> u8 {
        self.o_textpsize
    }

    fn o_datapsize(&self) -> u8 {
        self.o_datapsize
    }

    fn o_stackpsize(&self) -> u8 {
        self.o_stackpsize
    }

    fn o_flags(&self) -> u8 {
        self.o_flags
    }

    fn o_sntdata(&self) -> u16 {
        self.o_sntdata.get(BE)
    }

    fn o_sntbss(&self) -> u16 {
        self.o_sntbss.get(BE)
    }

    // The 32-bit auxiliary header has no x64flags field.
    fn o_x64flags(&self) -> Option<u16> {
        None
    }
}
|
||||
|
||||
// Field accessors for the 64-bit auxiliary header. Multi-byte fields are
// big-endian (`.get(BE)`); single-byte fields are returned directly.
impl AuxHeader for xcoff::AuxHeader64 {
    type Word = u64;

    fn o_mflag(&self) -> u16 {
        self.o_mflag.get(BE)
    }

    fn o_vstamp(&self) -> u16 {
        self.o_vstamp.get(BE)
    }

    fn o_tsize(&self) -> Self::Word {
        self.o_tsize.get(BE)
    }

    fn o_dsize(&self) -> Self::Word {
        self.o_dsize.get(BE)
    }

    fn o_bsize(&self) -> Self::Word {
        self.o_bsize.get(BE)
    }

    fn o_entry(&self) -> Self::Word {
        self.o_entry.get(BE)
    }

    fn o_text_start(&self) -> Self::Word {
        self.o_text_start.get(BE)
    }

    fn o_data_start(&self) -> Self::Word {
        self.o_data_start.get(BE)
    }

    fn o_toc(&self) -> Self::Word {
        self.o_toc.get(BE)
    }

    fn o_snentry(&self) -> u16 {
        self.o_snentry.get(BE)
    }

    fn o_sntext(&self) -> u16 {
        self.o_sntext.get(BE)
    }

    fn o_sndata(&self) -> u16 {
        self.o_sndata.get(BE)
    }

    fn o_sntoc(&self) -> u16 {
        self.o_sntoc.get(BE)
    }

    fn o_snloader(&self) -> u16 {
        self.o_snloader.get(BE)
    }

    fn o_snbss(&self) -> u16 {
        self.o_snbss.get(BE)
    }

    fn o_algntext(&self) -> u16 {
        self.o_algntext.get(BE)
    }

    fn o_algndata(&self) -> u16 {
        self.o_algndata.get(BE)
    }

    fn o_modtype(&self) -> u16 {
        self.o_modtype.get(BE)
    }

    fn o_cpuflag(&self) -> u8 {
        self.o_cpuflag
    }

    fn o_cputype(&self) -> u8 {
        self.o_cputype
    }

    fn o_maxstack(&self) -> Self::Word {
        self.o_maxstack.get(BE)
    }

    fn o_maxdata(&self) -> Self::Word {
        self.o_maxdata.get(BE)
    }

    fn o_debugger(&self) -> u32 {
        self.o_debugger.get(BE)
    }

    fn o_textpsize(&self) -> u8 {
        self.o_textpsize
    }

    fn o_datapsize(&self) -> u8 {
        self.o_datapsize
    }

    fn o_stackpsize(&self) -> u8 {
        self.o_stackpsize
    }

    fn o_flags(&self) -> u8 {
        self.o_flags
    }

    fn o_sntdata(&self) -> u16 {
        self.o_sntdata.get(BE)
    }

    fn o_sntbss(&self) -> u16 {
        self.o_sntbss.get(BE)
    }

    // Only the 64-bit auxiliary header carries an x64flags field.
    fn o_x64flags(&self) -> Option<u16> {
        Some(self.o_x64flags.get(BE))
    }
}
|
||||
63
vendor/object/src/read/xcoff/mod.rs
vendored
Normal file
63
vendor/object/src/read/xcoff/mod.rs
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
//! Support for reading AIX XCOFF files.
//!
//! Traits are used to abstract over the difference between 32-bit and 64-bit XCOFF.
//! The primary trait for this is [`FileHeader`].
//!
//! ## High level API
//!
//! [`XcoffFile`] implements the [`Object`](crate::read::Object) trait for XCOFF files.
//! [`XcoffFile`] is parameterised by [`FileHeader`] to allow reading both 32-bit and
//! 64-bit XCOFF. There are type aliases for these parameters ([`XcoffFile32`] and
//! [`XcoffFile64`]).
//!
//! ## Low level API
//!
//! The [`FileHeader`] trait can be directly used to parse both [`xcoff::FileHeader32`]
//! and [`xcoff::FileHeader64`].
//!
//! ### Example for low level API
//! ```no_run
//! use object::xcoff;
//! use object::read::xcoff::{FileHeader, SectionHeader, Symbol};
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a file and displays the name of each section and symbol.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?;
//!     let mut offset = 0;
//!     let header = xcoff::FileHeader64::parse(&*data, &mut offset)?;
//!     let aux_header = header.aux_header(&*data, &mut offset)?;
//!     let sections = header.sections(&*data, &mut offset)?;
//!     let symbols = header.symbols(&*data)?;
//!     for section in sections.iter() {
//!         println!("{}", String::from_utf8_lossy(section.name()));
//!     }
//!     for (_index, symbol) in symbols.iter() {
//!         println!("{}", String::from_utf8_lossy(symbol.name(symbols.strings())?));
//!     }
//! # }
//!     Ok(())
//! }
//! ```
// `crate::xcoff` is referenced by the doc comments above, so pull it in for
// rustdoc builds only.
#[cfg(doc)]
use crate::xcoff;

mod file;
pub use file::*;

mod section;
pub use section::*;

mod symbol;
pub use symbol::*;

mod relocation;
pub use relocation::*;

mod comdat;
pub use comdat::*;

mod segment;
pub use segment::*;
|
||||
127
vendor/object/src/read/xcoff/relocation.rs
vendored
Normal file
127
vendor/object/src/read/xcoff/relocation.rs
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
use alloc::fmt;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
|
||||
use crate::pod::Pod;
|
||||
use crate::{xcoff, BigEndian as BE, Relocation};
|
||||
|
||||
use crate::read::{ReadRef, RelocationEncoding, RelocationKind, RelocationTarget, SymbolIndex};
|
||||
|
||||
use super::{FileHeader, SectionHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the relocations in an [`XcoffSection32`](super::XcoffSection32).
pub type XcoffRelocationIterator32<'data, 'file, R = &'data [u8]> =
    XcoffRelocationIterator<'data, 'file, xcoff::FileHeader32, R>;
/// An iterator for the relocations in an [`XcoffSection64`](super::XcoffSection64).
pub type XcoffRelocationIterator64<'data, 'file, R = &'data [u8]> =
    XcoffRelocationIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the relocations in an [`XcoffSection`](super::XcoffSection).
pub struct XcoffRelocationIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Currently unused, but keeps the iterator tied to the file's lifetimes.
    #[allow(unused)]
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // Raw relocation entries of the section being iterated.
    pub(super) relocations:
        slice::Iter<'data, <<Xcoff as FileHeader>::SectionHeader as SectionHeader>::Rel>,
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffRelocationIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.relocations.next().map(|relocation| {
|
||||
let encoding = RelocationEncoding::Generic;
|
||||
let (kind, addend) = match relocation.r_rtype() {
|
||||
xcoff::R_POS
|
||||
| xcoff::R_RL
|
||||
| xcoff::R_RLA
|
||||
| xcoff::R_BA
|
||||
| xcoff::R_RBA
|
||||
| xcoff::R_TLS => (RelocationKind::Absolute, 0),
|
||||
xcoff::R_REL | xcoff::R_BR | xcoff::R_RBR => (RelocationKind::Relative, -4),
|
||||
xcoff::R_TOC | xcoff::R_TOCL | xcoff::R_TOCU => (RelocationKind::Got, 0),
|
||||
r_type => (RelocationKind::Xcoff(r_type), 0),
|
||||
};
|
||||
let size = (relocation.r_rsize() & 0x3F) + 1;
|
||||
let target = RelocationTarget::Symbol(SymbolIndex(relocation.r_symndx() as usize));
|
||||
(
|
||||
relocation.r_vaddr().into(),
|
||||
Relocation {
|
||||
kind,
|
||||
encoding,
|
||||
size,
|
||||
target,
|
||||
addend,
|
||||
implicit_addend: true,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Manual Debug impl: prints only the struct name, no fields.
impl<'data, 'file, Xcoff, R> fmt::Debug for XcoffRelocationIterator<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("XcoffRelocationIterator").finish()
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::Rel32`] and [`xcoff::Rel64`].
#[allow(missing_docs)]
pub trait Rel: Debug + Pod {
    // Word is u32 for 32-bit XCOFF and u64 for 64-bit XCOFF.
    type Word: Into<u64>;
    fn r_vaddr(&self) -> Self::Word;
    fn r_symndx(&self) -> u32;
    fn r_rsize(&self) -> u8;
    fn r_rtype(&self) -> u8;
}

// Accessors for 32-bit relocation entries; multi-byte fields are big-endian.
impl Rel for xcoff::Rel32 {
    type Word = u32;

    fn r_vaddr(&self) -> Self::Word {
        self.r_vaddr.get(BE)
    }

    fn r_symndx(&self) -> u32 {
        self.r_symndx.get(BE)
    }

    fn r_rsize(&self) -> u8 {
        self.r_rsize
    }

    fn r_rtype(&self) -> u8 {
        self.r_rtype
    }
}

// Accessors for 64-bit relocation entries; multi-byte fields are big-endian.
impl Rel for xcoff::Rel64 {
    type Word = u64;

    fn r_vaddr(&self) -> Self::Word {
        self.r_vaddr.get(BE)
    }

    fn r_symndx(&self) -> u32 {
        self.r_symndx.get(BE)
    }

    fn r_rsize(&self) -> u8 {
        self.r_rsize
    }

    fn r_rtype(&self) -> u8 {
        self.r_rtype
    }
}
|
||||
431
vendor/object/src/read/xcoff/section.rs
vendored
Normal file
431
vendor/object/src/read/xcoff/section.rs
vendored
Normal file
@@ -0,0 +1,431 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{iter, result, slice, str};
|
||||
|
||||
use crate::{
|
||||
xcoff, BigEndian as BE, CompressedData, CompressedFileRange, Pod, SectionFlags, SectionKind,
|
||||
};
|
||||
|
||||
use crate::read::{self, Error, ObjectSection, ReadError, ReadRef, Result, SectionIndex};
|
||||
|
||||
use super::{AuxHeader, FileHeader, Rel, XcoffFile, XcoffRelocationIterator};
|
||||
|
||||
/// An iterator for the sections in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSectionIterator32<'data, 'file, R = &'data [u8]> =
    XcoffSectionIterator<'data, 'file, xcoff::FileHeader32, R>;
/// An iterator for the sections in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSectionIterator64<'data, 'file, R = &'data [u8]> =
    XcoffSectionIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the sections in an [`XcoffFile`].
#[derive(Debug)]
pub struct XcoffSectionIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // Enumerated raw section headers; the enumeration index is 0-based.
    pub(super) iter: iter::Enumerate<slice::Iter<'data, Xcoff::SectionHeader>>,
}

impl<'data, 'file, Xcoff, R> Iterator for XcoffSectionIterator<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type Item = XcoffSection<'data, 'file, Xcoff, R>;

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|(index, section)| XcoffSection {
            // Section indices are 1-based, so shift the 0-based
            // enumeration index.
            index: SectionIndex(index + 1),
            file: self.file,
            section,
        })
    }
}
|
||||
|
||||
/// A section in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSection32<'data, 'file, R = &'data [u8]> =
    XcoffSection<'data, 'file, xcoff::FileHeader32, R>;
/// A section in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSection64<'data, 'file, R = &'data [u8]> =
    XcoffSection<'data, 'file, xcoff::FileHeader64, R>;

/// A section in an [`XcoffFile`].
///
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
#[derive(Debug)]
pub struct XcoffSection<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // The file this section belongs to.
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // The raw section header.
    pub(super) section: &'data Xcoff::SectionHeader,
    // 1-based section index.
    pub(super) index: SectionIndex,
}

impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> XcoffSection<'data, 'file, Xcoff, R> {
    // Read the section contents from the file data, mapping any
    // out-of-range offset/size to a read error.
    fn bytes(&self) -> Result<&'data [u8]> {
        self.section
            .data(self.file.data)
            .read_error("Invalid XCOFF section offset or size")
    }
}

impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffSection<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> ObjectSection<'data> for XcoffSection<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type RelocationIterator = XcoffRelocationIterator<'data, 'file, Xcoff, R>;

    fn index(&self) -> SectionIndex {
        self.index
    }

    fn address(&self) -> u64 {
        self.section.s_paddr().into()
    }

    fn size(&self) -> u64 {
        self.section.s_size().into()
    }

    // Text/data alignment comes from the auxiliary header when present;
    // otherwise (and for other section kinds) the default alignment is 4.
    fn align(&self) -> u64 {
        // The default section alignment is 4.
        if let Some(aux_header) = self.file.aux_header {
            match self.kind() {
                SectionKind::Text => aux_header.o_algntext().into(),
                SectionKind::Data => aux_header.o_algndata().into(),
                _ => 4,
            }
        } else {
            4
        }
    }

    fn file_range(&self) -> Option<(u64, u64)> {
        self.section.file_range()
    }

    fn data(&self) -> Result<&'data [u8]> {
        self.bytes()
    }

    fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
        Ok(read::util::data_range(
            self.bytes()?,
            self.address(),
            address,
            size,
        ))
    }

    // XCOFF sections are never compressed, so these just wrap the raw data.
    fn compressed_file_range(&self) -> Result<CompressedFileRange> {
        Ok(CompressedFileRange::none(self.file_range()))
    }

    fn compressed_data(&self) -> Result<CompressedData<'data>> {
        self.data().map(CompressedData::none)
    }

    fn name_bytes(&self) -> read::Result<&[u8]> {
        Ok(self.section.name())
    }

    fn name(&self) -> read::Result<&str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 XCOFF section name")
    }

    // XCOFF has no segment concept at the section level.
    fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
        Ok(None)
    }

    fn segment_name(&self) -> Result<Option<&str>> {
        Ok(None)
    }

    // Classify the section from the STYP_* type flags, which live in the
    // low 16 bits of s_flags (hence the truncating cast).
    fn kind(&self) -> SectionKind {
        let section_type = self.section.s_flags() as u16;
        if section_type & xcoff::STYP_TEXT != 0 {
            SectionKind::Text
        } else if section_type & xcoff::STYP_DATA != 0 {
            SectionKind::Data
        } else if section_type & xcoff::STYP_TDATA != 0 {
            SectionKind::Tls
        } else if section_type & xcoff::STYP_BSS != 0 {
            SectionKind::UninitializedData
        } else if section_type & xcoff::STYP_TBSS != 0 {
            SectionKind::UninitializedTls
        } else if section_type & (xcoff::STYP_DEBUG | xcoff::STYP_DWARF) != 0 {
            SectionKind::Debug
        } else if section_type & (xcoff::STYP_LOADER | xcoff::STYP_OVRFLO) != 0 {
            SectionKind::Metadata
        } else if section_type
            & (xcoff::STYP_INFO | xcoff::STYP_EXCEPT | xcoff::STYP_PAD | xcoff::STYP_TYPCHK)
            != 0
        {
            SectionKind::Other
        } else {
            SectionKind::Unknown
        }
    }

    fn relocations(&self) -> Self::RelocationIterator {
        // A failure to read the relocation table is reported as an empty
        // iterator rather than an error.
        let rel = self.section.relocations(self.file.data).unwrap_or(&[]);
        XcoffRelocationIterator {
            file: self.file,
            relocations: rel.iter(),
        }
    }

    fn flags(&self) -> SectionFlags {
        SectionFlags::Xcoff {
            s_flags: self.section.s_flags(),
        }
    }

    // Decompression is a no-op since compressed_data is always `none`.
    fn uncompressed_data(&self) -> Result<alloc::borrow::Cow<'data, [u8]>> {
        self.compressed_data()?.decompress()
    }
}
|
||||
|
||||
/// The table of section headers in an XCOFF file.
///
/// Returned by [`FileHeader::sections`].
#[derive(Debug, Clone, Copy)]
pub struct SectionTable<'data, Xcoff: FileHeader> {
    // Borrowed slice of section headers from the file data.
    sections: &'data [Xcoff::SectionHeader],
}
|
||||
|
||||
impl<'data, Xcoff> Default for SectionTable<'data, Xcoff>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self { sections: &[] }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Xcoff> SectionTable<'data, Xcoff>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
{
|
||||
/// Parse the section table.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
/// `offset` must be after the optional file header.
|
||||
pub fn parse<R: ReadRef<'data>>(header: &Xcoff, data: R, offset: &mut u64) -> Result<Self> {
|
||||
let section_num = header.f_nscns();
|
||||
if section_num == 0 {
|
||||
return Ok(SectionTable::default());
|
||||
}
|
||||
let sections = data
|
||||
.read_slice(offset, section_num as usize)
|
||||
.read_error("Invalid XCOFF section headers")?;
|
||||
Ok(SectionTable { sections })
|
||||
}
|
||||
|
||||
/// Iterate over the section headers.
|
||||
#[inline]
|
||||
pub fn iter(&self) -> slice::Iter<'data, Xcoff::SectionHeader> {
|
||||
self.sections.iter()
|
||||
}
|
||||
|
||||
/// Return true if the section table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.sections.is_empty()
|
||||
}
|
||||
|
||||
/// The number of section headers.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.sections.len()
|
||||
}
|
||||
|
||||
/// Return the section header at the given index.
|
||||
///
|
||||
/// The index is 1-based.
|
||||
pub fn section(&self, index: SectionIndex) -> read::Result<&'data Xcoff::SectionHeader> {
|
||||
self.sections
|
||||
.get(index.0.wrapping_sub(1))
|
||||
.read_error("Invalid XCOFF section index")
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::SectionHeader32`] and [`xcoff::SectionHeader64`].
#[allow(missing_docs)]
pub trait SectionHeader: Debug + Pod {
    // Word is u32 for XCOFF32, u64 for XCOFF64.
    type Word: Into<u64>;
    // HalfWord is u16 for XCOFF32, u32 for XCOFF64.
    type HalfWord: Into<u32>;
    type Xcoff: FileHeader<SectionHeader = Self, Word = Self::Word>;
    type Rel: Rel<Word = Self::Word>;

    fn s_name(&self) -> &[u8; 8];
    fn s_paddr(&self) -> Self::Word;
    fn s_vaddr(&self) -> Self::Word;
    fn s_size(&self) -> Self::Word;
    fn s_scnptr(&self) -> Self::Word;
    fn s_relptr(&self) -> Self::Word;
    fn s_lnnoptr(&self) -> Self::Word;
    fn s_nreloc(&self) -> Self::HalfWord;
    fn s_nlnno(&self) -> Self::HalfWord;
    fn s_flags(&self) -> u32;

    /// Return the section name.
    ///
    /// The raw 8-byte name is null-padded; truncate at the first null if any.
    fn name(&self) -> &[u8] {
        let sectname = &self.s_name()[..];
        match memchr::memchr(b'\0', sectname) {
            Some(end) => &sectname[..end],
            None => sectname,
        }
    }

    /// Return the offset and size of the section in the file.
    fn file_range(&self) -> Option<(u64, u64)> {
        Some((self.s_scnptr().into(), self.s_size().into()))
    }

    /// Return the section data.
    ///
    /// Returns `Ok(&[])` if the section has no data.
    /// Returns `Err` for invalid values.
    fn data<'data, R: ReadRef<'data>>(&self, data: R) -> result::Result<&'data [u8], ()> {
        if let Some((offset, size)) = self.file_range() {
            data.read_bytes_at(offset, size)
        } else {
            Ok(&[])
        }
    }

    /// Read the relocations.
    fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]>;
}
|
||||
|
||||
// All multi-byte fields in XCOFF are big-endian; each accessor decodes with BE.
impl SectionHeader for xcoff::SectionHeader32 {
    type Word = u32;
    type HalfWord = u16;
    type Xcoff = xcoff::FileHeader32;
    type Rel = xcoff::Rel32;

    fn s_name(&self) -> &[u8; 8] {
        &self.s_name
    }

    fn s_paddr(&self) -> Self::Word {
        self.s_paddr.get(BE)
    }

    fn s_vaddr(&self) -> Self::Word {
        self.s_vaddr.get(BE)
    }

    fn s_size(&self) -> Self::Word {
        self.s_size.get(BE)
    }

    fn s_scnptr(&self) -> Self::Word {
        self.s_scnptr.get(BE)
    }

    fn s_relptr(&self) -> Self::Word {
        self.s_relptr.get(BE)
    }

    fn s_lnnoptr(&self) -> Self::Word {
        self.s_lnnoptr.get(BE)
    }

    fn s_nreloc(&self) -> Self::HalfWord {
        self.s_nreloc.get(BE)
    }

    fn s_nlnno(&self) -> Self::HalfWord {
        self.s_nlnno.get(BE)
    }

    fn s_flags(&self) -> u32 {
        self.s_flags.get(BE)
    }

    /// Read the relocations in a XCOFF32 file.
    ///
    /// `data` must be the entire file data.
    fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]> {
        let reloc_num = self.s_nreloc() as usize;
        // TODO: If more than 65,534 relocation entries are required, the field value will be 65535,
        // and an STYP_OVRFLO section header will contain the actual count of relocation entries in
        // the s_paddr field.
        if reloc_num == 65535 {
            return Err(Error("Overflow section is not supported yet."));
        }
        data.read_slice_at(self.s_relptr().into(), reloc_num)
            .read_error("Invalid XCOFF relocation offset or number")
    }
}
|
||||
|
||||
// All multi-byte fields in XCOFF are big-endian; each accessor decodes with BE.
impl SectionHeader for xcoff::SectionHeader64 {
    type Word = u64;
    type HalfWord = u32;
    type Xcoff = xcoff::FileHeader64;
    type Rel = xcoff::Rel64;

    fn s_name(&self) -> &[u8; 8] {
        &self.s_name
    }

    fn s_paddr(&self) -> Self::Word {
        self.s_paddr.get(BE)
    }

    fn s_vaddr(&self) -> Self::Word {
        self.s_vaddr.get(BE)
    }

    fn s_size(&self) -> Self::Word {
        self.s_size.get(BE)
    }

    fn s_scnptr(&self) -> Self::Word {
        self.s_scnptr.get(BE)
    }

    fn s_relptr(&self) -> Self::Word {
        self.s_relptr.get(BE)
    }

    fn s_lnnoptr(&self) -> Self::Word {
        self.s_lnnoptr.get(BE)
    }

    fn s_nreloc(&self) -> Self::HalfWord {
        self.s_nreloc.get(BE)
    }

    fn s_nlnno(&self) -> Self::HalfWord {
        self.s_nlnno.get(BE)
    }

    fn s_flags(&self) -> u32 {
        self.s_flags.get(BE)
    }

    /// Read the relocations in a XCOFF64 file.
    ///
    /// `data` must be the entire file data.
    // Unlike XCOFF32, s_nreloc is 32 bits here, so no overflow-section handling is needed.
    fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]> {
        data.read_slice_at(self.s_relptr(), self.s_nreloc() as usize)
            .read_error("Invalid XCOFF relocation offset or number")
    }
}
|
||||
117
vendor/object/src/read/xcoff/segment.rs
vendored
Normal file
117
vendor/object/src/read/xcoff/segment.rs
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
//! TODO: Support the segment for XCOFF when auxiliary file header and loader section is ready.
|
||||
|
||||
use core::fmt::Debug;
|
||||
use core::str;
|
||||
|
||||
use crate::read::{self, ObjectSegment, ReadRef, Result};
|
||||
use crate::xcoff;
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the segments in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSegmentIterator32<'data, 'file, R = &'data [u8]> =
    XcoffSegmentIterator<'data, 'file, xcoff::FileHeader32, R>;
/// An iterator for the segments in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSegmentIterator64<'data, 'file, R = &'data [u8]> =
    XcoffSegmentIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the segments in an [`XcoffFile`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct XcoffSegmentIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Kept so the stub has the same shape as the other format iterators.
    #[allow(unused)]
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffSegmentIterator<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    type Item = XcoffSegment<'data, 'file, Xcoff, R>;

    // Stub: XCOFF segment support is not implemented, so the iterator is
    // always empty. This also means `XcoffSegment` methods are never reached.
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}
|
||||
|
||||
/// A segment in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSegment32<'data, 'file, R = &'data [u8]> =
    XcoffSegment<'data, 'file, xcoff::FileHeader32, R>;
/// A segment in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSegment64<'data, 'file, R = &'data [u8]> =
    XcoffSegment<'data, 'file, xcoff::FileHeader64, R>;

/// A loadable section in an [`XcoffFile`].
///
/// This is a stub that doesn't implement any functionality.
#[derive(Debug)]
pub struct XcoffSegment<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Kept so the stub has the same shape as the other format segment types.
    #[allow(unused)]
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
}

// Intentionally empty: no inherent methods until segment support lands.
impl<'data, 'file, Xcoff, R> XcoffSegment<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
// Marker impl required by the crate's sealed-trait pattern so that
// `ObjectSegment` may be implemented for `XcoffSegment` below.
impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffSegment<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
}
|
||||
|
||||
// Stub trait impl. `XcoffSegmentIterator::next` always returns `None`, so a
// `XcoffSegment` value is never produced and none of these methods can be
// called; each body is therefore `unreachable!()`.
impl<'data, 'file, Xcoff, R> ObjectSegment<'data> for XcoffSegment<'data, 'file, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    fn address(&self) -> u64 {
        unreachable!();
    }

    fn size(&self) -> u64 {
        unreachable!();
    }

    fn align(&self) -> u64 {
        unreachable!();
    }

    fn file_range(&self) -> (u64, u64) {
        unreachable!();
    }

    fn data(&self) -> Result<&'data [u8]> {
        unreachable!();
    }

    fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
        unreachable!();
    }

    fn name_bytes(&self) -> Result<Option<&[u8]>> {
        unreachable!();
    }

    fn name(&self) -> Result<Option<&str>> {
        unreachable!();
    }

    fn flags(&self) -> crate::SegmentFlags {
        unreachable!();
    }
}
|
||||
786
vendor/object/src/read/xcoff/symbol.rs
vendored
Normal file
786
vendor/object/src/read/xcoff/symbol.rs
vendored
Normal file
@@ -0,0 +1,786 @@
|
||||
use alloc::fmt;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::marker::PhantomData;
|
||||
use core::str;
|
||||
|
||||
use crate::endian::{BigEndian as BE, U32Bytes};
|
||||
use crate::pod::{bytes_of, Pod};
|
||||
use crate::read::util::StringTable;
|
||||
use crate::xcoff;
|
||||
|
||||
use crate::read::{
|
||||
self, Bytes, Error, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result, SectionIndex,
|
||||
SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// A table of symbol entries in an XCOFF file.
///
/// Also includes the string table used for the symbol names.
///
/// Returned by [`FileHeader::symbols`].
#[derive(Debug)]
pub struct SymbolTable<'data, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // Raw symbol entries; auxiliary entries are interleaved with symbols,
    // so indices here are entry indices, not symbol counts.
    symbols: &'data [xcoff::SymbolBytes],
    // String table holding long symbol names.
    strings: StringTable<'data, R>,
    // Ties the table to the 32/64-bit header type without storing one.
    header: PhantomData<Xcoff>,
}
|
||||
|
||||
impl<'data, Xcoff, R> Default for SymbolTable<'data, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    /// An empty symbol table with an empty string table.
    fn default() -> Self {
        Self {
            symbols: &[],
            strings: StringTable::default(),
            header: PhantomData,
        }
    }
}
|
||||
|
||||
impl<'data, Xcoff, R> SymbolTable<'data, Xcoff, R>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    /// Parse the symbol table.
    ///
    /// A zero `f_symptr` means the file has no symbol table; an empty table
    /// is returned in that case.
    pub fn parse(header: Xcoff, data: R) -> Result<Self> {
        let mut offset = header.f_symptr().into();
        let (symbols, strings) = if offset != 0 {
            let symbols = data
                .read_slice(&mut offset, header.f_nsyms() as usize)
                .read_error("Invalid XCOFF symbol table offset or size")?;

            // Parse the string table.
            // Note: don't update data when reading length; the length includes itself.
            let length = data
                .read_at::<U32Bytes<_>>(offset)
                .read_error("Missing XCOFF string table")?
                .get(BE);
            let str_end = offset
                .checked_add(length as u64)
                .read_error("Invalid XCOFF string table length")?;
            let strings = StringTable::new(data, offset, str_end);

            (symbols, strings)
        } else {
            (&[][..], StringTable::default())
        };

        Ok(SymbolTable {
            symbols,
            strings,
            header: PhantomData,
        })
    }

    /// Return the string table used for the symbol names.
    #[inline]
    pub fn strings(&self) -> StringTable<'data, R> {
        self.strings
    }

    /// Iterate over the symbols.
    #[inline]
    pub fn iter<'table>(&'table self) -> SymbolIterator<'data, 'table, Xcoff, R> {
        SymbolIterator {
            symbols: self,
            index: 0,
        }
    }

    /// Empty symbol iterator.
    // Starts at the end of the table so the first `next()` already fails.
    #[inline]
    pub(super) fn iter_none<'table>(&'table self) -> SymbolIterator<'data, 'table, Xcoff, R> {
        SymbolIterator {
            symbols: self,
            index: self.symbols.len(),
        }
    }

    /// Return the symbol entry at the given index and offset.
    ///
    /// `offset` is in entries relative to `index` (used to reach auxiliary
    /// entries). Reinterprets the raw entry bytes as `T`.
    pub fn get<T: Pod>(&self, index: usize, offset: usize) -> Result<&'data T> {
        let entry = index
            .checked_add(offset)
            .and_then(|x| self.symbols.get(x))
            .read_error("Invalid XCOFF symbol index")?;
        let bytes = bytes_of(entry);
        Bytes(bytes).read().read_error("Invalid XCOFF symbol data")
    }

    /// Return the symbol at the given index.
    pub fn symbol(&self, index: usize) -> Result<&'data Xcoff::Symbol> {
        self.get::<Xcoff::Symbol>(index, 0)
    }

    /// Return a file auxiliary symbol.
    pub fn aux_file(&self, index: usize, offset: usize) -> Result<&'data Xcoff::FileAux> {
        debug_assert!(self.symbol(index)?.has_aux_file());
        let aux_file = self.get::<Xcoff::FileAux>(index, offset)?;
        // XCOFF64 entries carry an explicit aux type tag; XCOFF32 has none,
        // in which case `x_auxtype()` is `None` and no check is possible.
        if let Some(aux_type) = aux_file.x_auxtype() {
            if aux_type != xcoff::AUX_FILE {
                return Err(Error("Invalid index for file auxiliary symbol."));
            }
        }
        Ok(aux_file)
    }

    /// Return the csect auxiliary symbol.
    pub fn aux_csect(&self, index: usize, offset: usize) -> Result<&'data Xcoff::CsectAux> {
        debug_assert!(self.symbol(index)?.has_aux_csect());
        let aux_csect = self.get::<Xcoff::CsectAux>(index, offset)?;
        // See `aux_file` for the aux-type tag rationale.
        if let Some(aux_type) = aux_csect.x_auxtype() {
            if aux_type != xcoff::AUX_CSECT {
                return Err(Error("Invalid index/offset for csect auxiliary symbol."));
            }
        }
        Ok(aux_csect)
    }

    /// Return true if the symbol table is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.symbols.is_empty()
    }

    /// The number of symbol table entries.
    ///
    /// This includes auxiliary symbol table entries.
    #[inline]
    pub fn len(&self) -> usize {
        self.symbols.len()
    }
}
|
||||
|
||||
/// An iterator for symbol entries in an XCOFF file.
///
/// Yields the index and symbol structure for each symbol.
#[derive(Debug)]
pub struct SymbolIterator<'data, 'table, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    symbols: &'table SymbolTable<'data, Xcoff, R>,
    // Current entry index; advanced past auxiliary entries on each step.
    index: usize,
}
|
||||
|
||||
impl<'data, 'table, Xcoff: FileHeader, R: ReadRef<'data>> Iterator
|
||||
for SymbolIterator<'data, 'table, Xcoff, R>
|
||||
{
|
||||
type Item = (SymbolIndex, &'data Xcoff::Symbol);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.n_numaux() as usize;
|
||||
Some((SymbolIndex(index), symbol))
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol table in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSymbolTable32<'data, 'file, R = &'data [u8]> =
    XcoffSymbolTable<'data, 'file, xcoff::FileHeader32, R>;
/// A symbol table in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSymbolTable64<'data, 'file, R = &'data [u8]> =
    XcoffSymbolTable<'data, 'file, xcoff::FileHeader64, R>;

/// A symbol table in an [`XcoffFile`].
#[derive(Debug, Clone, Copy)]
pub struct XcoffSymbolTable<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    // The containing file, needed to build `XcoffSymbol` values.
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // The underlying parsed symbol table.
    pub(super) symbols: &'file SymbolTable<'data, Xcoff, R>,
}
|
||||
|
||||
// Marker impl required by the crate's sealed-trait pattern.
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> read::private::Sealed
    for XcoffSymbolTable<'data, 'file, Xcoff, R>
{
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> ObjectSymbolTable<'data>
    for XcoffSymbolTable<'data, 'file, Xcoff, R>
{
    type Symbol = XcoffSymbol<'data, 'file, Xcoff, R>;
    type SymbolIterator = XcoffSymbolIterator<'data, 'file, Xcoff, R>;

    fn symbols(&self) -> Self::SymbolIterator {
        XcoffSymbolIterator {
            file: self.file,
            symbols: self.symbols.iter(),
        }
    }

    // `index` is an entry index into the symbol table (auxiliary entries count).
    fn symbol_by_index(&self, index: SymbolIndex) -> read::Result<Self::Symbol> {
        let symbol = self.symbols.symbol(index.0)?;
        Ok(XcoffSymbol {
            file: self.file,
            symbols: self.symbols,
            index,
            symbol,
        })
    }
}
|
||||
|
||||
/// An iterator for the symbols in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSymbolIterator32<'data, 'file, R = &'data [u8]> =
    XcoffSymbolIterator<'data, 'file, xcoff::FileHeader32, R>;
/// An iterator for the symbols in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSymbolIterator64<'data, 'file, R = &'data [u8]> =
    XcoffSymbolIterator<'data, 'file, xcoff::FileHeader64, R>;

/// An iterator for the symbols in an [`XcoffFile`].
pub struct XcoffSymbolIterator<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // Underlying entry-level iterator; this wrapper turns its items into
    // `XcoffSymbol` values.
    pub(super) symbols: SymbolIterator<'data, 'file, Xcoff, R>,
}
|
||||
|
||||
// Manual Debug: the file/table references are not useful to print, so only
// the type name is emitted.
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> fmt::Debug
    for XcoffSymbolIterator<'data, 'file, Xcoff, R>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("XcoffSymbolIterator").finish()
    }
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> Iterator
    for XcoffSymbolIterator<'data, 'file, Xcoff, R>
{
    type Item = XcoffSymbol<'data, 'file, Xcoff, R>;

    // Wrap each raw (index, symbol) pair from the entry iterator into a
    // full `XcoffSymbol` carrying the file and table references.
    fn next(&mut self) -> Option<Self::Item> {
        let (index, symbol) = self.symbols.next()?;
        Some(XcoffSymbol {
            file: self.file,
            symbols: self.symbols.symbols,
            index,
            symbol,
        })
    }
}
|
||||
|
||||
/// A symbol in an [`XcoffFile32`](super::XcoffFile32).
pub type XcoffSymbol32<'data, 'file, R = &'data [u8]> =
    XcoffSymbol<'data, 'file, xcoff::FileHeader32, R>;
/// A symbol in an [`XcoffFile64`](super::XcoffFile64).
pub type XcoffSymbol64<'data, 'file, R = &'data [u8]> =
    XcoffSymbol<'data, 'file, xcoff::FileHeader64, R>;

/// A symbol in an [`XcoffFile`].
///
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
#[derive(Debug, Clone, Copy)]
pub struct XcoffSymbol<'data, 'file, Xcoff, R = &'data [u8]>
where
    Xcoff: FileHeader,
    R: ReadRef<'data>,
{
    pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
    // Table the symbol came from, used to look up auxiliary entries and names.
    pub(super) symbols: &'file SymbolTable<'data, Xcoff, R>,
    // Entry index of this symbol within the table.
    pub(super) index: SymbolIndex,
    pub(super) symbol: &'data Xcoff::Symbol,
}
|
||||
|
||||
// Marker impl required by the crate's sealed-trait pattern.
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> read::private::Sealed
    for XcoffSymbol<'data, 'file, Xcoff, R>
{
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> ObjectSymbol<'data>
    for XcoffSymbol<'data, 'file, Xcoff, R>
{
    #[inline]
    fn index(&self) -> SymbolIndex {
        self.index
    }

    fn name_bytes(&self) -> Result<&'data [u8]> {
        if self.symbol.has_aux_file() {
            // By convention the file name is in the first auxiliary entry.
            self.symbols
                .aux_file(self.index.0, 1)?
                .fname(self.symbols.strings)
        } else {
            self.symbol.name(self.symbols.strings)
        }
    }

    fn name(&self) -> Result<&'data str> {
        let name = self.name_bytes()?;
        str::from_utf8(name)
            .ok()
            .read_error("Non UTF-8 XCOFF symbol name")
    }

    #[inline]
    fn address(&self) -> u64 {
        // Only storage classes whose n_value holds an address yield one;
        // for all other classes n_value has a different meaning, so return 0.
        match self.symbol.n_sclass() {
            // Relocatable address.
            xcoff::C_EXT
            | xcoff::C_WEAKEXT
            | xcoff::C_HIDEXT
            | xcoff::C_FCN
            | xcoff::C_BLOCK
            | xcoff::C_STAT
            | xcoff::C_INFO => self.symbol.n_value().into(),
            _ => 0,
        }
    }

    #[inline]
    fn size(&self) -> u64 {
        // Only csect definitions (XTY_SD) and common blocks (XTY_CM) carry a
        // length in the csect auxiliary entry; everything else reports 0.
        if self.symbol.has_aux_csect() {
            // XCOFF32 must have the csect auxiliary entry as the last auxiliary entry.
            // XCOFF64 doesn't require this, but conventionally does.
            if let Ok(aux_csect) = self
                .file
                .symbols
                .aux_csect(self.index.0, self.symbol.n_numaux() as usize)
            {
                let sym_type = aux_csect.sym_type();
                if sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_CM {
                    return aux_csect.x_scnlen();
                }
            }
        }
        0
    }

    fn kind(&self) -> SymbolKind {
        // Prefer classification via the csect auxiliary entry's storage
        // mapping class; fall back to the storage class for symbols without one.
        if self.symbol.has_aux_csect() {
            if let Ok(aux_csect) = self
                .file
                .symbols
                .aux_csect(self.index.0, self.symbol.n_numaux() as usize)
            {
                let sym_type = aux_csect.sym_type();
                if sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_CM {
                    return match aux_csect.x_smclas() {
                        xcoff::XMC_PR | xcoff::XMC_GL => SymbolKind::Text,
                        xcoff::XMC_RO | xcoff::XMC_RW | xcoff::XMC_TD | xcoff::XMC_BS => {
                            SymbolKind::Data
                        }
                        xcoff::XMC_TL | xcoff::XMC_UL => SymbolKind::Tls,
                        xcoff::XMC_DS | xcoff::XMC_TC0 | xcoff::XMC_TC => {
                            // `Metadata` might be a better kind for these if we had it.
                            SymbolKind::Data
                        }
                        _ => SymbolKind::Unknown,
                    };
                } else if sym_type == xcoff::XTY_LD {
                    // A function entry point. Neither `Text` nor `Label` are a good fit for this.
                    return SymbolKind::Text;
                } else if sym_type == xcoff::XTY_ER {
                    return SymbolKind::Unknown;
                }
            }
        }
        match self.symbol.n_sclass() {
            xcoff::C_NULL => SymbolKind::Null,
            xcoff::C_FILE => SymbolKind::File,
            _ => SymbolKind::Unknown,
        }
    }

    fn section(&self) -> SymbolSection {
        // n_scnum: special negative/zero values, otherwise a 1-based section index.
        match self.symbol.n_scnum() {
            xcoff::N_ABS => SymbolSection::Absolute,
            xcoff::N_UNDEF => SymbolSection::Undefined,
            xcoff::N_DEBUG => SymbolSection::None,
            index if index > 0 => SymbolSection::Section(SectionIndex(index as usize)),
            _ => SymbolSection::Unknown,
        }
    }

    #[inline]
    fn is_undefined(&self) -> bool {
        self.symbol.is_undefined()
    }

    /// Return true if the symbol is a definition of a function or data object.
    #[inline]
    fn is_definition(&self) -> bool {
        // Must be in a real section, and its csect aux type must be a
        // section definition, entry point, or common block.
        if self.symbol.n_scnum() <= 0 {
            return false;
        }
        if self.symbol.has_aux_csect() {
            if let Ok(aux_csect) = self
                .symbols
                .aux_csect(self.index.0, self.symbol.n_numaux() as usize)
            {
                let sym_type = aux_csect.sym_type();
                sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_LD || sym_type == xcoff::XTY_CM
            } else {
                false
            }
        } else {
            false
        }
    }

    #[inline]
    fn is_common(&self) -> bool {
        // External symbol with no section: a common (tentative) definition.
        self.symbol.n_sclass() == xcoff::C_EXT && self.symbol.n_scnum() == xcoff::N_UNDEF
    }

    #[inline]
    fn is_weak(&self) -> bool {
        self.symbol.n_sclass() == xcoff::C_WEAKEXT
    }

    fn scope(&self) -> SymbolScope {
        if self.symbol.n_scnum() == xcoff::N_UNDEF {
            SymbolScope::Unknown
        } else {
            match self.symbol.n_sclass() {
                xcoff::C_EXT | xcoff::C_WEAKEXT => {
                    // Visibility bits are stored in n_type.
                    let visibility = self.symbol.n_type() & xcoff::SYM_V_MASK;
                    if visibility == xcoff::SYM_V_HIDDEN {
                        SymbolScope::Linkage
                    } else {
                        SymbolScope::Dynamic
                    }
                }
                _ => SymbolScope::Compilation,
            }
        }
    }

    #[inline]
    fn is_global(&self) -> bool {
        match self.symbol.n_sclass() {
            xcoff::C_EXT | xcoff::C_WEAKEXT => true,
            _ => false,
        }
    }

    #[inline]
    fn is_local(&self) -> bool {
        !self.is_global()
    }

    #[inline]
    fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
        // Default to zeroed csect info when no csect aux entry exists.
        let mut x_smtyp = 0;
        let mut x_smclas = 0;
        let mut containing_csect = None;
        if self.symbol.has_aux_csect() {
            if let Ok(aux_csect) = self
                .file
                .symbols
                .aux_csect(self.index.0, self.symbol.n_numaux() as usize)
            {
                x_smtyp = aux_csect.x_smtyp();
                x_smclas = aux_csect.x_smclas();
                if aux_csect.sym_type() == xcoff::XTY_LD {
                    // For label definitions, x_scnlen holds the symbol index
                    // of the containing csect rather than a length.
                    containing_csect = Some(SymbolIndex(aux_csect.x_scnlen() as usize))
                }
            }
        }
        SymbolFlags::Xcoff {
            n_sclass: self.symbol.n_sclass(),
            x_smtyp,
            x_smclas,
            containing_csect,
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::Symbol32`] and [`xcoff::Symbol64`].
#[allow(missing_docs)]
pub trait Symbol: Debug + Pod {
    // u32 for XCOFF32, u64 for XCOFF64.
    type Word: Into<u64>;

    fn n_value(&self) -> Self::Word;
    fn n_scnum(&self) -> i16;
    fn n_type(&self) -> u16;
    fn n_sclass(&self) -> u8;
    fn n_numaux(&self) -> u8;

    fn name_offset(&self) -> Option<u32>;
    fn name<'data, R: ReadRef<'data>>(
        &'data self,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]>;

    /// Return true if the symbol is undefined.
    #[inline]
    fn is_undefined(&self) -> bool {
        let n_sclass = self.n_sclass();
        (n_sclass == xcoff::C_EXT || n_sclass == xcoff::C_WEAKEXT)
            && self.n_scnum() == xcoff::N_UNDEF
    }

    /// Return true if the symbol has file auxiliary entry.
    fn has_aux_file(&self) -> bool {
        self.n_numaux() > 0 && self.n_sclass() == xcoff::C_FILE
    }

    /// Return true if the symbol has csect auxiliary entry.
    ///
    /// A csect auxiliary entry is required for each symbol table entry that has
    /// a storage class value of C_EXT, C_WEAKEXT, or C_HIDEXT.
    fn has_aux_csect(&self) -> bool {
        let sclass = self.n_sclass();
        self.n_numaux() > 0
            && (sclass == xcoff::C_EXT || sclass == xcoff::C_WEAKEXT || sclass == xcoff::C_HIDEXT)
    }
}
|
||||
|
||||
// XCOFF64 symbol: the name is always in the string table (n_offset), never inline.
impl Symbol for xcoff::Symbol64 {
    type Word = u64;

    fn n_value(&self) -> Self::Word {
        self.n_value.get(BE)
    }

    fn n_scnum(&self) -> i16 {
        self.n_scnum.get(BE)
    }

    fn n_type(&self) -> u16 {
        self.n_type.get(BE)
    }

    fn n_sclass(&self) -> u8 {
        self.n_sclass
    }

    fn n_numaux(&self) -> u8 {
        self.n_numaux
    }

    fn name_offset(&self) -> Option<u32> {
        Some(self.n_offset.get(BE))
    }

    /// Parse the symbol name for XCOFF64.
    fn name<'data, R: ReadRef<'data>>(
        &'data self,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.n_offset.get(BE))
            .read_error("Invalid XCOFF symbol name offset")
    }
}
|
||||
|
||||
// XCOFF32 symbol: n_name is either an inline 8-byte name, or (when its first
// byte is 0) a 4-byte pad followed by a 4-byte string table offset.
impl Symbol for xcoff::Symbol32 {
    type Word = u32;

    fn n_value(&self) -> Self::Word {
        self.n_value.get(BE)
    }

    fn n_scnum(&self) -> i16 {
        self.n_scnum.get(BE)
    }

    fn n_type(&self) -> u16 {
        self.n_type.get(BE)
    }

    fn n_sclass(&self) -> u8 {
        self.n_sclass
    }

    fn n_numaux(&self) -> u8 {
        self.n_numaux
    }

    fn name_offset(&self) -> Option<u32> {
        if self.n_name[0] == 0 {
            // `n_name` is 8 bytes, so the 4..8 slice always converts.
            let offset = u32::from_be_bytes(self.n_name[4..8].try_into().unwrap());
            Some(offset)
        } else {
            None
        }
    }

    /// Parse the symbol name for XCOFF32.
    fn name<'data, R: ReadRef<'data>>(
        &'data self,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        if let Some(offset) = self.name_offset() {
            // If the name starts with 0 then the last 4 bytes are a string table offset.
            strings
                .get(offset)
                .read_error("Invalid XCOFF symbol name offset")
        } else {
            // The name is inline and padded with nulls.
            Ok(match memchr::memchr(b'\0', &self.n_name) {
                Some(end) => &self.n_name[..end],
                None => &self.n_name,
            })
        }
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::FileAux32`] and [`xcoff::FileAux64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait FileAux: Debug + Pod {
|
||||
fn x_fname(&self) -> &[u8; 8];
|
||||
fn x_ftype(&self) -> u8;
|
||||
fn x_auxtype(&self) -> Option<u8>;
|
||||
|
||||
fn name_offset(&self) -> Option<u32> {
|
||||
let x_fname = self.x_fname();
|
||||
if x_fname[0] == 0 {
|
||||
Some(u32::from_be_bytes(x_fname[4..8].try_into().unwrap()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the x_fname field, which may be an inline string or a string table offset.
|
||||
fn fname<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
if let Some(offset) = self.name_offset() {
|
||||
// If the name starts with 0 then the last 4 bytes are a string table offset.
|
||||
strings
|
||||
.get(offset)
|
||||
.read_error("Invalid XCOFF symbol name offset")
|
||||
} else {
|
||||
// The name is inline and padded with nulls.
|
||||
let x_fname = self.x_fname();
|
||||
Ok(match memchr::memchr(b'\0', x_fname) {
|
||||
Some(end) => &x_fname[..end],
|
||||
None => x_fname,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileAux for xcoff::FileAux64 {
    fn x_fname(&self) -> &[u8; 8] {
        &self.x_fname
    }

    fn x_ftype(&self) -> u8 {
        self.x_ftype
    }

    // XCOFF64 auxiliary entries carry an explicit type tag.
    fn x_auxtype(&self) -> Option<u8> {
        Some(self.x_auxtype)
    }
}
|
||||
|
||||
impl FileAux for xcoff::FileAux32 {
    fn x_fname(&self) -> &[u8; 8] {
        &self.x_fname
    }

    fn x_ftype(&self) -> u8 {
        self.x_ftype
    }

    // XCOFF32 auxiliary entries have no type tag field.
    fn x_auxtype(&self) -> Option<u8> {
        None
    }
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::CsectAux32`] and [`xcoff::CsectAux64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait CsectAux: Debug + Pod {
|
||||
fn x_scnlen(&self) -> u64;
|
||||
fn x_parmhash(&self) -> u32;
|
||||
fn x_snhash(&self) -> u16;
|
||||
fn x_smtyp(&self) -> u8;
|
||||
fn x_smclas(&self) -> u8;
|
||||
fn x_stab(&self) -> Option<u32>;
|
||||
fn x_snstab(&self) -> Option<u16>;
|
||||
fn x_auxtype(&self) -> Option<u8>;
|
||||
|
||||
fn alignment(&self) -> u8 {
|
||||
self.x_smtyp() >> 3
|
||||
}
|
||||
fn sym_type(&self) -> u8 {
|
||||
self.x_smtyp() & 0x07
|
||||
}
|
||||
}
|
||||
|
||||
impl CsectAux for xcoff::CsectAux64 {
|
||||
fn x_scnlen(&self) -> u64 {
|
||||
self.x_scnlen_lo.get(BE) as u64 | ((self.x_scnlen_hi.get(BE) as u64) << 32)
|
||||
}
|
||||
|
||||
fn x_parmhash(&self) -> u32 {
|
||||
self.x_parmhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_snhash(&self) -> u16 {
|
||||
self.x_snhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_smtyp(&self) -> u8 {
|
||||
self.x_smtyp
|
||||
}
|
||||
|
||||
fn x_smclas(&self) -> u8 {
|
||||
self.x_smclas
|
||||
}
|
||||
|
||||
fn x_stab(&self) -> Option<u32> {
|
||||
None
|
||||
}
|
||||
|
||||
fn x_snstab(&self) -> Option<u16> {
|
||||
None
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
Some(self.x_auxtype)
|
||||
}
|
||||
}
|
||||
|
||||
impl CsectAux for xcoff::CsectAux32 {
|
||||
fn x_scnlen(&self) -> u64 {
|
||||
self.x_scnlen.get(BE) as u64
|
||||
}
|
||||
|
||||
fn x_parmhash(&self) -> u32 {
|
||||
self.x_parmhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_snhash(&self) -> u16 {
|
||||
self.x_snhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_smtyp(&self) -> u8 {
|
||||
self.x_smtyp
|
||||
}
|
||||
|
||||
fn x_smclas(&self) -> u8 {
|
||||
self.x_smclas
|
||||
}
|
||||
|
||||
fn x_stab(&self) -> Option<u32> {
|
||||
Some(self.x_stab.get(BE))
|
||||
}
|
||||
|
||||
fn x_snstab(&self) -> Option<u16> {
|
||||
Some(self.x_snstab.get(BE))
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
None
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user