Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
vendor/object/.cargo-checksum.json (1 line added, vendored, Normal file)
File diff suppressed because one or more lines are too long
vendor/object/CHANGELOG.md (754 lines added, vendored, Normal file)
@@ -0,0 +1,754 @@
# `object` Change Log

--------------------------------------------------------------------------------

## 0.32.2

Released 2023/12/24.

### Added

* Added ELF relocations for LoongArch ABI v2.20.
  [#578](https://github.com/gimli-rs/object/pull/578)
  [#589](https://github.com/gimli-rs/object/pull/589)

* Added ELF support for SHARC.
  [#593](https://github.com/gimli-rs/object/pull/593)

* Added `write::coff::Writer`.
  [#595](https://github.com/gimli-rs/object/pull/595)

* Added `SubArchitecture::Arm64EC` support for PE/COFF.
  [#607](https://github.com/gimli-rs/object/pull/607)

* Added `SubArchitecture::Arm64E` support for Mach-O.
  [#614](https://github.com/gimli-rs/object/pull/614)

* Added `read::Object::symbol_by_name` and `read::Object::symbol_by_name_bytes`.
  [#602](https://github.com/gimli-rs/object/pull/602)

* Added more functions to the low level API in `read::xcoff`.
  [#608](https://github.com/gimli-rs/object/pull/608)

* Added more functions to the low level API in `read::macho`.
  [#584](https://github.com/gimli-rs/object/pull/584)

### Changed

* Fixes for AArch64 relocation addends for Mach-O.
  [#581](https://github.com/gimli-rs/object/pull/581)

* Changes to `write::Object` output for Mach-O, including the addition of a `LC_DYSYMTAB` load command.
  [#584](https://github.com/gimli-rs/object/pull/584)

* Changed `write::Object` to always use `R_X86_64_PLT32` for x86-64 branches for ELF.
  [#590](https://github.com/gimli-rs/object/pull/590)

* Fixed `read::ObjectSymbol::kind` for undefined section symbols for COFF.
  [#592](https://github.com/gimli-rs/object/pull/592)

* Fixed `write::Object` to accept undefined section symbols for COFF.
  [#594](https://github.com/gimli-rs/object/pull/594)

* Improved parsing of auxiliary section symbols for COFF.
  [#603](https://github.com/gimli-rs/object/pull/603)

* Improved the selection of symbols for `read::Object::symbol_map`.
  This includes changes to `read::Symbol::is_definition`.
  [#601](https://github.com/gimli-rs/object/pull/601)
  [#606](https://github.com/gimli-rs/object/pull/606)

* Changed `read::ObjectSymbol::kind` for ELF `STT_NOTYPE` symbols to `SymbolKind::Unknown`.
  [#604](https://github.com/gimli-rs/object/pull/604)

* Changed `read::ObjectSymbol::scope` for XCOFF `C_HIDEXT` symbols to `SymbolScope::Compilation`.
  [#605](https://github.com/gimli-rs/object/pull/605)

--------------------------------------------------------------------------------

## 0.32.1

Released 2023/09/03.

### Added

* Added `write::Object::set_macho_cpu_subtype`.
  [#574](https://github.com/gimli-rs/object/pull/574)

--------------------------------------------------------------------------------

## 0.32.0

Released 2023/08/12.

### Breaking changes

* Changed `read::elf::Note::name` to exclude all trailing null bytes.
  [#549](https://github.com/gimli-rs/object/pull/549)

* Updated dependencies, and changed some optional dependencies to use the `dep:`
  feature syntax.
  [#558](https://github.com/gimli-rs/object/pull/558)
  [#569](https://github.com/gimli-rs/object/pull/569)

### Changed

* The minimum supported rust version for the `read` feature and its dependencies
  has changed to 1.60.0.

* The minimum supported rust version for other features has changed to 1.65.0.

* Changed many definitions from `static` to `const`.
  [#549](https://github.com/gimli-rs/object/pull/549)

* Fixed Mach-O section alignment padding in `write::Object`.
  [#553](https://github.com/gimli-rs/object/pull/553)

* Changed `read::File` to an enum.
  [#564](https://github.com/gimli-rs/object/pull/564)

### Added

* Added `elf::ELF_NOTE_GO`, `elf::NT_GO_BUILD_ID`, and `read::elf::Note::name_bytes`.
  [#549](https://github.com/gimli-rs/object/pull/549)

* Added `read::FileKind::CoffImport` and `read::coff::ImportFile`.
  [#555](https://github.com/gimli-rs/object/pull/555)
  [#556](https://github.com/gimli-rs/object/pull/556)

* Added `Architecture::Csky` and basic ELF support for C-SKY.
  [#561](https://github.com/gimli-rs/object/pull/561)

* Added `read::elf::ElfSymbol::raw_symbol`.
  [#562](https://github.com/gimli-rs/object/pull/562)

--------------------------------------------------------------------------------

## 0.30.4

Released 2023/06/05.

### Changed

* Fixed Mach-O section alignment padding in `write::Object`.
  [#553](https://github.com/gimli-rs/object/pull/553)

--------------------------------------------------------------------------------

## 0.31.1

Released 2023/05/09.

### Changed

* Fixed address for global symbols in `read::wasm`.
  [#539](https://github.com/gimli-rs/object/pull/539)

* Fixed writing of alignment for empty ELF sections.
  [#540](https://github.com/gimli-rs/object/pull/540)

### Added

* Added more `elf::GNU_PROPERTY_*` definitions.
  Added `read::elf::note::gnu_properties`, `write::StandardSection::GnuProperty`,
  and `write::Object::add_elf_gnu_property_u32`.
  [#537](https://github.com/gimli-rs/object/pull/537)
  [#541](https://github.com/gimli-rs/object/pull/541)

* Added Mach-O support for `Architecture::Aarch64_Ilp32`.
  [#542](https://github.com/gimli-rs/object/pull/542)
  [#545](https://github.com/gimli-rs/object/pull/545)

* Added `Architecture::Wasm64`.
  [#543](https://github.com/gimli-rs/object/pull/543)

--------------------------------------------------------------------------------

## 0.31.0

Released 2023/04/14.

### Breaking changes

* Added a type parameter on existing COFF types to support reading COFF `/bigobj` files.
  [#502](https://github.com/gimli-rs/object/pull/502)

* Changed PE symbols to support COFF `/bigobj`.
  Changed `pe::IMAGE_SYM_*` to `i32`.
  Changed `pe::ImageSymbolEx::section_number` to `I32Bytes`.
  Deleted a number of methods from `pe::ImageSymbol`.
  Use the `read::pe::ImageSymbol` trait instead.
  [#502](https://github.com/gimli-rs/object/pull/502)

* Changed `pe::Guid` to a single array, and added methods to read the individual fields.
  [#502](https://github.com/gimli-rs/object/pull/502)

* Added `Symbol` type parameter to `SymbolFlags` to support `SymbolFlags::Xcoff`.
  [#527](https://github.com/gimli-rs/object/pull/527)

### Changed

* Fix alignment when reserving zero length sections in `write::elf::Write::reserve`.
  [#514](https://github.com/gimli-rs/object/pull/514)

* Validate command size in `read::macho::LoadCommandIterator`.
  [#516](https://github.com/gimli-rs/object/pull/516)

* Handle invalid alignment in `read::macho::MachoSection::align`.
  [#516](https://github.com/gimli-rs/object/pull/516)

* Accept `SymbolKind::Unknown` in `write::Object::macho_write`.
  [#519](https://github.com/gimli-rs/object/pull/519)

* Updated `wasmparser` dependency.
  [#528](https://github.com/gimli-rs/object/pull/528)

### Added

* Added more `elf::EF_RISCV_*` definitions.
  [#507](https://github.com/gimli-rs/object/pull/507)

* Added `read::elf::SectionHeader::gnu_attributes` and associated types.
  Added `.gnu.attributes` support to `write::elf::Writer`.
  [#509](https://github.com/gimli-rs/object/pull/509)
  [#525](https://github.com/gimli-rs/object/pull/525)

* Added `write::Object::set_macho_build_version`.
  [#524](https://github.com/gimli-rs/object/pull/524)

* Added `read::FileKind::Xcoff32`, `read::FileKind::Xcoff64`, `read::XcoffFile`,
  and associated types.
  Added XCOFF support to `write::Object`.
  [#469](https://github.com/gimli-rs/object/pull/469)
  [#476](https://github.com/gimli-rs/object/pull/476)
  [#477](https://github.com/gimli-rs/object/pull/477)
  [#482](https://github.com/gimli-rs/object/pull/482)
  [#484](https://github.com/gimli-rs/object/pull/484)
  [#486](https://github.com/gimli-rs/object/pull/486)
  [#527](https://github.com/gimli-rs/object/pull/527)

* Added `read::FileKind::CoffBig`, `read::pe::CoffHeader` and `read::pe::ImageSymbol`.
  [#502](https://github.com/gimli-rs/object/pull/502)

* Added `elf::PT_GNU_PROPERTY`.
  [#530](https://github.com/gimli-rs/object/pull/530)

* Added `elf::ELFCOMPRESS_ZSTD`, `read::CompressionFormat::Zstandard`,
  and Zstandard decompression in `read::CompressedData::decompress` using
  the `ruzstd` crate.
  [#532](https://github.com/gimli-rs/object/pull/532)

* Added `read::elf::NoteIterator::new`.
  [#533](https://github.com/gimli-rs/object/pull/533)

--------------------------------------------------------------------------------

## 0.30.3

Released 2023/01/23.

### Added

* Added `SectionKind::ReadOnlyDataWithRel` for writing.
  [#504](https://github.com/gimli-rs/object/pull/504)

--------------------------------------------------------------------------------

## 0.30.2

Released 2023/01/11.

### Added

* Added more ELF constants for AVR flags and relocations.
  [#500](https://github.com/gimli-rs/object/pull/500)

--------------------------------------------------------------------------------

## 0.30.1

Released 2023/01/04.

### Changed

* Changed `read::ElfSymbol::kind` to handle `STT_NOTYPE` and `STT_GNU_IFUNC`.
  [#498](https://github.com/gimli-rs/object/pull/498)

### Added

* Added `read::CoffSymbol::raw_symbol`.
  [#494](https://github.com/gimli-rs/object/pull/494)

* Added ELF support for Solana Binary Format.
  [#491](https://github.com/gimli-rs/object/pull/491)

* Added ELF support for AArch64 ILP32.
  [#497](https://github.com/gimli-rs/object/pull/497)

--------------------------------------------------------------------------------

## 0.30.0

Released 2022/11/22.

### Breaking changes

* The minimum supported rust version for the `read` feature has changed to 1.52.0.
  [#458](https://github.com/gimli-rs/object/pull/458)

* The minimum supported rust version for the `write` feature has changed to 1.61.0.

* Fixed endian handling in `read::elf::SymbolTable::shndx`.
  [#458](https://github.com/gimli-rs/object/pull/458)

* Fixed endian handling in `read::pe::ResourceName`.
  [#458](https://github.com/gimli-rs/object/pull/458)

* Changed definitions for LoongArch ELF header flags.
  [#483](https://github.com/gimli-rs/object/pull/483)

### Changed

* Fixed parsing of multiple debug directory entries in `read::pe::PeFile::pdb_info`.
  [#451](https://github.com/gimli-rs/object/pull/451)

* Changed the section name used when writing COFF stub symbols.
  [#475](https://github.com/gimli-rs/object/pull/475)

### Added

* Added `read::pe::DataDirectories::delay_load_import_table`.
  [#448](https://github.com/gimli-rs/object/pull/448)

* Added `read::macho::LoadCommandData::raw_data`.
  [#449](https://github.com/gimli-rs/object/pull/449)

* Added ELF relocations for LoongArch ps ABI v2.
  [#450](https://github.com/gimli-rs/object/pull/450)

* Added PowerPC support for Mach-O.
  [#460](https://github.com/gimli-rs/object/pull/460)

* Added support for reading the AIX big archive format.
  [#462](https://github.com/gimli-rs/object/pull/462)
  [#467](https://github.com/gimli-rs/object/pull/467)
  [#473](https://github.com/gimli-rs/object/pull/473)

* Added support for `RelocationEncoding::AArch64Call` when writing Mach-O files.
  [#465](https://github.com/gimli-rs/object/pull/465)

* Added support for `RelocationKind::Relative` when writing RISC-V ELF files.
  [#470](https://github.com/gimli-rs/object/pull/470)

* Added Xtensa architecture support for ELF.
  [#481](https://github.com/gimli-rs/object/pull/481)

* Added `read::pe::ResourceName::raw_data`.
  [#487](https://github.com/gimli-rs/object/pull/487)

--------------------------------------------------------------------------------

## 0.29.0

Released 2022/06/22.

### Breaking changes

* The `write` feature now has a minimum supported rust version of 1.56.1.
  [#444](https://github.com/gimli-rs/object/pull/444)

* Added `os_abi` and `abi_version` fields to `FileFlags::Elf`.
  [#438](https://github.com/gimli-rs/object/pull/438)
  [#441](https://github.com/gimli-rs/object/pull/441)

### Changed

* Fixed handling of empty symbol tables in `read::elf::ElfFile::symbol_table` and
  `read::elf::ElfFile::dynamic_symbol_table`.
  [#443](https://github.com/gimli-rs/object/pull/443)

### Added

* Added more `ELF_OSABI_*` constants.
  [#439](https://github.com/gimli-rs/object/pull/439)

--------------------------------------------------------------------------------

## 0.28.4

Released 2022/05/09.

### Added

* Added `read::pe::DataDirectories::resource_directory`.
  [#425](https://github.com/gimli-rs/object/pull/425)
  [#427](https://github.com/gimli-rs/object/pull/427)

* Added PE support for more ARM relocations.
  [#428](https://github.com/gimli-rs/object/pull/428)

* Added support for `Architecture::LoongArch64`.
  [#430](https://github.com/gimli-rs/object/pull/430)
  [#432](https://github.com/gimli-rs/object/pull/432)

* Added `elf::EF_MIPS_ABI` and associated constants.
  [#433](https://github.com/gimli-rs/object/pull/433)

--------------------------------------------------------------------------------

## 0.28.3

Released 2022/01/19.

### Changed

* For the Mach-O support in `write::Object`, accept `RelocationKind::MachO` for all
  architectures, and accept `RelocationKind::Absolute` for ARM64.
  [#422](https://github.com/gimli-rs/object/pull/422)

### Added

* Added `pe::ImageDataDirectory::file_range`, `read::pe::SectionTable::pe_file_range_at`
  and `pe::ImageSectionHeader::pe_file_range_at`.
  [#421](https://github.com/gimli-rs/object/pull/421)

* Added `write::Object::add_coff_exports`.
  [#423](https://github.com/gimli-rs/object/pull/423)

--------------------------------------------------------------------------------

## 0.28.2

Released 2022/01/09.

### Changed

* Ignored errors for the Wasm extended name section in `read::WasmFile::parse`.
  [#408](https://github.com/gimli-rs/object/pull/408)

* Ignored errors for the COFF symbol table in `read::PeFile::parse`.
  [#410](https://github.com/gimli-rs/object/pull/410)

* Fixed handling of `SectionFlags::Coff` in `write::Object::coff_write`.
  [#412](https://github.com/gimli-rs/object/pull/412)

### Added

* Added `read::ObjectSegment::flags`.
  [#416](https://github.com/gimli-rs/object/pull/416)
  [#418](https://github.com/gimli-rs/object/pull/418)

--------------------------------------------------------------------------------

## 0.28.1

Released 2021/12/12.

### Changed

* Fixed `read::elf::SymbolTable::shndx_section`.
  [#405](https://github.com/gimli-rs/object/pull/405)

* Fixed build warnings.
  [#405](https://github.com/gimli-rs/object/pull/405)
  [#406](https://github.com/gimli-rs/object/pull/406)

--------------------------------------------------------------------------------
## 0.28.0

Released 2021/12/12.

### Breaking changes

* `write_core` feature no longer enables `std` support. Use `write_std` instead.
  [#400](https://github.com/gimli-rs/object/pull/400)

* Multiple changes related to Mach-O split dyld cache support.
  [#398](https://github.com/gimli-rs/object/pull/398)

### Added

* Added `write::pe::Writer::write_file_align`.
  [#397](https://github.com/gimli-rs/object/pull/397)

* Added support for Mach-O split dyld cache.
  [#398](https://github.com/gimli-rs/object/pull/398)

* Added support for `IMAGE_SCN_LNK_NRELOC_OVFL` when reading and writing COFF.
  [#399](https://github.com/gimli-rs/object/pull/399)

* Added `write::elf::Writer::reserve_null_symbol_index`.
  [#402](https://github.com/gimli-rs/object/pull/402)

--------------------------------------------------------------------------------

## 0.27.1

Released 2021/10/22.

### Changed

* Fixed build error with older Rust versions due to cargo resolver version.

--------------------------------------------------------------------------------

## 0.27.0

Released 2021/10/17.

### Breaking changes

* Changed `read::elf` to use `SectionIndex` instead of `usize` in more places.
  [#341](https://github.com/gimli-rs/object/pull/341)

* Changed some `read::elf` section methods to additionally return the linked section index.
  [#341](https://github.com/gimli-rs/object/pull/341)

* Changed `read::pe::ImageNtHeaders::parse` to return `DataDirectories` instead of a slice.
  [#357](https://github.com/gimli-rs/object/pull/357)

* Deleted `value` parameter for `write::WritableBuffer::resize`.
  [#369](https://github.com/gimli-rs/object/pull/369)

* Changed `write::Object` and `write::Section` to use `Cow` for section data.
  This added a lifetime parameter, which existing users can set to `'static`.
  [#370](https://github.com/gimli-rs/object/pull/370)

### Changed

* Fixed parsing when PE import directory has zero size.
  [#341](https://github.com/gimli-rs/object/pull/341)

* Fixed parsing when PE import directory has zero for original first thunk.
  [#385](https://github.com/gimli-rs/object/pull/385)
  [#387](https://github.com/gimli-rs/object/pull/387)

* Fixed parsing when PE export directory has zero number of names.
  [#353](https://github.com/gimli-rs/object/pull/353)

* Fixed parsing when PE export directory has zero number of names and addresses.
  [#362](https://github.com/gimli-rs/object/pull/362)

* Fixed parsing when PE sections are contiguous.
  [#354](https://github.com/gimli-rs/object/pull/354)

* Fixed `std` feature for `indexmap` dependency.
  [#374](https://github.com/gimli-rs/object/pull/374)

* Fixed overflow in COFF section name offset parsing.
  [#390](https://github.com/gimli-rs/object/pull/390)

### Added

* Added `name_bytes` methods to unified `read` traits.
  [#351](https://github.com/gimli-rs/object/pull/351)

* Added `read::Object::kind`.
  [#352](https://github.com/gimli-rs/object/pull/352)

* Added `read::elf::VersionTable` and related helpers.
  [#341](https://github.com/gimli-rs/object/pull/341)

* Added `read::elf::SectionTable::dynamic` and related helpers.
  [#345](https://github.com/gimli-rs/object/pull/345)

* Added `read::coff::SectionTable::max_section_file_offset`.
  [#344](https://github.com/gimli-rs/object/pull/344)

* Added `read::pe::ExportTable` and related helpers.
  [#349](https://github.com/gimli-rs/object/pull/349)
  [#353](https://github.com/gimli-rs/object/pull/353)

* Added `read::pe::ImportTable` and related helpers.
  [#357](https://github.com/gimli-rs/object/pull/357)

* Added `read::pe::DataDirectories` and related helpers.
  [#357](https://github.com/gimli-rs/object/pull/357)
  [#384](https://github.com/gimli-rs/object/pull/384)

* Added `read::pe::RichHeaderInfo` and related helpers.
  [#375](https://github.com/gimli-rs/object/pull/375)
  [#379](https://github.com/gimli-rs/object/pull/379)

* Added `read::pe::RelocationBlocks` and related helpers.
  [#378](https://github.com/gimli-rs/object/pull/378)

* Added `write::elf::Writer`.
  [#350](https://github.com/gimli-rs/object/pull/350)

* Added `write::pe::Writer`.
  [#382](https://github.com/gimli-rs/object/pull/382)
  [#388](https://github.com/gimli-rs/object/pull/388)

* Added `write::Section::data/data_mut`.
  [#367](https://github.com/gimli-rs/object/pull/367)

* Added `write::Object::write_stream`.
  [#369](https://github.com/gimli-rs/object/pull/369)

* Added MIPSr6 ELF header flag definitions.
  [#372](https://github.com/gimli-rs/object/pull/372)

--------------------------------------------------------------------------------

## 0.26.2

Released 2021/08/28.

### Added

* Added support for 64-bit symbol table names to `read::archive`.
  [#366](https://github.com/gimli-rs/object/pull/366)

--------------------------------------------------------------------------------

## 0.26.1

Released 2021/08/19.

### Changed

* Activate `memchr`'s `rustc-dep-of-std` feature
  [#356](https://github.com/gimli-rs/object/pull/356)

--------------------------------------------------------------------------------

## 0.26.0

Released 2021/07/26.

### Breaking changes

* Changed `ReadRef::read_bytes_at_until` to accept a range parameter.
  [#326](https://github.com/gimli-rs/object/pull/326)

* Added `ReadRef` type parameter to `read::StringTable` and types that
  contain it. String table entries are now only read as required.
  [#326](https://github.com/gimli-rs/object/pull/326)

* Changed result type of `read::elf::SectionHeader::data` and `data_as_array`.
  [#332](https://github.com/gimli-rs/object/pull/332)

* Moved `pod::WritableBuffer` to `write::WritableBuffer`.
  Renamed `WritableBuffer::extend` to `write_bytes`.
  Added more provided methods to `WritableBuffer`.
  [#335](https://github.com/gimli-rs/object/pull/335)

* Moved `pod::Bytes` to `read::Bytes`.
  [#336](https://github.com/gimli-rs/object/pull/336)

* Added `is_mips64el` parameter to `elf::Rela64::r_info/set_r_info`.
  [#337](https://github.com/gimli-rs/object/pull/337)

### Changed

* Removed `alloc` dependency when no features are enabled.
  [#336](https://github.com/gimli-rs/object/pull/336)

### Added

* Added `read::pe::PeFile` methods: `section_table`, `data_directory`, and `data`.
  [#324](https://github.com/gimli-rs/object/pull/324)

* Added more ELF definitions.
  [#332](https://github.com/gimli-rs/object/pull/332)

* Added `read::elf::SectionTable` methods for hash tables and symbol version
  information.
  [#332](https://github.com/gimli-rs/object/pull/332)

* Added PE RISC-V definitions.
  [#333](https://github.com/gimli-rs/object/pull/333)

* Added `WritableBuffer` implementation for `Vec`.
  [#335](https://github.com/gimli-rs/object/pull/335)

--------------------------------------------------------------------------------

## 0.25.3

Released 2021/06/12.

### Added

* Added `RelocationEncoding::AArch64Call`.
  [#322](https://github.com/gimli-rs/object/pull/322)

--------------------------------------------------------------------------------

## 0.25.2

Released 2021/06/04.

### Added

* Added `Architecture::X86_64_X32`.
  [#320](https://github.com/gimli-rs/object/pull/320)

--------------------------------------------------------------------------------

## 0.25.1

Released 2021/06/03.

### Changed

* write: Fix choice of `SHT_REL` or `SHT_RELA` for most architectures.
  [#318](https://github.com/gimli-rs/object/pull/318)

* write: Fix relocation encoding for MIPS64EL.
  [#318](https://github.com/gimli-rs/object/pull/318)

--------------------------------------------------------------------------------

## 0.25.0

Released 2021/06/02.

### Breaking changes

* Added `non_exhaustive` to most public enums.
  [#306](https://github.com/gimli-rs/object/pull/306)

* `MachHeader::parse` and `MachHeader::load_commands` now require a header offset.
  [#304](https://github.com/gimli-rs/object/pull/304)

* Added `ReadRef::read_bytes_at_until`.
  [#308](https://github.com/gimli-rs/object/pull/308)

* `PeFile::entry`, `PeSection::address` and `PeSegment::address` now return a
  virtual address instead of an RVA.
  [#315](https://github.com/gimli-rs/object/pull/315)

### Added

* Added `pod::from_bytes_mut`, `pod::slice_from_bytes_mut`, `pod::bytes_of_mut`,
  and `pod::bytes_of_slice_mut`.
  [#296](https://github.com/gimli-rs/object/pull/296)
  [#297](https://github.com/gimli-rs/object/pull/297)

* Added `Object::pdb_info`.
  [#298](https://github.com/gimli-rs/object/pull/298)

* Added `read::macho::DyldCache`, other associated definitions,
  and support for these in the examples.
  [#308](https://github.com/gimli-rs/object/pull/308)

* Added more architecture support.
  [#303](https://github.com/gimli-rs/object/pull/303)
  [#309](https://github.com/gimli-rs/object/pull/309)

* Derive more traits for enums.
  [#311](https://github.com/gimli-rs/object/pull/311)

* Added `Object::relative_address_base`.
  [#315](https://github.com/gimli-rs/object/pull/315)

### Changed

* Improved performance for string parsing.
  [#302](https://github.com/gimli-rs/object/pull/302)

* `objdump` example allows selecting container members.
  [#308](https://github.com/gimli-rs/object/pull/308)
vendor/object/Cargo.toml (163 lines added, vendored, Normal file)
@@ -0,0 +1,163 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
rust-version = "1.60"
name = "object"
version = "0.32.2"
exclude = [
    "/.github",
    "/testfiles",
]
description = "A unified interface for reading and writing object file formats."
readme = "README.md"
keywords = [
    "object",
    "elf",
    "mach-o",
    "pe",
    "coff",
]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/gimli-rs/object"
resolver = "2"

[package.metadata.docs.rs]
features = ["doc"]

[dependencies.alloc]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-alloc"

[dependencies.compiler_builtins]
version = "0.1.2"
optional = true

[dependencies.core]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-core"

[dependencies.crc32fast]
version = "1.2"
optional = true
default-features = false

[dependencies.flate2]
version = "1"
optional = true

[dependencies.hashbrown]
version = "0.14.0"
features = ["ahash"]
optional = true
default-features = false

[dependencies.indexmap]
version = "2.0"
optional = true
default-features = false

[dependencies.memchr]
version = "2.4.1"
default-features = false

[dependencies.ruzstd]
version = "0.5.0"
optional = true

[dependencies.wasmparser]
version = "0.118.0"
optional = true

[features]
all = [
    "read",
    "write",
    "std",
    "compression",
    "wasm",
]
archive = []
cargo-all = []
coff = []
compression = [
    "dep:flate2",
    "dep:ruzstd",
    "std",
]
default = [
    "read",
    "compression",
]
doc = [
    "read_core",
    "write_std",
    "std",
    "compression",
    "archive",
    "coff",
    "elf",
    "macho",
    "pe",
    "wasm",
    "xcoff",
]
elf = []
macho = []
pe = ["coff"]
read = [
    "read_core",
    "archive",
    "coff",
    "elf",
    "macho",
    "pe",
    "xcoff",
    "unaligned",
]
read_core = []
rustc-dep-of-std = [
    "core",
    "compiler_builtins",
    "alloc",
    "memchr/rustc-dep-of-std",
]
std = ["memchr/std"]
unaligned = []
unstable = []
unstable-all = [
    "all",
    "unstable",
]
wasm = ["dep:wasmparser"]
write = [
    "write_std",
    "coff",
    "elf",
    "macho",
    "pe",
    "xcoff",
]
write_core = [
    "dep:crc32fast",
    "dep:indexmap",
    "dep:hashbrown",
]
write_std = [
    "write_core",
    "std",
    "indexmap?/std",
    "crc32fast?/std",
]
xcoff = []
vendor/object/LICENSE-APACHE (201 lines added, vendored, Normal file)
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
vendor/object/LICENSE-MIT (25 lines added, vendored, Normal file)
@@ -0,0 +1,25 @@
Copyright (c) 2015 The Gimli Developers

Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
vendor/object/README.md (56 lines added, vendored, Normal file)
@@ -0,0 +1,56 @@
# `object`

The `object` crate provides a unified interface to working with object files
across platforms. It supports reading relocatable object files and executable files,
and writing COFF/ELF/Mach-O/XCOFF relocatable object files and ELF/PE executable files.

For reading files, it provides multiple levels of support:

* raw struct definitions suitable for zero copy access
* low level APIs for accessing the raw structs ([example](crates/examples/src/readobj/))
* a higher level unified API for accessing common features of object files, such
  as sections and symbols ([example](crates/examples/src/objdump.rs))

Supported file formats: ELF, Mach-O, Windows PE/COFF, Wasm, XCOFF, and Unix archive.

## Example for unified read API
```rust
use object::{Object, ObjectSection};
use std::error::Error;
use std::fs;

/// Reads a file and displays the name of each section.
fn main() -> Result<(), Box<dyn Error>> {
    let binary_data = fs::read("path/to/binary")?;
    let file = object::File::parse(&*binary_data)?;
    for section in file.sections() {
        println!("{}", section.name()?);
    }
    Ok(())
}
```
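The unified API exposes symbols in the same way as sections. The sketch below builds on the example above and is only an illustration: the placeholder path is the same assumption as before, and it relies on nothing beyond the crate's documented `Object` and `ObjectSymbol` traits.

```rust
use object::{Object, ObjectSymbol};
use std::error::Error;
use std::fs;

/// Reads a file and displays the address and name of each symbol.
fn main() -> Result<(), Box<dyn Error>> {
    let binary_data = fs::read("path/to/binary")?;
    let file = object::File::parse(&*binary_data)?;
    for symbol in file.symbols() {
        // `address` is the symbol's value; `name` may fail for malformed string tables.
        println!("{:#x} {}", symbol.address(), symbol.name()?);
    }
    Ok(())
}
```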
See [`crates/examples`](crates/examples) for more examples.

## Minimum Supported Rust Version (MSRV)

Changes to MSRV are considered breaking changes. We are conservative about changing the MSRV,
but are sometimes required to change it due to dependencies. The MSRV is:

 * 1.60.0 for the `read` feature and its dependencies.
 * 1.65.0 for other features.

## License

Licensed under either of

* Apache License, Version 2.0 ([`LICENSE-APACHE`](./LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([`LICENSE-MIT`](./LICENSE-MIT) or https://opensource.org/licenses/MIT)

at your option.

## Contribution

Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.
vendor/object/clippy.toml (1 line added, vendored, Normal file)
@@ -0,0 +1 @@
msrv = "1.60.0"
vendor/object/src/archive.rs (91 lines added, vendored, Normal file)
@@ -0,0 +1,91 @@
//! Archive definitions.
//!
//! These definitions are independent of read/write support, although we do implement
//! some traits useful for those.

use crate::pod::Pod;

/// File identification bytes stored at the beginning of the file.
pub const MAGIC: [u8; 8] = *b"!<arch>\n";

/// File identification bytes at the beginning of AIX big archive.
pub const AIX_BIG_MAGIC: [u8; 8] = *b"<bigaf>\n";

/// File identification bytes stored at the beginning of a thin archive.
///
/// A thin archive only contains a symbol table and file names.
pub const THIN_MAGIC: [u8; 8] = *b"!<thin>\n";

/// The terminator for each archive member header.
pub const TERMINATOR: [u8; 2] = *b"`\n";

/// The header at the start of an archive member.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Header {
    /// The file name.
    pub name: [u8; 16],
    /// File modification timestamp in decimal.
    pub date: [u8; 12],
    /// User ID in decimal.
    pub uid: [u8; 6],
    /// Group ID in decimal.
    pub gid: [u8; 6],
    /// File mode in octal.
    pub mode: [u8; 8],
    /// File size in decimal.
    pub size: [u8; 10],
    /// Must be equal to `TERMINATOR`.
    pub terminator: [u8; 2],
}

/// The header at the start of an AIX big archive member, without name.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct AixHeader {
    /// File member size in decimal.
    pub size: [u8; 20],
    /// Next member offset in decimal.
    pub nxtmem: [u8; 20],
    /// Previous member offset in decimal.
    pub prvmem: [u8; 20],
    /// File member date in decimal.
    pub date: [u8; 12],
    /// File member user id in decimal.
    pub uid: [u8; 12],
    /// File member group id in decimal.
    pub gid: [u8; 12],
    /// File member mode in octal.
    pub mode: [u8; 12],
    /// File member name length in decimal.
    pub namlen: [u8; 4],
}

/// The AIX big archive's fixed length header at file beginning.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct AixFileHeader {
    /// Archive magic string.
    pub magic: [u8; 8],
    /// Offset of member table.
    pub memoff: [u8; 20],
    /// Offset of global symbol table.
    pub gstoff: [u8; 20],
    /// Offset of global symbol table for 64-bit objects.
    pub gst64off: [u8; 20],
    /// Offset of first member.
    pub fstmoff: [u8; 20],
    /// Offset of last member.
    pub lstmoff: [u8; 20],
    /// Offset of first member on free list.
    pub freeoff: [u8; 20],
}

/// Offset of a member in an AIX big archive.
///
/// This is used in the member index.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct AixMemberOffset(pub [u8; 20]);

unsafe_impl_pod!(Header, AixHeader, AixFileHeader, AixMemberOffset,);
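// Illustrative sketch, not part of the vendored file above: the header fields are
// fixed-width ASCII rather than binary integers, so a reader has to parse them as
// padded decimal (or octal) text. The helper below is hypothetical and only assumes
// the `Header` field layout defined in this file.

/// Parse a fixed-width, space-padded decimal field such as `Header::size`.
fn parse_decimal_field(field: &[u8]) -> Option<u64> {
    let text = core::str::from_utf8(field).ok()?;
    let digits = text.trim_end_matches(' ');
    if digits.is_empty() || !digits.bytes().all(|b| b.is_ascii_digit()) {
        return None;
    }
    digits.parse().ok()
}

// For example, a `size` field containing `*b"1234      "` parses to `Some(1234)`.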
vendor/object/src/common.rs (536 lines added, vendored, Normal file)
@@ -0,0 +1,536 @@
/// A CPU architecture.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum Architecture {
    Unknown,
    Aarch64,
    #[allow(non_camel_case_types)]
    Aarch64_Ilp32,
    Arm,
    Avr,
    Bpf,
    Csky,
    I386,
    X86_64,
    #[allow(non_camel_case_types)]
    X86_64_X32,
    Hexagon,
    LoongArch64,
    Mips,
    Mips64,
    Msp430,
    PowerPc,
    PowerPc64,
    Riscv32,
    Riscv64,
    S390x,
    Sbf,
    Sharc,
    Sparc64,
    Wasm32,
    Wasm64,
    Xtensa,
}

/// A CPU sub-architecture.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum SubArchitecture {
    Arm64E,
    Arm64EC,
}

impl Architecture {
    /// The size of an address value for this architecture.
    ///
    /// Returns `None` for unknown architectures.
    pub fn address_size(self) -> Option<AddressSize> {
        match self {
            Architecture::Unknown => None,
            Architecture::Aarch64 => Some(AddressSize::U64),
            Architecture::Aarch64_Ilp32 => Some(AddressSize::U32),
            Architecture::Arm => Some(AddressSize::U32),
            Architecture::Avr => Some(AddressSize::U8),
            Architecture::Bpf => Some(AddressSize::U64),
            Architecture::Csky => Some(AddressSize::U32),
            Architecture::I386 => Some(AddressSize::U32),
            Architecture::X86_64 => Some(AddressSize::U64),
            Architecture::X86_64_X32 => Some(AddressSize::U32),
            Architecture::Hexagon => Some(AddressSize::U32),
            Architecture::LoongArch64 => Some(AddressSize::U64),
            Architecture::Mips => Some(AddressSize::U32),
            Architecture::Mips64 => Some(AddressSize::U64),
            Architecture::Msp430 => Some(AddressSize::U16),
            Architecture::PowerPc => Some(AddressSize::U32),
            Architecture::PowerPc64 => Some(AddressSize::U64),
            Architecture::Riscv32 => Some(AddressSize::U32),
            Architecture::Riscv64 => Some(AddressSize::U64),
            Architecture::S390x => Some(AddressSize::U64),
            Architecture::Sbf => Some(AddressSize::U64),
            Architecture::Sharc => Some(AddressSize::U32),
            Architecture::Sparc64 => Some(AddressSize::U64),
            Architecture::Wasm32 => Some(AddressSize::U32),
            Architecture::Wasm64 => Some(AddressSize::U64),
            Architecture::Xtensa => Some(AddressSize::U32),
        }
    }
}

/// The size of an address value for an architecture.
///
/// This may differ from the address size supported by the file format (such as for COFF).
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
#[repr(u8)]
pub enum AddressSize {
    U8 = 1,
    U16 = 2,
    U32 = 4,
    U64 = 8,
}

impl AddressSize {
    /// The size in bytes of an address value.
    #[inline]
    pub fn bytes(self) -> u8 {
        self as u8
    }
}
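// A hedged usage sketch, not part of this vendored module: it only combines
// `Architecture::address_size` and `AddressSize::bytes` as defined above, and the
// `address_bits` helper itself is hypothetical.

/// Returns the pointer width in bits for `arch`, or `None` if it is unknown.
pub fn address_bits(arch: Architecture) -> Option<u16> {
    arch.address_size().map(|size| u16::from(size.bytes()) * 8)
}

// e.g. `address_bits(Architecture::X86_64)` is `Some(64)` and
// `address_bits(Architecture::Msp430)` is `Some(16)`.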
|
||||
/// A binary file format.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum BinaryFormat {
|
||||
Coff,
|
||||
Elf,
|
||||
MachO,
|
||||
Pe,
|
||||
Wasm,
|
||||
Xcoff,
|
||||
}
|
||||
|
||||
/// The kind of a section.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SectionKind {
|
||||
/// The section kind is unknown.
|
||||
Unknown,
|
||||
/// An executable code section.
|
||||
///
|
||||
/// Example ELF sections: `.text`
|
||||
///
|
||||
/// Example Mach-O sections: `__TEXT/__text`
|
||||
Text,
|
||||
/// A data section.
|
||||
///
|
||||
/// Example ELF sections: `.data`
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__data`
|
||||
Data,
|
||||
/// A read only data section.
|
||||
///
|
||||
/// Example ELF sections: `.rodata`
|
||||
///
|
||||
/// Example Mach-O sections: `__TEXT/__const`, `__DATA/__const`, `__TEXT/__literal4`
|
||||
ReadOnlyData,
|
||||
/// A read only data section with relocations.
|
||||
///
|
||||
/// This is the same as either `Data` or `ReadOnlyData`, depending on the file format.
|
||||
/// This value is only used in the API for writing files. It is never returned when reading files.
|
||||
ReadOnlyDataWithRel,
|
||||
/// A loadable string section.
|
||||
///
|
||||
/// Example ELF sections: `.rodata.str`
|
||||
///
|
||||
/// Example Mach-O sections: `__TEXT/__cstring`
|
||||
ReadOnlyString,
|
||||
/// An uninitialized data section.
|
||||
///
|
||||
/// Example ELF sections: `.bss`
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__bss`
|
||||
UninitializedData,
|
||||
/// An uninitialized common data section.
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__common`
|
||||
Common,
|
||||
/// A TLS data section.
|
||||
///
|
||||
/// Example ELF sections: `.tdata`
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__thread_data`
|
||||
Tls,
|
||||
/// An uninitialized TLS data section.
|
||||
///
|
||||
/// Example ELF sections: `.tbss`
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__thread_bss`
|
||||
UninitializedTls,
|
||||
/// A TLS variables section.
|
||||
///
|
||||
/// This contains TLS variable structures, rather than the variable initializers.
|
||||
///
|
||||
/// Example Mach-O sections: `__DATA/__thread_vars`
|
||||
TlsVariables,
|
||||
/// A non-loadable string section.
|
||||
///
|
||||
/// Example ELF sections: `.comment`, `.debug_str`
|
||||
OtherString,
|
||||
/// Some other non-loadable section.
|
||||
///
|
||||
/// Example ELF sections: `.debug_info`
|
||||
Other,
|
||||
/// Debug information.
|
||||
///
|
||||
/// Example Mach-O sections: `__DWARF/__debug_info`
|
||||
Debug,
|
||||
/// Information for the linker.
|
||||
///
|
||||
/// Example COFF sections: `.drectve`
|
||||
Linker,
|
||||
/// ELF note section.
|
||||
Note,
|
||||
/// Metadata such as symbols or relocations.
|
||||
///
|
||||
/// Example ELF sections: `.symtab`, `.strtab`, `.group`
|
||||
Metadata,
|
||||
/// Some other ELF section type.
|
||||
///
|
||||
/// This is the `sh_type` field in the section header.
|
||||
/// The meaning may be dependent on the architecture.
|
||||
Elf(u32),
|
||||
}
|
||||
|
||||
impl SectionKind {
|
||||
/// Return true if this section contains zerofill data.
|
||||
pub fn is_bss(self) -> bool {
|
||||
self == SectionKind::UninitializedData
|
||||
|| self == SectionKind::UninitializedTls
|
||||
|| self == SectionKind::Common
|
||||
}
|
||||
}
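// Example sketch: `is_bss` is true exactly for the zerofill kinds, which occupy
// no file space and are zero-initialized at load time.
#[cfg(test)]
mod section_kind_example {
    use super::SectionKind;

    #[test]
    fn bss_kinds() {
        assert!(SectionKind::UninitializedData.is_bss());
        assert!(SectionKind::UninitializedTls.is_bss());
        assert!(SectionKind::Common.is_bss());
        assert!(!SectionKind::Data.is_bss());
    }
}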
|
||||
|
||||
/// The selection kind for a COMDAT section group.
|
||||
///
|
||||
/// This determines the way in which the linker resolves multiple definitions of the COMDAT
|
||||
/// sections.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum ComdatKind {
|
||||
/// The selection kind is unknown.
|
||||
Unknown,
|
||||
/// Multiple definitions are allowed.
|
||||
///
|
||||
/// An arbitrary definition is selected, and the rest are removed.
|
||||
///
|
||||
/// This is the only supported selection kind for ELF.
|
||||
Any,
|
||||
/// Multiple definitions are not allowed.
|
||||
///
|
||||
/// This is used to group sections without allowing duplicates.
|
||||
NoDuplicates,
|
||||
/// Multiple definitions must have the same size.
|
||||
///
|
||||
/// An arbitrary definition is selected, and the rest are removed.
|
||||
SameSize,
|
||||
/// Multiple definitions must match exactly.
|
||||
///
|
||||
/// An arbitrary definition is selected, and the rest are removed.
|
||||
ExactMatch,
|
||||
/// Multiple definitions are allowed, and the largest is selected.
|
||||
///
|
||||
/// An arbitrary definition with the largest size is selected, and the rest are removed.
|
||||
Largest,
|
||||
/// Multiple definitions are allowed, and the newest is selected.
|
||||
Newest,
|
||||
}
|
||||
|
||||
/// The kind of a symbol.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SymbolKind {
|
||||
/// The symbol kind is unknown.
|
||||
Unknown,
|
||||
/// The symbol is a null placeholder.
|
||||
Null,
|
||||
/// The symbol is for executable code.
|
||||
Text,
|
||||
/// The symbol is for a data object.
|
||||
Data,
|
||||
/// The symbol is for a section.
|
||||
Section,
|
||||
/// The symbol is the name of a file. It precedes symbols within that file.
|
||||
File,
|
||||
/// The symbol is for a code label.
|
||||
Label,
|
||||
/// The symbol is for a thread local storage entity.
|
||||
Tls,
|
||||
}
|
||||
|
||||
/// A symbol scope.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum SymbolScope {
|
||||
/// Unknown scope.
|
||||
Unknown,
|
||||
/// Symbol is visible to the compilation unit.
|
||||
Compilation,
|
||||
/// Symbol is visible to the static linkage unit.
|
||||
Linkage,
|
||||
/// Symbol is visible to dynamically linked objects.
|
||||
Dynamic,
|
||||
}
|
||||
|
||||
/// The operation used to calculate the result of the relocation.
|
||||
///
|
||||
/// The relocation descriptions use the following definitions. Note that
|
||||
/// these definitions probably don't match any ELF ABI.
|
||||
///
|
||||
/// * A - The value of the addend.
|
||||
/// * G - The address of the symbol's entry within the global offset table.
|
||||
/// * L - The address of the symbol's entry within the procedure linkage table.
|
||||
/// * P - The address of the place of the relocation.
|
||||
/// * S - The address of the symbol.
|
||||
/// * GotBase - The address of the global offset table.
|
||||
/// * Image - The base address of the image.
|
||||
/// * Section - The address of the section containing the symbol.
|
||||
///
|
||||
/// 'XxxRelative' means 'Xxx + A - P'. 'XxxOffset' means 'S + A - Xxx'.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum RelocationKind {
|
||||
/// S + A
|
||||
Absolute,
|
||||
/// S + A - P
|
||||
Relative,
|
||||
/// G + A - GotBase
|
||||
Got,
|
||||
/// G + A - P
|
||||
GotRelative,
|
||||
/// GotBase + A - P
|
||||
GotBaseRelative,
|
||||
/// S + A - GotBase
|
||||
GotBaseOffset,
|
||||
/// L + A - P
|
||||
PltRelative,
|
||||
/// S + A - Image
|
||||
ImageOffset,
|
||||
/// S + A - Section
|
||||
SectionOffset,
|
||||
/// The index of the section containing the symbol.
|
||||
SectionIndex,
|
||||
/// Some other ELF relocation. The value is dependent on the architecture.
|
||||
Elf(u32),
|
||||
/// Some other Mach-O relocation. The value is dependent on the architecture.
|
||||
MachO {
|
||||
/// The relocation type.
|
||||
value: u8,
|
||||
/// Whether the relocation is relative to the place.
|
||||
relative: bool,
|
||||
},
|
||||
/// Some other COFF relocation. The value is dependent on the architecture.
|
||||
Coff(u16),
|
||||
/// Some other XCOFF relocation.
|
||||
Xcoff(u8),
|
||||
}
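// Sketch of the address arithmetic described above for a `Relative` relocation:
// the value encoded at the place is S + A - P (symbol address plus addend minus
// the address of the place). The concrete numbers are illustrative only.
#[cfg(test)]
mod relocation_math_example {
    #[test]
    fn relative_value() {
        let s: i64 = 0x2000; // S: address of the symbol
        let a: i64 = -4; // A: addend
        let p: i64 = 0x1000; // P: address of the place of the relocation
        assert_eq!(s + a - p, 0xffc);
    }
}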
|
||||
|
||||
/// Information about how the result of the relocation operation is encoded in the place.
|
||||
///
|
||||
/// This is usually architecture specific, such as specifying an addressing mode or
|
||||
/// a specific instruction.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum RelocationEncoding {
|
||||
/// Generic encoding.
|
||||
Generic,
|
||||
|
||||
/// x86 sign extension at runtime.
|
||||
///
|
||||
/// Used with `RelocationKind::Absolute`.
|
||||
X86Signed,
|
||||
/// x86 rip-relative addressing.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
X86RipRelative,
|
||||
/// x86 rip-relative addressing in movq instruction.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
X86RipRelativeMovq,
|
||||
/// x86 branch instruction.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
X86Branch,
|
||||
|
||||
/// s390x PC-relative offset shifted right by one bit.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
S390xDbl,
|
||||
|
||||
/// AArch64 call target.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
AArch64Call,
|
||||
|
||||
/// LoongArch branch offset with two trailing zeros.
|
||||
///
|
||||
/// The `RelocationKind` must be PC relative.
|
||||
LoongArchBranch,
|
||||
|
||||
/// SHARC+ 48-bit Type A instruction
|
||||
///
|
||||
/// Represents these possible variants, each with a corresponding
|
||||
/// `R_SHARC_*` constant:
|
||||
///
|
||||
/// * 24-bit absolute address
|
||||
/// * 32-bit absolute address
|
||||
/// * 6-bit relative address
|
||||
/// * 24-bit relative address
|
||||
/// * 6-bit absolute address in the immediate value field
|
||||
/// * 16-bit absolute address in the immediate value field
|
||||
SharcTypeA,
|
||||
|
||||
/// SHARC+ 32-bit Type B instruction
|
||||
///
|
||||
/// Represents these possible variants, each with a corresponding
|
||||
/// `R_SHARC_*` constant:
|
||||
///
|
||||
/// * 6-bit absolute address in the immediate value field
|
||||
/// * 7-bit absolute address in the immediate value field
|
||||
/// * 16-bit absolute address
|
||||
/// * 6-bit relative address
|
||||
SharcTypeB,
|
||||
}
|
||||
|
||||
/// File flags that are specific to each file format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum FileFlags {
|
||||
/// No file flags.
|
||||
None,
|
||||
/// ELF file flags.
|
||||
Elf {
|
||||
/// `os_abi` field in the ELF file header.
|
||||
os_abi: u8,
|
||||
/// `abi_version` field in the ELF file header.
|
||||
abi_version: u8,
|
||||
/// `e_flags` field in the ELF file header.
|
||||
e_flags: u32,
|
||||
},
|
||||
/// Mach-O file flags.
|
||||
MachO {
|
||||
/// `flags` field in the Mach-O file header.
|
||||
flags: u32,
|
||||
},
|
||||
/// COFF file flags.
|
||||
Coff {
|
||||
/// `Characteristics` field in the COFF file header.
|
||||
characteristics: u16,
|
||||
},
|
||||
/// XCOFF file flags.
|
||||
Xcoff {
|
||||
/// `f_flags` field in the XCOFF file header.
|
||||
f_flags: u16,
|
||||
},
|
||||
}
|
||||
|
||||
/// Segment flags that are specific to each file format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SegmentFlags {
|
||||
/// No segment flags.
|
||||
None,
|
||||
/// ELF segment flags.
|
||||
Elf {
|
||||
/// `p_flags` field in the segment header.
|
||||
p_flags: u32,
|
||||
},
|
||||
/// Mach-O segment flags.
|
||||
MachO {
|
||||
/// `flags` field in the segment header.
|
||||
flags: u32,
|
||||
/// `maxprot` field in the segment header.
|
||||
maxprot: u32,
|
||||
/// `initprot` field in the segment header.
|
||||
initprot: u32,
|
||||
},
|
||||
/// COFF segment flags.
|
||||
Coff {
|
||||
/// `Characteristics` field in the segment header.
|
||||
characteristics: u32,
|
||||
},
|
||||
}
|
||||
|
||||
/// Section flags that are specific to each file format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SectionFlags {
|
||||
/// No section flags.
|
||||
None,
|
||||
/// ELF section flags.
|
||||
Elf {
|
||||
/// `sh_flags` field in the section header.
|
||||
sh_flags: u64,
|
||||
},
|
||||
/// Mach-O section flags.
|
||||
MachO {
|
||||
/// `flags` field in the section header.
|
||||
flags: u32,
|
||||
},
|
||||
/// COFF section flags.
|
||||
Coff {
|
||||
/// `Characteristics` field in the section header.
|
||||
characteristics: u32,
|
||||
},
|
||||
/// XCOFF section flags.
|
||||
Xcoff {
|
||||
/// `s_flags` field in the section header.
|
||||
s_flags: u32,
|
||||
},
|
||||
}
|
||||
|
||||
/// Symbol flags that are specific to each file format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SymbolFlags<Section, Symbol> {
|
||||
/// No symbol flags.
|
||||
None,
|
||||
/// ELF symbol flags.
|
||||
Elf {
|
||||
/// `st_info` field in the ELF symbol.
|
||||
st_info: u8,
|
||||
/// `st_other` field in the ELF symbol.
|
||||
st_other: u8,
|
||||
},
|
||||
/// Mach-O symbol flags.
|
||||
MachO {
|
||||
/// `n_desc` field in the Mach-O symbol.
|
||||
n_desc: u16,
|
||||
},
|
||||
/// COFF flags for a section symbol.
|
||||
CoffSection {
|
||||
/// `Selection` field in the auxiliary symbol for the section.
|
||||
selection: u8,
|
||||
/// `Number` field in the auxiliary symbol for the section.
|
||||
associative_section: Option<Section>,
|
||||
},
|
||||
/// XCOFF symbol flags.
|
||||
Xcoff {
|
||||
/// `n_sclass` field in the XCOFF symbol.
|
||||
n_sclass: u8,
|
||||
/// `x_smtyp` field in the CSECT auxiliary symbol.
|
||||
///
|
||||
/// Only valid if `n_sclass` is `C_EXT`, `C_WEAKEXT`, or `C_HIDEXT`.
|
||||
x_smtyp: u8,
|
||||
/// `x_smclas` field in the CSECT auxiliary symbol.
|
||||
///
|
||||
/// Only valid if `n_sclass` is `C_EXT`, `C_WEAKEXT`, or `C_HIDEXT`.
|
||||
x_smclas: u8,
|
||||
/// The containing csect for the symbol.
|
||||
///
|
||||
/// Only valid if `x_smtyp` is `XTY_LD`.
|
||||
containing_csect: Option<Symbol>,
|
||||
},
|
||||
}
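// Sketch: the `*Flags` enums are format-specific escape hatches, so callers
// normally match on the variant for the format they expect and ignore the rest.
#[cfg(test)]
mod symbol_flags_example {
    use super::SymbolFlags;

    #[test]
    fn elf_st_info() {
        let flags: SymbolFlags<(), ()> = SymbolFlags::Elf {
            st_info: 0x12,
            st_other: 0,
        };
        let st_info = match flags {
            SymbolFlags::Elf { st_info, .. } => Some(st_info),
            _ => None,
        };
        assert_eq!(st_info, Some(0x12));
    }
}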
|
||||
6287
vendor/object/src/elf.rs
vendored
Normal file
File diff suppressed because it is too large
831
vendor/object/src/endian.rs
vendored
Normal file
@@ -0,0 +1,831 @@
|
||||
//! Types for compile-time and run-time endianness.
|
||||
|
||||
use crate::pod::Pod;
|
||||
use core::fmt::{self, Debug};
|
||||
use core::marker::PhantomData;
|
||||
|
||||
/// A trait for using an endianness specification.
|
||||
///
|
||||
/// Provides methods for converting between the specified endianness and
|
||||
/// the native endianness of the target machine.
|
||||
///
|
||||
/// This trait does not require that the endianness is known at compile time.
|
||||
pub trait Endian: Debug + Default + Clone + Copy + PartialEq + Eq + 'static {
|
||||
/// Construct a specification for the endianness of some values.
|
||||
///
|
||||
/// Returns `None` if the type does not support specifying the given endianness.
|
||||
fn from_big_endian(big_endian: bool) -> Option<Self>;
|
||||
|
||||
/// Construct a specification for the endianness of some values.
|
||||
///
|
||||
/// Returns `None` if the type does not support specifying the given endianness.
|
||||
fn from_little_endian(little_endian: bool) -> Option<Self> {
|
||||
Self::from_big_endian(!little_endian)
|
||||
}
|
||||
|
||||
/// Return true for big endian byte order.
|
||||
fn is_big_endian(self) -> bool;
|
||||
|
||||
/// Return true for little endian byte order.
|
||||
#[inline]
|
||||
fn is_little_endian(self) -> bool {
|
||||
!self.is_big_endian()
|
||||
}
|
||||
|
||||
/// Converts an unsigned 16 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u16(self, n: u16) -> u16 {
|
||||
if self.is_big_endian() {
|
||||
u16::from_be(n)
|
||||
} else {
|
||||
u16::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unsigned 32 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u32(self, n: u32) -> u32 {
|
||||
if self.is_big_endian() {
|
||||
u32::from_be(n)
|
||||
} else {
|
||||
u32::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unsigned 64 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u64(self, n: u64) -> u64 {
|
||||
if self.is_big_endian() {
|
||||
u64::from_be(n)
|
||||
} else {
|
||||
u64::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 16 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i16(self, n: i16) -> i16 {
|
||||
if self.is_big_endian() {
|
||||
i16::from_be(n)
|
||||
} else {
|
||||
i16::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 32 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i32(self, n: i32) -> i32 {
|
||||
if self.is_big_endian() {
|
||||
i32::from_be(n)
|
||||
} else {
|
||||
i32::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 64 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i64(self, n: i64) -> i64 {
|
||||
if self.is_big_endian() {
|
||||
i64::from_be(n)
|
||||
} else {
|
||||
i64::from_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 16 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u16_bytes(self, n: [u8; 2]) -> u16 {
|
||||
if self.is_big_endian() {
|
||||
u16::from_be_bytes(n)
|
||||
} else {
|
||||
u16::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 32 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u32_bytes(self, n: [u8; 4]) -> u32 {
|
||||
if self.is_big_endian() {
|
||||
u32::from_be_bytes(n)
|
||||
} else {
|
||||
u32::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 64 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_u64_bytes(self, n: [u8; 8]) -> u64 {
|
||||
if self.is_big_endian() {
|
||||
u64::from_be_bytes(n)
|
||||
} else {
|
||||
u64::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 16 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i16_bytes(self, n: [u8; 2]) -> i16 {
|
||||
if self.is_big_endian() {
|
||||
i16::from_be_bytes(n)
|
||||
} else {
|
||||
i16::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 32 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i32_bytes(self, n: [u8; 4]) -> i32 {
|
||||
if self.is_big_endian() {
|
||||
i32::from_be_bytes(n)
|
||||
} else {
|
||||
i32::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 64 bit integer to native endian.
|
||||
#[inline]
|
||||
fn read_i64_bytes(self, n: [u8; 8]) -> i64 {
|
||||
if self.is_big_endian() {
|
||||
i64::from_be_bytes(n)
|
||||
} else {
|
||||
i64::from_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unsigned 16 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u16(self, n: u16) -> u16 {
|
||||
if self.is_big_endian() {
|
||||
u16::to_be(n)
|
||||
} else {
|
||||
u16::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unsigned 32 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u32(self, n: u32) -> u32 {
|
||||
if self.is_big_endian() {
|
||||
u32::to_be(n)
|
||||
} else {
|
||||
u32::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unsigned 64 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u64(self, n: u64) -> u64 {
|
||||
if self.is_big_endian() {
|
||||
u64::to_be(n)
|
||||
} else {
|
||||
u64::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 16 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i16(self, n: i16) -> i16 {
|
||||
if self.is_big_endian() {
|
||||
i16::to_be(n)
|
||||
} else {
|
||||
i16::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 32 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i32(self, n: i32) -> i32 {
|
||||
if self.is_big_endian() {
|
||||
i32::to_be(n)
|
||||
} else {
|
||||
i32::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a signed 64 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i64(self, n: i64) -> i64 {
|
||||
if self.is_big_endian() {
|
||||
i64::to_be(n)
|
||||
} else {
|
||||
i64::to_le(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 16 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u16_bytes(self, n: u16) -> [u8; 2] {
|
||||
if self.is_big_endian() {
|
||||
u16::to_be_bytes(n)
|
||||
} else {
|
||||
u16::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 32 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u32_bytes(self, n: u32) -> [u8; 4] {
|
||||
if self.is_big_endian() {
|
||||
u32::to_be_bytes(n)
|
||||
} else {
|
||||
u32::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned unsigned 64 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_u64_bytes(self, n: u64) -> [u8; 8] {
|
||||
if self.is_big_endian() {
|
||||
u64::to_be_bytes(n)
|
||||
} else {
|
||||
u64::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 16 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i16_bytes(self, n: i16) -> [u8; 2] {
|
||||
if self.is_big_endian() {
|
||||
i16::to_be_bytes(n)
|
||||
} else {
|
||||
i16::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 32 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i32_bytes(self, n: i32) -> [u8; 4] {
|
||||
if self.is_big_endian() {
|
||||
i32::to_be_bytes(n)
|
||||
} else {
|
||||
i32::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an unaligned signed 64 bit integer from native endian.
|
||||
#[inline]
|
||||
fn write_i64_bytes(self, n: i64) -> [u8; 8] {
|
||||
if self.is_big_endian() {
|
||||
i64::to_be_bytes(n)
|
||||
} else {
|
||||
i64::to_le_bytes(n)
|
||||
}
|
||||
}
|
||||
}
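// Sketch: parsing code can be written once over any `Endian` implementation and
// then instantiated with a compile-time or run-time byte order.
#[cfg(test)]
mod endian_trait_example {
    use super::{BigEndian, Endian, LittleEndian};

    fn read_field<E: Endian>(e: E, raw: [u8; 4]) -> u32 {
        e.read_u32_bytes(raw)
    }

    #[test]
    fn both_byte_orders() {
        let raw = [0x01, 0x02, 0x03, 0x04];
        assert_eq!(read_field(BigEndian, raw), 0x0102_0304);
        assert_eq!(read_field(LittleEndian, raw), 0x0403_0201);
    }
}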
|
||||
|
||||
/// An endianness that is selectable at run-time.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum Endianness {
|
||||
/// Little endian byte order.
|
||||
Little,
|
||||
/// Big endian byte order.
|
||||
Big,
|
||||
}
|
||||
|
||||
impl Default for Endianness {
|
||||
#[cfg(target_endian = "little")]
|
||||
#[inline]
|
||||
fn default() -> Endianness {
|
||||
Endianness::Little
|
||||
}
|
||||
|
||||
#[cfg(target_endian = "big")]
|
||||
#[inline]
|
||||
fn default() -> Endianness {
|
||||
Endianness::Big
|
||||
}
|
||||
}
|
||||
|
||||
impl Endian for Endianness {
|
||||
#[inline]
|
||||
fn from_big_endian(big_endian: bool) -> Option<Self> {
|
||||
Some(if big_endian {
|
||||
Endianness::Big
|
||||
} else {
|
||||
Endianness::Little
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_big_endian(self) -> bool {
|
||||
self != Endianness::Little
|
||||
}
|
||||
}
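// Sketch: `Endianness` defers the choice of byte order to run time, e.g. based
// on a flag read from a file header.
#[cfg(test)]
mod endianness_example {
    use super::{Endian, Endianness};

    #[test]
    fn select_at_runtime() {
        let is_big = true; // e.g. derived from the ELF `EI_DATA` field
        let e = Endianness::from_big_endian(is_big).unwrap();
        assert!(e.is_big_endian());
        assert_eq!(e.read_u16_bytes([0x12, 0x34]), 0x1234);
    }
}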
|
||||
|
||||
/// Compile-time little endian byte order.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct LittleEndian;
|
||||
|
||||
impl Default for LittleEndian {
|
||||
#[inline]
|
||||
fn default() -> LittleEndian {
|
||||
LittleEndian
|
||||
}
|
||||
}
|
||||
|
||||
impl Endian for LittleEndian {
|
||||
#[inline]
|
||||
fn from_big_endian(big_endian: bool) -> Option<Self> {
|
||||
if big_endian {
|
||||
None
|
||||
} else {
|
||||
Some(LittleEndian)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_big_endian(self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Compile-time big endian byte order.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct BigEndian;
|
||||
|
||||
impl Default for BigEndian {
|
||||
#[inline]
|
||||
fn default() -> BigEndian {
|
||||
BigEndian
|
||||
}
|
||||
}
|
||||
|
||||
impl Endian for BigEndian {
|
||||
#[inline]
|
||||
fn from_big_endian(big_endian: bool) -> Option<Self> {
|
||||
if big_endian {
|
||||
Some(BigEndian)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_big_endian(self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
/// The native endianness for the target platform.
|
||||
#[cfg(target_endian = "little")]
|
||||
pub type NativeEndian = LittleEndian;
|
||||
|
||||
#[cfg(target_endian = "little")]
|
||||
#[allow(non_upper_case_globals)]
|
||||
#[doc(hidden)]
|
||||
pub const NativeEndian: LittleEndian = LittleEndian;
|
||||
|
||||
/// The native endianness for the target platform.
|
||||
#[cfg(target_endian = "big")]
|
||||
pub type NativeEndian = BigEndian;
|
||||
|
||||
#[cfg(target_endian = "big")]
|
||||
#[allow(non_upper_case_globals)]
|
||||
#[doc(hidden)]
|
||||
pub const NativeEndian: BigEndian = BigEndian;
|
||||
|
||||
macro_rules! unsafe_impl_endian_pod {
|
||||
($($struct_name:ident),+ $(,)?) => {
|
||||
$(
|
||||
unsafe impl<E: Endian> Pod for $struct_name<E> { }
|
||||
)+
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "unaligned"))]
|
||||
mod aligned {
|
||||
use super::{fmt, Endian, PhantomData, Pod};
|
||||
|
||||
/// A `u16` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U16<E: Endian>(u16, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U16<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 2]) -> Self {
|
||||
Self(u16::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u16) -> Self {
|
||||
Self(e.write_u16(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u16 {
|
||||
e.read_u16(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u16) {
|
||||
self.0 = e.write_u16(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// A `u32` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U32<E: Endian>(u32, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U32<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 4]) -> Self {
|
||||
Self(u32::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u32) -> Self {
|
||||
Self(e.write_u32(n), PhantomData)
|
||||
}
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u32 {
|
||||
e.read_u32(self.0)
|
||||
}
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u32) {
|
||||
self.0 = e.write_u32(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// A `u64` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U64<E: Endian>(u64, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U64<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 8]) -> Self {
|
||||
Self(u64::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u64) -> Self {
|
||||
Self(e.write_u64(n), PhantomData)
|
||||
}
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u64 {
|
||||
e.read_u64(self.0)
|
||||
}
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u64) {
|
||||
self.0 = e.write_u64(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An `i16` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I16<E: Endian>(i16, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I16<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 2]) -> Self {
|
||||
Self(i16::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i16) -> Self {
|
||||
Self(e.write_i16(n), PhantomData)
|
||||
}
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i16 {
|
||||
e.read_i16(self.0)
|
||||
}
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i16) {
|
||||
self.0 = e.write_i16(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An `i32` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I32<E: Endian>(i32, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I32<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 4]) -> Self {
|
||||
Self(i32::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i32) -> Self {
|
||||
Self(e.write_i32(n), PhantomData)
|
||||
}
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i32 {
|
||||
e.read_i32(self.0)
|
||||
}
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i32) {
|
||||
self.0 = e.write_i32(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An `i64` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I64<E: Endian>(i64, PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I64<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 8]) -> Self {
|
||||
Self(i64::from_ne_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i64) -> Self {
|
||||
Self(e.write_i64(n), PhantomData)
|
||||
}
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i64 {
|
||||
e.read_i64(self.0)
|
||||
}
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i64) {
|
||||
self.0 = e.write_i64(n);
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U16<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "U16({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U32<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "U32({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U64<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "U64({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I16<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "I16({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I32<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "I32({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I64<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "I64({:x})", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
unsafe_impl_endian_pod!(U16, U32, U64, I16, I32, I64);
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "unaligned"))]
|
||||
pub use aligned::*;
|
||||
|
||||
/// A `u16` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type U16<E> = U16Bytes<E>;
|
||||
|
||||
/// A `u32` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type U32<E> = U32Bytes<E>;
|
||||
|
||||
/// A `u64` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type U64<E> = U64Bytes<E>;
|
||||
|
||||
/// An `i16` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type I16<E> = I16Bytes<E>;
|
||||
|
||||
/// An `i32` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type I32<E> = I32Bytes<E>;
|
||||
|
||||
/// An `i64` value with an externally specified endianness of type `E`.
|
||||
#[cfg(feature = "unaligned")]
|
||||
pub type I64<E> = I64Bytes<E>;
|
||||
|
||||
/// An unaligned `u16` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U16Bytes<E: Endian>([u8; 2], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U16Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 2]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u16) -> Self {
|
||||
Self(e.write_u16_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u16 {
|
||||
e.read_u16_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u16) {
|
||||
self.0 = e.write_u16_bytes(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An unaligned `u32` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U32Bytes<E: Endian>([u8; 4], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U32Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 4]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u32) -> Self {
|
||||
Self(e.write_u32_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u32 {
|
||||
e.read_u32_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u32) {
|
||||
self.0 = e.write_u32_bytes(n);
|
||||
}
|
||||
}
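// Sketch: the `*Bytes` wrappers have no alignment requirement, so they are
// usable for fields at arbitrary offsets inside raw on-disk structs.
#[cfg(test)]
mod u32_bytes_example {
    use super::{LittleEndian, U32Bytes};

    #[test]
    fn round_trip() {
        let v = U32Bytes::new(LittleEndian, 0x0102_0304);
        assert_eq!(v.get(LittleEndian), 0x0102_0304);

        let w = U32Bytes::<LittleEndian>::from_bytes([0x04, 0x03, 0x02, 0x01]);
        assert_eq!(w.get(LittleEndian), 0x0102_0304);
    }
}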
|
||||
|
||||
/// An unaligned `u64` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct U64Bytes<E: Endian>([u8; 8], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> U64Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 8]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: u64) -> Self {
|
||||
Self(e.write_u64_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> u64 {
|
||||
e.read_u64_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: u64) {
|
||||
self.0 = e.write_u64_bytes(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An unaligned `i16` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I16Bytes<E: Endian>([u8; 2], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I16Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 2]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i16) -> Self {
|
||||
Self(e.write_i16_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i16 {
|
||||
e.read_i16_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i16) {
|
||||
self.0 = e.write_i16_bytes(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An unaligned `i32` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I32Bytes<E: Endian>([u8; 4], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I32Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 4]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i32) -> Self {
|
||||
Self(e.write_i32_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i32 {
|
||||
e.read_i32_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i32) {
|
||||
self.0 = e.write_i32_bytes(n);
|
||||
}
|
||||
}
|
||||
|
||||
/// An unaligned `i64` value with an externally specified endianness of type `E`.
|
||||
#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct I64Bytes<E: Endian>([u8; 8], PhantomData<E>);
|
||||
|
||||
impl<E: Endian> I64Bytes<E> {
|
||||
/// Construct a new value given bytes that already have the required endianness.
|
||||
pub fn from_bytes(n: [u8; 8]) -> Self {
|
||||
Self(n, PhantomData)
|
||||
}
|
||||
|
||||
/// Construct a new value given a native endian value.
|
||||
pub fn new(e: E, n: i64) -> Self {
|
||||
Self(e.write_i64_bytes(n), PhantomData)
|
||||
}
|
||||
|
||||
/// Return the value as a native endian value.
|
||||
pub fn get(self, e: E) -> i64 {
|
||||
e.read_i64_bytes(self.0)
|
||||
}
|
||||
|
||||
/// Set the value given a native endian value.
|
||||
pub fn set(&mut self, e: E, n: i64) {
|
||||
self.0 = e.write_i64_bytes(n);
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U16Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "U16({:x}, {:x})", self.0[0], self.0[1],)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U32Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"U32({:x}, {:x}, {:x}, {:x})",
|
||||
self.0[0], self.0[1], self.0[2], self.0[3],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for U64Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"U64({:x}, {:x}, {:x}, {:x}, {:x}, {:x}, {:x}, {:x})",
|
||||
self.0[0], self.0[1], self.0[2], self.0[3], self.0[4], self.0[5], self.0[6], self.0[7],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I16Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "I16({:x}, {:x})", self.0[0], self.0[1],)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I32Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"I32({:x}, {:x}, {:x}, {:x})",
|
||||
self.0[0], self.0[1], self.0[2], self.0[3],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> fmt::Debug for I64Bytes<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"I64({:x}, {:x}, {:x}, {:x}, {:x}, {:x}, {:x}, {:x})",
|
||||
self.0[0], self.0[1], self.0[2], self.0[3], self.0[4], self.0[5], self.0[6], self.0[7],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
unsafe_impl_endian_pod!(U16Bytes, U32Bytes, U64Bytes, I16Bytes, I32Bytes, I64Bytes);
|
||||
99
vendor/object/src/lib.rs
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
//! # `object`
|
||||
//!
|
||||
//! The `object` crate provides a unified interface to working with object files
|
||||
//! across platforms. It supports reading relocatable object files and executable files,
|
||||
//! and writing relocatable object files and some executable files.
|
||||
//!
|
||||
//! ## Raw struct definitions
|
||||
//!
|
||||
//! Raw structs are defined for: [ELF](elf), [Mach-O](macho), [PE/COFF](pe),
|
||||
//! [XCOFF](xcoff), [archive].
|
||||
//! Types and traits for zerocopy support are defined in the [`pod`] and [`endian`] modules.
|
||||
//!
|
||||
//! ## Unified read API
|
||||
//!
|
||||
//! The [`read`] module provides a unified read API using the [`read::Object`] trait.
|
||||
//! There is an implementation of this trait for [`read::File`], which allows reading any
|
||||
//! file format, as well as implementations for each file format.
|
||||
//!
|
||||
//! ## Low level read API
|
||||
//!
|
||||
//! The [`read#modules`] submodules define helpers that operate on the raw structs.
|
||||
//! These can be used instead of the unified API, or in conjunction with it to access
|
||||
//! details that are not available via the unified API.
|
||||
//!
|
||||
//! ## Unified write API
|
||||
//!
|
||||
//! The [`mod@write`] module provides a unified write API for relocatable object files
|
||||
//! using [`write::Object`]. This does not support writing executable files.
|
||||
//!
|
||||
//! ## Low level write API
|
||||
//!
|
||||
//! The [`mod@write#modules`] submodules define helpers for writing the raw structs.
|
||||
//!
|
||||
//! ## Shared definitions
|
||||
//!
|
||||
//! The crate provides a number of definitions that are used by both the read and write
|
||||
//! APIs. These are defined at the top level module, but none of these are the main entry
|
||||
//! points of the crate.
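//!
//! ## Example
//!
//! A minimal sketch of the unified read API (assumes the `read` and `std`
//! features are enabled):
//!
//! ```no_run
//! use object::{Object, ObjectSection};
//!
//! fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let data = std::fs::read("path/to/binary")?;
//!     let file = object::File::parse(&*data)?;
//!     for section in file.sections() {
//!         println!("{}", section.name()?);
//!     }
//!     Ok(())
//! }
//! ```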
|
||||
|
||||
#![deny(missing_docs)]
|
||||
#![deny(missing_debug_implementations)]
|
||||
#![no_std]
|
||||
#![warn(rust_2018_idioms)]
|
||||
// Style.
|
||||
#![allow(clippy::collapsible_if)]
|
||||
#![allow(clippy::comparison_chain)]
|
||||
#![allow(clippy::manual_flatten)]
|
||||
#![allow(clippy::match_like_matches_macro)]
|
||||
#![allow(clippy::single_match)]
|
||||
#![allow(clippy::type_complexity)]
|
||||
// Occurs due to fallible iteration.
|
||||
#![allow(clippy::should_implement_trait)]
|
||||
// Unit errors are converted to other types by callers.
|
||||
#![allow(clippy::result_unit_err)]
|
||||
// Worse readability sometimes.
|
||||
#![allow(clippy::collapsible_else_if)]
|
||||
|
||||
#[cfg(feature = "cargo-all")]
|
||||
compile_error!("'--all-features' is not supported; use '--features all' instead");
|
||||
|
||||
#[cfg(any(feature = "read_core", feature = "write_core"))]
|
||||
#[allow(unused_imports)]
|
||||
#[macro_use]
|
||||
extern crate alloc;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[allow(unused_imports)]
|
||||
#[macro_use]
|
||||
extern crate std;
|
||||
|
||||
mod common;
|
||||
pub use common::*;
|
||||
|
||||
#[macro_use]
|
||||
pub mod endian;
|
||||
pub use endian::*;
|
||||
|
||||
#[macro_use]
|
||||
pub mod pod;
|
||||
pub use pod::*;
|
||||
|
||||
#[cfg(feature = "read_core")]
|
||||
pub mod read;
|
||||
#[cfg(feature = "read_core")]
|
||||
pub use read::*;
|
||||
|
||||
#[cfg(feature = "write_core")]
|
||||
pub mod write;
|
||||
|
||||
#[cfg(feature = "archive")]
|
||||
pub mod archive;
|
||||
#[cfg(feature = "elf")]
|
||||
pub mod elf;
|
||||
#[cfg(feature = "macho")]
|
||||
pub mod macho;
|
||||
#[cfg(any(feature = "coff", feature = "pe"))]
|
||||
pub mod pe;
|
||||
#[cfg(feature = "xcoff")]
|
||||
pub mod xcoff;
|
||||
3307
vendor/object/src/macho.rs
vendored
Normal file
File diff suppressed because it is too large
3056
vendor/object/src/pe.rs
vendored
Normal file
File diff suppressed because it is too large
239
vendor/object/src/pod.rs
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
//! Tools for converting file format structures to and from bytes.
|
||||
//!
|
||||
//! This module should be replaced once Rust provides safe transmutes.
|
||||
|
||||
// This module provides functions for both read and write features.
|
||||
#![cfg_attr(
|
||||
not(all(feature = "read_core", feature = "write_core")),
|
||||
allow(dead_code)
|
||||
)]
|
||||
|
||||
use core::{mem, result, slice};
|
||||
|
||||
type Result<T> = result::Result<T, ()>;
|
||||
|
||||
/// A trait for types that can safely be converted from and to byte slices.
|
||||
///
|
||||
/// # Safety
|
||||
/// A type that is `Pod` must:
|
||||
/// - be `#[repr(C)]` or `#[repr(transparent)]`
|
||||
/// - have no invalid byte values
|
||||
/// - have no padding
|
||||
pub unsafe trait Pod: Copy + 'static {}
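// Sketch of the safety contract: a `#[repr(C)]` struct whose fields are all
// `Pod` and which has no padding can soundly implement `Pod` itself.
#[cfg(test)]
mod pod_contract_example {
    use super::{bytes_of, Pod};

    #[derive(Debug, Clone, Copy)]
    #[repr(C)]
    struct Pair {
        a: u32,
        b: u32,
    }

    // Safety: `#[repr(C)]`, no invalid byte values, and no padding
    // (two 4-byte fields with 4-byte alignment).
    unsafe impl Pod for Pair {}

    #[test]
    fn pair_as_bytes() {
        let p = Pair { a: 1, b: 2 };
        assert_eq!(bytes_of(&p).len(), 8);
    }
}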
|
||||
|
||||
/// Cast a byte slice to a `Pod` type.
|
||||
///
|
||||
/// Returns the type and the tail of the slice.
|
||||
#[inline]
|
||||
pub fn from_bytes<T: Pod>(data: &[u8]) -> Result<(&T, &[u8])> {
|
||||
let size = mem::size_of::<T>();
|
||||
let tail = data.get(size..).ok_or(())?;
|
||||
let ptr = data.as_ptr();
|
||||
if (ptr as usize) % mem::align_of::<T>() != 0 {
|
||||
return Err(());
|
||||
}
|
||||
// Safety:
|
||||
// The alignment and size are checked by this function.
|
||||
// The Pod trait ensures the type is valid to cast from bytes.
|
||||
let val = unsafe { &*ptr.cast() };
|
||||
Ok((val, tail))
|
||||
}
|
||||
|
||||
/// Cast a mutable byte slice to a `Pod` type.
|
||||
///
|
||||
/// Returns the type and the tail of the slice.
|
||||
#[inline]
|
||||
pub fn from_bytes_mut<T: Pod>(data: &mut [u8]) -> Result<(&mut T, &mut [u8])> {
|
||||
let size = mem::size_of::<T>();
|
||||
if size > data.len() {
|
||||
return Err(());
|
||||
}
|
||||
let (data, tail) = data.split_at_mut(size);
|
||||
let ptr = data.as_mut_ptr();
|
||||
if (ptr as usize) % mem::align_of::<T>() != 0 {
|
||||
return Err(());
|
||||
}
|
||||
// Safety:
|
||||
// The alignment and size are checked by this function.
|
||||
// The Pod trait ensures the type is valid to cast from bytes.
|
||||
let val = unsafe { &mut *ptr.cast() };
|
||||
Ok((val, tail))
|
||||
}
|
||||
|
||||
/// Cast a byte slice to a slice of a `Pod` type.
|
||||
///
|
||||
/// Returns the type slice and the tail of the byte slice.
|
||||
#[inline]
|
||||
pub fn slice_from_bytes<T: Pod>(data: &[u8], count: usize) -> Result<(&[T], &[u8])> {
|
||||
let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;
|
||||
let tail = data.get(size..).ok_or(())?;
|
||||
let ptr = data.as_ptr();
|
||||
if (ptr as usize) % mem::align_of::<T>() != 0 {
|
||||
return Err(());
|
||||
}
|
||||
// Safety:
|
||||
// The alignment and size are checked by this function.
|
||||
// The Pod trait ensures the type is valid to cast from bytes.
|
||||
let slice = unsafe { slice::from_raw_parts(ptr.cast(), count) };
|
||||
Ok((slice, tail))
|
||||
}
|
||||
|
||||
/// Cast a mutable byte slice to a slice of a `Pod` type.
|
||||
///
|
||||
/// Returns the type slice and the tail of the byte slice.
|
||||
#[inline]
|
||||
pub fn slice_from_bytes_mut<T: Pod>(
|
||||
data: &mut [u8],
|
||||
count: usize,
|
||||
) -> Result<(&mut [T], &mut [u8])> {
|
||||
let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;
|
||||
if size > data.len() {
|
||||
return Err(());
|
||||
}
|
||||
let (data, tail) = data.split_at_mut(size);
|
||||
let ptr = data.as_mut_ptr();
|
||||
if (ptr as usize) % mem::align_of::<T>() != 0 {
|
||||
return Err(());
|
||||
}
|
||||
// Safety:
|
||||
// The alignment and size are checked by this function.
|
||||
// The Pod trait ensures the type is valid to cast from bytes.
|
||||
let slice = unsafe { slice::from_raw_parts_mut(ptr.cast(), count) };
|
||||
Ok((slice, tail))
|
||||
}
|
||||
|
||||
/// Cast a `Pod` type to a byte slice.
|
||||
#[inline]
|
||||
pub fn bytes_of<T: Pod>(val: &T) -> &[u8] {
|
||||
let size = mem::size_of::<T>();
|
||||
// Safety:
|
||||
// Any alignment is allowed.
|
||||
// The size is determined in this function.
|
||||
// The Pod trait ensures the type is valid to cast to bytes.
|
||||
unsafe { slice::from_raw_parts(slice::from_ref(val).as_ptr().cast(), size) }
|
||||
}
|
||||
|
||||
/// Cast a `Pod` type to a mutable byte slice.
|
||||
#[inline]
|
||||
pub fn bytes_of_mut<T: Pod>(val: &mut T) -> &mut [u8] {
|
||||
let size = mem::size_of::<T>();
|
||||
// Safety:
|
||||
// Any alignment is allowed.
|
||||
// The size is determined in this function.
|
||||
// The Pod trait ensures the type is valid to cast to bytes.
|
||||
unsafe { slice::from_raw_parts_mut(slice::from_mut(val).as_mut_ptr().cast(), size) }
|
||||
}
|
||||
|
||||
/// Cast a slice of a `Pod` type to a byte slice.
|
||||
#[inline]
|
||||
pub fn bytes_of_slice<T: Pod>(val: &[T]) -> &[u8] {
|
||||
let size = val.len().wrapping_mul(mem::size_of::<T>());
|
||||
// Safety:
|
||||
// Any alignment is allowed.
|
||||
// The size is determined in this function.
|
||||
// The Pod trait ensures the type is valid to cast to bytes.
|
||||
unsafe { slice::from_raw_parts(val.as_ptr().cast(), size) }
|
||||
}
|
||||
|
||||
/// Cast a slice of a `Pod` type to a mutable byte slice.
|
||||
#[inline]
|
||||
pub fn bytes_of_slice_mut<T: Pod>(val: &mut [T]) -> &mut [u8] {
|
||||
let size = val.len().wrapping_mul(mem::size_of::<T>());
|
||||
// Safety:
|
||||
// Any alignment is allowed.
|
||||
// The size is determined in this function.
|
||||
// The Pod trait ensures the type is valid to cast to bytes.
|
||||
unsafe { slice::from_raw_parts_mut(val.as_mut_ptr().cast(), size) }
|
||||
}
|
||||
|
||||
macro_rules! unsafe_impl_pod {
|
||||
($($struct_name:ident),+ $(,)?) => {
|
||||
$(
|
||||
unsafe impl Pod for $struct_name { }
|
||||
)+
|
||||
}
|
||||
}
|
||||
|
||||
unsafe_impl_pod!(u8, u16, u32, u64);
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn single() {
|
||||
let x = u32::to_be(0x0123_4567);
|
||||
let mut x_mut = x;
|
||||
let bytes = bytes_of(&x);
|
||||
let bytes_mut = bytes_of_mut(&mut x_mut);
|
||||
assert_eq!(bytes, [0x01, 0x23, 0x45, 0x67]);
|
||||
assert_eq!(bytes, bytes_mut);
|
||||
|
||||
let x16 = [u16::to_be(0x0123), u16::to_be(0x4567)];
|
||||
|
||||
let (y, tail) = from_bytes::<u32>(bytes).unwrap();
|
||||
let (y_mut, tail_mut) = from_bytes_mut::<u32>(bytes_mut).unwrap();
|
||||
assert_eq!(*y, x);
|
||||
assert_eq!(y, y_mut);
|
||||
assert_eq!(tail, &[]);
|
||||
assert_eq!(tail, tail_mut);
|
||||
|
||||
let (y, tail) = from_bytes::<u16>(bytes).unwrap();
|
||||
let (y_mut, tail_mut) = from_bytes_mut::<u16>(bytes_mut).unwrap();
|
||||
assert_eq!(*y, x16[0]);
|
||||
assert_eq!(y, y_mut);
|
||||
assert_eq!(tail, &bytes[2..]);
|
||||
assert_eq!(tail, tail_mut);
|
||||
|
||||
let (y, tail) = from_bytes::<u16>(&bytes[2..]).unwrap();
|
||||
let (y_mut, tail_mut) = from_bytes_mut::<u16>(&mut bytes_mut[2..]).unwrap();
|
||||
assert_eq!(*y, x16[1]);
|
||||
assert_eq!(y, y_mut);
|
||||
assert_eq!(tail, &[]);
|
||||
assert_eq!(tail, tail_mut);
|
||||
|
||||
assert_eq!(from_bytes::<u16>(&bytes[1..]), Err(()));
|
||||
assert_eq!(from_bytes::<u16>(&bytes[3..]), Err(()));
|
||||
assert_eq!(from_bytes::<u16>(&bytes[4..]), Err(()));
|
||||
assert_eq!(from_bytes_mut::<u16>(&mut bytes_mut[1..]), Err(()));
|
||||
assert_eq!(from_bytes_mut::<u16>(&mut bytes_mut[3..]), Err(()));
|
||||
assert_eq!(from_bytes_mut::<u16>(&mut bytes_mut[4..]), Err(()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn slice() {
|
||||
let x = [
|
||||
u16::to_be(0x0123),
|
||||
u16::to_be(0x4567),
|
||||
u16::to_be(0x89ab),
|
||||
u16::to_be(0xcdef),
|
||||
];
|
||||
let mut x_mut = x;
|
||||
|
||||
let bytes = bytes_of_slice(&x);
|
||||
let bytes_mut = bytes_of_slice_mut(&mut x_mut);
|
||||
assert_eq!(bytes, [0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
|
||||
assert_eq!(bytes, bytes_mut);
|
||||
|
||||
let (y, tail) = slice_from_bytes::<u16>(bytes, 4).unwrap();
|
||||
let (y_mut, tail_mut) = slice_from_bytes_mut::<u16>(bytes_mut, 4).unwrap();
|
||||
assert_eq!(y, x);
|
||||
assert_eq!(y, y_mut);
|
||||
assert_eq!(tail, &[]);
|
||||
assert_eq!(tail, tail_mut);
|
||||
|
||||
let (y, tail) = slice_from_bytes::<u16>(&bytes[2..], 2).unwrap();
|
||||
let (y_mut, tail_mut) = slice_from_bytes::<u16>(&mut bytes_mut[2..], 2).unwrap();
|
||||
assert_eq!(y, &x[1..3]);
|
||||
assert_eq!(y, y_mut);
|
||||
assert_eq!(tail, &bytes[6..]);
|
||||
assert_eq!(tail, tail_mut);
|
||||
|
||||
assert_eq!(slice_from_bytes::<u16>(bytes, 5), Err(()));
|
||||
assert_eq!(slice_from_bytes::<u16>(&bytes[2..], 4), Err(()));
|
||||
assert_eq!(slice_from_bytes::<u16>(&bytes[1..], 2), Err(()));
|
||||
assert_eq!(slice_from_bytes_mut::<u16>(bytes_mut, 5), Err(()));
|
||||
assert_eq!(slice_from_bytes_mut::<u16>(&mut bytes_mut[2..], 4), Err(()));
|
||||
assert_eq!(slice_from_bytes_mut::<u16>(&mut bytes_mut[1..], 2), Err(()));
|
||||
}
|
||||
}
|
||||
1328
vendor/object/src/read/any.rs
vendored
Normal file
File diff suppressed because it is too large
759
vendor/object/src/read/archive.rs
vendored
Normal file
@@ -0,0 +1,759 @@
|
||||
//! Support for archive files.
|
||||
//!
|
||||
//! ## Example
|
||||
//! ```no_run
|
||||
//! use object::{Object, ObjectSection};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads an archive and displays the name of each member.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let file = object::read::archive::ArchiveFile::parse(&*data)?;
|
||||
//! for member in file.members() {
|
||||
//! let member = member?;
|
||||
//! println!("{}", String::from_utf8_lossy(member.name()));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
|
||||
use core::convert::TryInto;
|
||||
|
||||
use crate::archive;
|
||||
use crate::read::{self, Bytes, Error, ReadError, ReadRef};
|
||||
|
||||
/// The kind of archive format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum ArchiveKind {
|
||||
/// There are no special files that indicate the archive format.
|
||||
Unknown,
|
||||
/// The GNU (or System V) archive format.
|
||||
Gnu,
|
||||
/// The GNU (or System V) archive format with 64-bit symbol table.
|
||||
Gnu64,
|
||||
/// The BSD archive format.
|
||||
Bsd,
|
||||
/// The BSD archive format with 64-bit symbol table.
|
||||
///
|
||||
/// This is used for Darwin.
|
||||
Bsd64,
|
||||
/// The Windows COFF archive format.
|
||||
Coff,
|
||||
/// The AIX big archive format.
|
||||
AixBig,
|
||||
}
|
||||
|
||||
/// The list of members in the archive.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
enum Members<'data> {
|
||||
Common {
|
||||
offset: u64,
|
||||
end_offset: u64,
|
||||
},
|
||||
AixBig {
|
||||
index: &'data [archive::AixMemberOffset],
|
||||
},
|
||||
}
|
||||
|
||||
/// A partially parsed archive file.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ArchiveFile<'data, R: ReadRef<'data> = &'data [u8]> {
|
||||
data: R,
|
||||
kind: ArchiveKind,
|
||||
members: Members<'data>,
|
||||
symbols: (u64, u64),
|
||||
names: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> ArchiveFile<'data, R> {
|
||||
/// Parse the archive header and special members.
|
||||
pub fn parse(data: R) -> read::Result<Self> {
|
||||
let len = data.len().read_error("Unknown archive length")?;
|
||||
let mut tail = 0;
|
||||
let magic = data
|
||||
.read_bytes(&mut tail, archive::MAGIC.len() as u64)
|
||||
.read_error("Invalid archive size")?;
|
||||
|
||||
if magic == archive::AIX_BIG_MAGIC {
|
||||
return Self::parse_aixbig(data);
|
||||
} else if magic != archive::MAGIC {
|
||||
return Err(Error("Unsupported archive identifier"));
|
||||
}
|
||||
|
||||
let mut members_offset = tail;
|
||||
let members_end_offset = len;
|
||||
|
||||
let mut file = ArchiveFile {
|
||||
data,
|
||||
kind: ArchiveKind::Unknown,
|
||||
members: Members::Common {
|
||||
offset: 0,
|
||||
end_offset: 0,
|
||||
},
|
||||
symbols: (0, 0),
|
||||
names: &[],
|
||||
};
|
||||
|
||||
// The first few members may be special, so parse them.
|
||||
// GNU has:
|
||||
// - "/" or "/SYM64/": symbol table (optional)
|
||||
// - "//": names table (optional)
|
||||
// COFF has:
|
||||
// - "/": first linker member
|
||||
// - "/": second linker member
|
||||
// - "//": names table
|
||||
// BSD has:
|
||||
// - "__.SYMDEF" or "__.SYMDEF SORTED": symbol table (optional)
|
||||
// BSD 64-bit has:
|
||||
// - "__.SYMDEF_64" or "__.SYMDEF_64 SORTED": symbol table (optional)
|
||||
// BSD may use the extended name for the symbol table. This is handled
|
||||
// by `ArchiveMember::parse`.
|
||||
if tail < len {
|
||||
let member = ArchiveMember::parse(data, &mut tail, &[])?;
|
||||
if member.name == b"/" {
|
||||
// GNU symbol table (unless we later determine this is COFF).
|
||||
file.kind = ArchiveKind::Gnu;
|
||||
file.symbols = member.file_range();
|
||||
members_offset = tail;
|
||||
|
||||
if tail < len {
|
||||
let member = ArchiveMember::parse(data, &mut tail, &[])?;
|
||||
if member.name == b"/" {
|
||||
// COFF linker member.
|
||||
file.kind = ArchiveKind::Coff;
|
||||
file.symbols = member.file_range();
|
||||
members_offset = tail;
|
||||
|
||||
if tail < len {
|
||||
let member = ArchiveMember::parse(data, &mut tail, &[])?;
|
||||
if member.name == b"//" {
|
||||
// COFF names table.
|
||||
file.names = member.data(data)?;
|
||||
members_offset = tail;
|
||||
}
|
||||
}
|
||||
} else if member.name == b"//" {
|
||||
// GNU names table.
|
||||
file.names = member.data(data)?;
|
||||
members_offset = tail;
|
||||
}
|
||||
}
|
||||
} else if member.name == b"/SYM64/" {
|
||||
// GNU 64-bit symbol table.
|
||||
file.kind = ArchiveKind::Gnu64;
|
||||
file.symbols = member.file_range();
|
||||
members_offset = tail;
|
||||
|
||||
if tail < len {
|
||||
let member = ArchiveMember::parse(data, &mut tail, &[])?;
|
||||
if member.name == b"//" {
|
||||
// GNU names table.
|
||||
file.names = member.data(data)?;
|
||||
members_offset = tail;
|
||||
}
|
||||
}
|
||||
} else if member.name == b"//" {
|
||||
// GNU names table.
|
||||
file.kind = ArchiveKind::Gnu;
|
||||
file.names = member.data(data)?;
|
||||
members_offset = tail;
|
||||
} else if member.name == b"__.SYMDEF" || member.name == b"__.SYMDEF SORTED" {
|
||||
// BSD symbol table.
|
||||
file.kind = ArchiveKind::Bsd;
|
||||
file.symbols = member.file_range();
|
||||
members_offset = tail;
|
||||
} else if member.name == b"__.SYMDEF_64" || member.name == b"__.SYMDEF_64 SORTED" {
|
||||
// BSD 64-bit symbol table.
|
||||
file.kind = ArchiveKind::Bsd64;
|
||||
file.symbols = member.file_range();
|
||||
members_offset = tail;
|
||||
} else {
|
||||
// TODO: This could still be a BSD file. We leave this as unknown for now.
|
||||
}
|
||||
}
|
||||
file.members = Members::Common {
|
||||
offset: members_offset,
|
||||
end_offset: members_end_offset,
|
||||
};
|
||||
Ok(file)
|
||||
}
|
||||
|
||||
fn parse_aixbig(data: R) -> read::Result<Self> {
|
||||
let mut tail = 0;
|
||||
|
||||
let file_header = data
|
||||
.read::<archive::AixFileHeader>(&mut tail)
|
||||
.read_error("Invalid AIX big archive file header")?;
|
||||
// Caller already validated this.
|
||||
debug_assert_eq!(file_header.magic, archive::AIX_BIG_MAGIC);
|
||||
|
||||
let mut file = ArchiveFile {
|
||||
data,
|
||||
kind: ArchiveKind::AixBig,
|
||||
members: Members::AixBig { index: &[] },
|
||||
symbols: (0, 0),
|
||||
names: &[],
|
||||
};
|
||||
|
||||
// Read the span of the symbol table.
|
||||
let symtbl64 = parse_u64_digits(&file_header.gst64off, 10)
|
||||
.read_error("Invalid offset to 64-bit symbol table in AIX big archive")?;
|
||||
if symtbl64 > 0 {
|
||||
// The symbol table is also a file with header.
|
||||
let member = ArchiveMember::parse_aixbig(data, symtbl64)?;
|
||||
file.symbols = member.file_range();
|
||||
} else {
|
||||
let symtbl = parse_u64_digits(&file_header.gstoff, 10)
|
||||
.read_error("Invalid offset to symbol table in AIX big archive")?;
|
||||
if symtbl > 0 {
|
||||
// The symbol table is also a file with header.
|
||||
let member = ArchiveMember::parse_aixbig(data, symtbl)?;
|
||||
file.symbols = member.file_range();
|
||||
}
|
||||
}
|
||||
|
||||
// The member index table of the big archive lists the file entries with their offsets and names.
// To avoid a potential infinite loop (the members form a doubly linked list), the
// iterator goes through the index instead of the real members.
|
||||
let member_table_offset = parse_u64_digits(&file_header.memoff, 10)
|
||||
.read_error("Invalid offset for member table of AIX big archive")?;
|
||||
if member_table_offset == 0 {
|
||||
// The offset is zero if the archive contains no file members.
|
||||
return Ok(file);
|
||||
}
|
||||
|
||||
// The member index table is also a file with header.
|
||||
let member = ArchiveMember::parse_aixbig(data, member_table_offset)?;
|
||||
let mut member_data = Bytes(member.data(data)?);
|
||||
|
||||
// Structure of the member index table:
// - number of entries (20 bytes)
// - offset of each entry (20*N bytes)
// - names string table (the remaining bytes, up to the size given in the header)
|
||||
let members_count_bytes = member_data
|
||||
.read_slice::<u8>(20)
|
||||
.read_error("Missing member count in AIX big archive")?;
|
||||
let members_count = parse_u64_digits(members_count_bytes, 10)
|
||||
.and_then(|size| size.try_into().ok())
|
||||
.read_error("Invalid member count in AIX big archive")?;
|
||||
let index = member_data
|
||||
.read_slice::<archive::AixMemberOffset>(members_count)
|
||||
.read_error("Member count overflow in AIX big archive")?;
|
||||
file.members = Members::AixBig { index };
|
||||
|
||||
Ok(file)
|
||||
}
|
||||
|
||||
/// Return the archive format.
|
||||
#[inline]
|
||||
pub fn kind(&self) -> ArchiveKind {
|
||||
self.kind
|
||||
}
|
||||
|
||||
/// Iterate over the members of the archive.
|
||||
///
|
||||
/// This does not return special members.
|
||||
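///
/// # Example
///
/// A brief sketch that lists each member's name and size;
/// the input path is a placeholder.
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let data = std::fs::read("path/to/archive.a")?; // placeholder path
/// let archive = object::read::archive::ArchiveFile::parse(&*data)?;
/// for member in archive.members() {
///     let member = member?;
///     let name = String::from_utf8_lossy(member.name());
///     println!("{} ({} bytes)", name, member.data(&*data)?.len());
/// }
/// # Ok(())
/// # }
/// ```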
#[inline]
|
||||
pub fn members(&self) -> ArchiveMemberIterator<'data, R> {
|
||||
ArchiveMemberIterator {
|
||||
data: self.data,
|
||||
members: self.members,
|
||||
names: self.names,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the members of an archive.
|
||||
#[derive(Debug)]
|
||||
pub struct ArchiveMemberIterator<'data, R: ReadRef<'data> = &'data [u8]> {
|
||||
data: R,
|
||||
members: Members<'data>,
|
||||
names: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> Iterator for ArchiveMemberIterator<'data, R> {
|
||||
type Item = read::Result<ArchiveMember<'data>>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match &mut self.members {
|
||||
Members::Common {
|
||||
ref mut offset,
|
||||
ref mut end_offset,
|
||||
} => {
|
||||
if *offset >= *end_offset {
|
||||
return None;
|
||||
}
|
||||
let member = ArchiveMember::parse(self.data, offset, self.names);
|
||||
if member.is_err() {
|
||||
*offset = *end_offset;
|
||||
}
|
||||
Some(member)
|
||||
}
|
||||
Members::AixBig { ref mut index } => match **index {
|
||||
[] => None,
|
||||
[ref first, ref rest @ ..] => {
|
||||
*index = rest;
|
||||
let member = ArchiveMember::parse_aixbig_index(self.data, first);
|
||||
if member.is_err() {
|
||||
*index = &[];
|
||||
}
|
||||
Some(member)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An archive member header.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
enum MemberHeader<'data> {
|
||||
/// Common header used by many formats.
|
||||
Common(&'data archive::Header),
|
||||
/// AIX big archive header
|
||||
AixBig(&'data archive::AixHeader),
|
||||
}
|
||||
|
||||
/// A partially parsed archive member.
|
||||
#[derive(Debug)]
|
||||
pub struct ArchiveMember<'data> {
|
||||
header: MemberHeader<'data>,
|
||||
name: &'data [u8],
|
||||
offset: u64,
|
||||
size: u64,
|
||||
}
|
||||
|
||||
impl<'data> ArchiveMember<'data> {
|
||||
/// Parse the member header, name, and file data in an archive with the common format.
|
||||
///
|
||||
/// This reads the extended name (if any) and adjusts the file size.
|
||||
fn parse<R: ReadRef<'data>>(
|
||||
data: R,
|
||||
offset: &mut u64,
|
||||
names: &'data [u8],
|
||||
) -> read::Result<Self> {
|
||||
let header = data
|
||||
.read::<archive::Header>(offset)
|
||||
.read_error("Invalid archive member header")?;
|
||||
if header.terminator != archive::TERMINATOR {
|
||||
return Err(Error("Invalid archive terminator"));
|
||||
}
|
||||
|
||||
let mut file_offset = *offset;
|
||||
let mut file_size =
|
||||
parse_u64_digits(&header.size, 10).read_error("Invalid archive member size")?;
|
||||
*offset = offset
|
||||
.checked_add(file_size)
|
||||
.read_error("Archive member size is too large")?;
|
||||
// Entries are padded to an even number of bytes.
|
||||
if (file_size & 1) != 0 {
|
||||
*offset = offset.saturating_add(1);
|
||||
}
|
||||
|
||||
let name = if header.name[0] == b'/' && (header.name[1] as char).is_ascii_digit() {
|
||||
// Read file name from the names table.
|
||||
parse_sysv_extended_name(&header.name[1..], names)
|
||||
.read_error("Invalid archive extended name offset")?
|
||||
} else if &header.name[..3] == b"#1/" && (header.name[3] as char).is_ascii_digit() {
|
||||
// Read file name from the start of the file data.
|
||||
parse_bsd_extended_name(&header.name[3..], data, &mut file_offset, &mut file_size)
|
||||
.read_error("Invalid archive extended name length")?
|
||||
} else if header.name[0] == b'/' {
|
||||
let name_len = memchr::memchr(b' ', &header.name).unwrap_or(header.name.len());
|
||||
&header.name[..name_len]
|
||||
} else {
|
||||
let name_len = memchr::memchr(b'/', &header.name)
|
||||
.or_else(|| memchr::memchr(b' ', &header.name))
|
||||
.unwrap_or(header.name.len());
|
||||
&header.name[..name_len]
|
||||
};
|
||||
|
||||
Ok(ArchiveMember {
|
||||
header: MemberHeader::Common(header),
|
||||
name,
|
||||
offset: file_offset,
|
||||
size: file_size,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse a member index entry in an AIX big archive,
|
||||
/// and then parse the member header, name, and file data.
|
||||
fn parse_aixbig_index<R: ReadRef<'data>>(
|
||||
data: R,
|
||||
index: &archive::AixMemberOffset,
|
||||
) -> read::Result<Self> {
|
||||
let offset = parse_u64_digits(&index.0, 10)
|
||||
.read_error("Invalid AIX big archive file member offset")?;
|
||||
Self::parse_aixbig(data, offset)
|
||||
}
|
||||
|
||||
/// Parse the member header, name, and file data in an AIX big archive.
|
||||
fn parse_aixbig<R: ReadRef<'data>>(data: R, mut offset: u64) -> read::Result<Self> {
|
||||
// The format was described at
|
||||
// https://www.ibm.com/docs/en/aix/7.3?topic=formats-ar-file-format-big
|
||||
let header = data
|
||||
.read::<archive::AixHeader>(&mut offset)
|
||||
.read_error("Invalid AIX big archive member header")?;
|
||||
let name_length = parse_u64_digits(&header.namlen, 10)
|
||||
.read_error("Invalid AIX big archive member name length")?;
|
||||
let name = data
|
||||
.read_bytes(&mut offset, name_length)
|
||||
.read_error("Invalid AIX big archive member name")?;
|
||||
|
||||
// The actual data for a file member begins at the first even-byte boundary beyond the
|
||||
// member header and continues for the number of bytes specified by the ar_size field. The
|
||||
// ar command inserts null bytes for padding where necessary.
|
||||
if offset & 1 != 0 {
|
||||
offset = offset.saturating_add(1);
|
||||
}
|
||||
// Because of the even-byte boundary, we have to read and check the terminator after the header.
|
||||
let terminator = data
|
||||
.read_bytes(&mut offset, 2)
|
||||
.read_error("Invalid AIX big archive terminator")?;
|
||||
if terminator != archive::TERMINATOR {
|
||||
return Err(Error("Invalid AIX big archive terminator"));
|
||||
}
|
||||
|
||||
let size = parse_u64_digits(&header.size, 10)
|
||||
.read_error("Invalid archive member size in AIX big archive")?;
|
||||
Ok(ArchiveMember {
|
||||
header: MemberHeader::AixBig(header),
|
||||
name,
|
||||
offset,
|
||||
size,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the raw header that is common to many archive formats.
|
||||
///
|
||||
/// Returns `None` if this archive does not use the common header format.
|
||||
#[inline]
|
||||
pub fn header(&self) -> Option<&'data archive::Header> {
|
||||
match self.header {
|
||||
MemberHeader::Common(header) => Some(header),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the raw header for AIX big archives.
|
||||
///
|
||||
/// Returns `None` if this is not an AIX big archive.
|
||||
#[inline]
|
||||
pub fn aix_header(&self) -> Option<&'data archive::AixHeader> {
|
||||
match self.header {
|
||||
MemberHeader::AixBig(header) => Some(header),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the parsed file name.
|
||||
///
|
||||
/// This may be an extended file name.
|
||||
#[inline]
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
self.name
|
||||
}
|
||||
|
||||
/// Parse the file modification timestamp from the header.
|
||||
#[inline]
|
||||
pub fn date(&self) -> Option<u64> {
|
||||
match &self.header {
|
||||
MemberHeader::Common(header) => parse_u64_digits(&header.date, 10),
|
||||
MemberHeader::AixBig(header) => parse_u64_digits(&header.date, 10),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the user ID from the header.
|
||||
#[inline]
|
||||
pub fn uid(&self) -> Option<u64> {
|
||||
match &self.header {
|
||||
MemberHeader::Common(header) => parse_u64_digits(&header.uid, 10),
|
||||
MemberHeader::AixBig(header) => parse_u64_digits(&header.uid, 10),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the group ID from the header.
|
||||
#[inline]
|
||||
pub fn gid(&self) -> Option<u64> {
|
||||
match &self.header {
|
||||
MemberHeader::Common(header) => parse_u64_digits(&header.gid, 10),
|
||||
MemberHeader::AixBig(header) => parse_u64_digits(&header.gid, 10),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the file mode from the header.
|
||||
#[inline]
|
||||
pub fn mode(&self) -> Option<u64> {
|
||||
match &self.header {
|
||||
MemberHeader::Common(header) => parse_u64_digits(&header.mode, 8),
|
||||
MemberHeader::AixBig(header) => parse_u64_digits(&header.mode, 8),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the offset and size of the file data.
|
||||
pub fn file_range(&self) -> (u64, u64) {
|
||||
(self.offset, self.size)
|
||||
}
|
||||
|
||||
/// Return the file data.
|
||||
#[inline]
|
||||
pub fn data<R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [u8]> {
|
||||
data.read_bytes_at(self.offset, self.size)
|
||||
.read_error("Archive member size is too large")
|
||||
}
|
||||
}
|
||||
|
||||
// Ignores bytes starting from the first space.
fn parse_u64_digits(digits: &[u8], radix: u32) -> Option<u64> {
    if let [b' ', ..] = digits {
        return None;
    }
    let mut result: u64 = 0;
    for &c in digits {
        if c == b' ' {
            return Some(result);
        } else {
            let x = (c as char).to_digit(radix)?;
            result = result
                .checked_mul(u64::from(radix))?
                .checked_add(u64::from(x))?;
        }
    }
    Some(result)
}

fn parse_sysv_extended_name<'data>(digits: &[u8], names: &'data [u8]) -> Result<&'data [u8], ()> {
    let offset = parse_u64_digits(digits, 10).ok_or(())?;
    let offset = offset.try_into().map_err(|_| ())?;
    let name_data = names.get(offset..).ok_or(())?;
    let name = match memchr::memchr2(b'/', b'\0', name_data) {
        Some(len) => &name_data[..len],
        None => name_data,
    };
    Ok(name)
}

/// Modifies `data` to start after the extended name.
fn parse_bsd_extended_name<'data, R: ReadRef<'data>>(
    digits: &[u8],
    data: R,
    offset: &mut u64,
    size: &mut u64,
) -> Result<&'data [u8], ()> {
    let len = parse_u64_digits(digits, 10).ok_or(())?;
    *size = size.checked_sub(len).ok_or(())?;
    let name_data = data.read_bytes(offset, len)?;
    let name = match memchr::memchr(b'\0', name_data) {
        Some(len) => &name_data[..len],
        None => name_data,
    };
    Ok(name)
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn kind() {
|
||||
let data = b"!<arch>\n";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Unknown);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/SYM64/ 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/SYM64/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
__.SYMDEF 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/9 13 `\n\
|
||||
__.SYMDEF0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/16 20 `\n\
|
||||
__.SYMDEF SORTED0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
__.SYMDEF_64 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/12 16 `\n\
|
||||
__.SYMDEF_640000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
#1/19 23 `\n\
|
||||
__.SYMDEF_64 SORTED0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Bsd64);
|
||||
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
/ 4 `\n\
|
||||
0000\
|
||||
// 4 `\n\
|
||||
0000";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Coff);
|
||||
|
||||
let data = b"\
|
||||
<bigaf>\n\
|
||||
0 0 \
|
||||
0 0 \
|
||||
0 128 \
|
||||
6 0 \
|
||||
0 \0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\
|
||||
\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0";
|
||||
let archive = ArchiveFile::parse(&data[..]).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::AixBig);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn gnu_names() {
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
// 18 `\n\
|
||||
0123456789abcdef/\n\
|
||||
s p a c e/ 0 0 0 644 4 `\n\
|
||||
0000\
|
||||
0123456789abcde/0 0 0 644 3 `\n\
|
||||
odd\n\
|
||||
/0 0 0 0 644 4 `\n\
|
||||
even";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Gnu);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"s p a c e");
|
||||
assert_eq!(member.data(data).unwrap(), &b"0000"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcde");
|
||||
assert_eq!(member.data(data).unwrap(), &b"odd"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"even"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bsd_names() {
|
||||
let data = b"\
|
||||
!<arch>\n\
|
||||
0123456789abcde 0 0 0 644 3 `\n\
|
||||
odd\n\
|
||||
#1/16 0 0 0 644 20 `\n\
|
||||
0123456789abcdefeven";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::Unknown);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcde");
|
||||
assert_eq!(member.data(data).unwrap(), &b"odd"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"even"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aix_names() {
|
||||
let data = b"\
|
||||
<bigaf>\n\
|
||||
396 0 0 \
|
||||
128 262 0 \
|
||||
4 262 0 \
|
||||
1662610370 223 1 644 16 \
|
||||
0123456789abcdef`\nord\n\
|
||||
4 396 128 \
|
||||
1662610374 223 1 644 16 \
|
||||
fedcba9876543210`\nrev\n\
|
||||
94 0 262 \
|
||||
0 0 0 0 0 \
|
||||
`\n2 128 \
|
||||
262 0123456789abcdef\0fedcba9876543210\0";
|
||||
let data = &data[..];
|
||||
let archive = ArchiveFile::parse(data).unwrap();
|
||||
assert_eq!(archive.kind(), ArchiveKind::AixBig);
|
||||
let mut members = archive.members();
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"0123456789abcdef");
|
||||
assert_eq!(member.data(data).unwrap(), &b"ord\n"[..]);
|
||||
|
||||
let member = members.next().unwrap().unwrap();
|
||||
assert_eq!(member.name(), b"fedcba9876543210");
|
||||
assert_eq!(member.data(data).unwrap(), &b"rev\n"[..]);
|
||||
|
||||
assert!(members.next().is_none());
|
||||
}
|
||||
}
|
||||
211
vendor/object/src/read/coff/comdat.rs
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
use core::str;
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::{
|
||||
self, ComdatKind, ObjectComdat, ReadError, ReadRef, Result, SectionIndex, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader, ImageSymbol};
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigComdatIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffComdatIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`CoffFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct CoffComdatIterator<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffComdatIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffComdat<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let index = self.index;
|
||||
let symbol = self.file.common.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
if let Some(comdat) = CoffComdat::parse(self.file, symbol, index) {
|
||||
return Some(comdat);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`CoffBigFile`](super::CoffBigFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
|
||||
pub type CoffBigComdat<'data, 'file, R = &'data [u8]> =
|
||||
CoffComdat<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A COMDAT section group in a [`CoffFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct CoffComdat<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
file: &'file CoffFile<'data, R, Coff>,
|
||||
symbol_index: SymbolIndex,
|
||||
symbol: &'data Coff::ImageSymbol,
|
||||
selection: u8,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffComdat<'data, 'file, R, Coff> {
|
||||
fn parse(
|
||||
file: &'file CoffFile<'data, R, Coff>,
|
||||
section_symbol: &'data Coff::ImageSymbol,
|
||||
index: usize,
|
||||
) -> Option<CoffComdat<'data, 'file, R, Coff>> {
|
||||
// Must be a section symbol.
|
||||
if !section_symbol.has_aux_section() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Auxiliary record must have a non-associative selection.
|
||||
let aux = file.common.symbols.aux_section(index).ok()?;
|
||||
let selection = aux.selection;
|
||||
if selection == 0 || selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Find the COMDAT symbol.
|
||||
let mut symbol_index = index;
|
||||
let mut symbol = section_symbol;
|
||||
let section_number = section_symbol.section_number();
|
||||
loop {
|
||||
symbol_index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
symbol = file.common.symbols.symbol(symbol_index).ok()?;
|
||||
if section_number == symbol.section_number() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Some(CoffComdat {
|
||||
file,
|
||||
symbol_index: SymbolIndex(symbol_index),
|
||||
symbol,
|
||||
selection,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffComdat<'data, 'file, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectComdat<'data>
|
||||
for CoffComdat<'data, 'file, R, Coff>
|
||||
{
|
||||
type SectionIterator = CoffComdatSectionIterator<'data, 'file, R, Coff>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
match self.selection {
|
||||
pe::IMAGE_COMDAT_SELECT_NODUPLICATES => ComdatKind::NoDuplicates,
|
||||
pe::IMAGE_COMDAT_SELECT_ANY => ComdatKind::Any,
|
||||
pe::IMAGE_COMDAT_SELECT_SAME_SIZE => ComdatKind::SameSize,
|
||||
pe::IMAGE_COMDAT_SELECT_EXACT_MATCH => ComdatKind::ExactMatch,
|
||||
pe::IMAGE_COMDAT_SELECT_LARGEST => ComdatKind::Largest,
|
||||
pe::IMAGE_COMDAT_SELECT_NEWEST => ComdatKind::Newest,
|
||||
_ => ComdatKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
self.symbol_index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
// Find the name of first symbol referring to the section.
|
||||
self.symbol.name(self.file.common.symbols.strings())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
let bytes = self.name_bytes()?;
|
||||
str::from_utf8(bytes)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 COFF COMDAT name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
CoffComdatSectionIterator {
|
||||
file: self.file,
|
||||
section_number: self.symbol.section_number(),
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigComdatSectionIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffComdatSectionIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`CoffFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct CoffComdatSectionIterator<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
file: &'file CoffFile<'data, R, Coff>,
|
||||
section_number: i32,
|
||||
index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffComdatSectionIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
// Find associated COMDAT symbols.
|
||||
// TODO: it seems gcc doesn't use associated symbols for this
|
||||
loop {
|
||||
let index = self.index;
|
||||
let symbol = self.file.common.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
|
||||
// Must be a section symbol.
|
||||
if !symbol.has_aux_section() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let section_number = symbol.section_number();
|
||||
|
||||
let aux = self.file.common.symbols.aux_section(index).ok()?;
|
||||
if aux.selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
|
||||
let number = if Coff::is_type_bigobj() {
|
||||
u32::from(aux.number.get(LE)) | (u32::from(aux.high_number.get(LE)) << 16)
|
||||
} else {
|
||||
u32::from(aux.number.get(LE))
|
||||
};
|
||||
if number as i32 == self.section_number {
|
||||
return Some(SectionIndex(section_number as usize));
|
||||
}
|
||||
} else if aux.selection != 0 {
|
||||
if section_number == self.section_number {
|
||||
return Some(SectionIndex(section_number as usize));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
381
vendor/object/src/read/coff/file.rs
vendored
Normal file
@@ -0,0 +1,381 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, Export, FileFlags, Import, NoDynamicRelocationIterator, Object, ObjectKind,
|
||||
ObjectSection, ReadError, ReadRef, Result, SectionIndex, SubArchitecture, SymbolIndex,
|
||||
};
|
||||
use crate::{pe, LittleEndian as LE, Pod};
|
||||
|
||||
use super::{
|
||||
CoffComdat, CoffComdatIterator, CoffSection, CoffSectionIterator, CoffSegment,
|
||||
CoffSegmentIterator, CoffSymbol, CoffSymbolIterator, CoffSymbolTable, ImageSymbol,
|
||||
SectionTable, SymbolTable,
|
||||
};
|
||||
|
||||
/// The common parts of `PeFile` and `CoffFile`.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct CoffCommon<'data, R: ReadRef<'data>, Coff: CoffHeader = pe::ImageFileHeader> {
|
||||
pub(crate) sections: SectionTable<'data>,
|
||||
pub(crate) symbols: SymbolTable<'data, R, Coff>,
|
||||
pub(crate) image_base: u64,
|
||||
}
|
||||
|
||||
/// A COFF bigobj object file with 32-bit section numbers.
|
||||
///
|
||||
/// This is a file that starts with [`pe::AnonObjectHeaderBigobj`], and corresponds
|
||||
/// to [`crate::FileKind::CoffBig`].
|
||||
///
|
||||
/// Most functionality is provided by the [`Object`] trait implementation.
|
||||
pub type CoffBigFile<'data, R = &'data [u8]> = CoffFile<'data, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A COFF object file.
|
||||
///
|
||||
/// This is a file that starts with [`pe::ImageFileHeader`], and corresponds
|
||||
/// to [`crate::FileKind::Coff`].
|
||||
///
|
||||
/// Most functionality is provided by the [`Object`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct CoffFile<'data, R: ReadRef<'data> = &'data [u8], Coff: CoffHeader = pe::ImageFileHeader>
|
||||
{
|
||||
pub(super) header: &'data Coff,
|
||||
pub(super) common: CoffCommon<'data, R, Coff>,
|
||||
pub(super) data: R,
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> CoffFile<'data, R, Coff> {
|
||||
/// Parse the raw COFF file data.
|
||||
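///
/// # Example
///
/// A minimal sketch that lists the symbol names; the input path is a placeholder.
///
/// ```no_run
/// use object::{Object, ObjectSymbol};
/// use object::read::coff::CoffFile;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let data = std::fs::read("path/to/coff.obj")?; // placeholder path
/// let file = CoffFile::<&[u8]>::parse(&*data)?;
/// for symbol in file.symbols() {
///     println!("{}", symbol.name()?);
/// }
/// # Ok(())
/// # }
/// ```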
pub fn parse(data: R) -> Result<Self> {
|
||||
let mut offset = 0;
|
||||
let header = Coff::parse(data, &mut offset)?;
|
||||
let sections = header.sections(data, offset)?;
|
||||
let symbols = header.symbols(data)?;
|
||||
|
||||
Ok(CoffFile {
|
||||
header,
|
||||
common: CoffCommon {
|
||||
sections,
|
||||
symbols,
|
||||
image_base: 0,
|
||||
},
|
||||
data,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffFile<'data, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R, Coff> Object<'data, 'file> for CoffFile<'data, R, Coff>
|
||||
where
|
||||
'data: 'file,
|
||||
R: 'file + ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
type Segment = CoffSegment<'data, 'file, R, Coff>;
|
||||
type SegmentIterator = CoffSegmentIterator<'data, 'file, R, Coff>;
|
||||
type Section = CoffSection<'data, 'file, R, Coff>;
|
||||
type SectionIterator = CoffSectionIterator<'data, 'file, R, Coff>;
|
||||
type Comdat = CoffComdat<'data, 'file, R, Coff>;
|
||||
type ComdatIterator = CoffComdatIterator<'data, 'file, R, Coff>;
|
||||
type Symbol = CoffSymbol<'data, 'file, R, Coff>;
|
||||
type SymbolIterator = CoffSymbolIterator<'data, 'file, R, Coff>;
|
||||
type SymbolTable = CoffSymbolTable<'data, 'file, R, Coff>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
fn architecture(&self) -> Architecture {
|
||||
match self.header.machine() {
|
||||
pe::IMAGE_FILE_MACHINE_ARMNT => Architecture::Arm,
|
||||
pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => Architecture::Aarch64,
|
||||
pe::IMAGE_FILE_MACHINE_I386 => Architecture::I386,
|
||||
pe::IMAGE_FILE_MACHINE_AMD64 => Architecture::X86_64,
|
||||
_ => Architecture::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
match self.header.machine() {
|
||||
pe::IMAGE_FILE_MACHINE_ARM64EC => Some(SubArchitecture::Arm64EC),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
// Windows COFF is always 32-bit, even for 64-bit architectures. This could be confusing.
|
||||
false
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
ObjectKind::Relocatable
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> CoffSegmentIterator<'data, 'file, R, Coff> {
|
||||
CoffSegmentIterator {
|
||||
file: self,
|
||||
iter: self.common.sections.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<CoffSection<'data, 'file, R, Coff>> {
|
||||
self.sections()
|
||||
.find(|section| section.name_bytes() == Ok(section_name))
|
||||
}
|
||||
|
||||
fn section_by_index(
|
||||
&'file self,
|
||||
index: SectionIndex,
|
||||
) -> Result<CoffSection<'data, 'file, R, Coff>> {
|
||||
let section = self.common.sections.section(index.0)?;
|
||||
Ok(CoffSection {
|
||||
file: self,
|
||||
index,
|
||||
section,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> CoffSectionIterator<'data, 'file, R, Coff> {
|
||||
CoffSectionIterator {
|
||||
file: self,
|
||||
iter: self.common.sections.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> CoffComdatIterator<'data, 'file, R, Coff> {
|
||||
CoffComdatIterator {
|
||||
file: self,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(
|
||||
&'file self,
|
||||
index: SymbolIndex,
|
||||
) -> Result<CoffSymbol<'data, 'file, R, Coff>> {
|
||||
let symbol = self.common.symbols.symbol(index.0)?;
|
||||
Ok(CoffSymbol {
|
||||
file: &self.common,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> CoffSymbolIterator<'data, 'file, R, Coff> {
|
||||
CoffSymbolIterator {
|
||||
file: &self.common,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol_table(&'file self) -> Option<CoffSymbolTable<'data, 'file, R, Coff>> {
|
||||
Some(CoffSymbolTable { file: &self.common })
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> CoffSymbolIterator<'data, 'file, R, Coff> {
|
||||
CoffSymbolIterator {
|
||||
file: &self.common,
|
||||
// Hack: don't return any.
|
||||
index: self.common.symbols.len(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_symbol_table(&'file self) -> Option<CoffSymbolTable<'data, 'file, R, Coff>> {
|
||||
None
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_relocations(&'file self) -> Option<NoDynamicRelocationIterator> {
|
||||
None
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>> {
|
||||
// TODO: this could return undefined symbols, but not needed yet.
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
// TODO: this could return global symbols, but not needed yet.
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.section_by_name(".debug_info").is_some()
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn entry(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::Coff {
|
||||
characteristics: self.header.characteristics(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Read the `class_id` field from a [`pe::AnonObjectHeader`].
|
||||
///
|
||||
/// This can be used to determine the format of the header.
|
||||
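///
/// # Example
///
/// A sketch of checking for a COFF bigobj file; the input path is a placeholder.
///
/// ```no_run
/// use object::pe;
/// use object::read::coff::anon_object_class_id;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let data = std::fs::read("path/to/anon.obj")?; // placeholder path
/// if anon_object_class_id(&*data)? == pe::ANON_OBJECT_HEADER_BIGOBJ_CLASS_ID {
///     println!("this is a COFF bigobj file");
/// }
/// # Ok(())
/// # }
/// ```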
pub fn anon_object_class_id<'data, R: ReadRef<'data>>(data: R) -> Result<pe::ClsId> {
|
||||
let header = data
|
||||
.read_at::<pe::AnonObjectHeader>(0)
|
||||
.read_error("Invalid anon object header size or alignment")?;
|
||||
Ok(header.class_id)
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageFileHeader`] and [`pe::AnonObjectHeaderBigobj`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait CoffHeader: Debug + Pod {
|
||||
type ImageSymbol: ImageSymbol;
|
||||
type ImageSymbolBytes: Debug + Pod;
|
||||
|
||||
/// Return true if this type is [`pe::AnonObjectHeaderBigobj`].
|
||||
///
|
||||
/// This is a property of the type, not a value in the header data.
|
||||
fn is_type_bigobj() -> bool;
|
||||
|
||||
fn machine(&self) -> u16;
|
||||
fn number_of_sections(&self) -> u32;
|
||||
fn pointer_to_symbol_table(&self) -> u32;
|
||||
fn number_of_symbols(&self) -> u32;
|
||||
fn characteristics(&self) -> u16;
|
||||
|
||||
/// Read the file header.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
/// `offset` must be the file header offset. It is updated to point after the optional header,
|
||||
/// which is where the section headers are located.
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self>;
|
||||
|
||||
/// Read the section table.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
/// `offset` must be after the optional file header.
|
||||
#[inline]
|
||||
fn sections<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
offset: u64,
|
||||
) -> read::Result<SectionTable<'data>> {
|
||||
SectionTable::parse(self, data, offset)
|
||||
}
|
||||
|
||||
/// Read the symbol table and string table.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
#[inline]
|
||||
fn symbols<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
) -> read::Result<SymbolTable<'data, R, Self>> {
|
||||
SymbolTable::parse(self, data)
|
||||
}
|
||||
}
|
||||
|
||||
impl CoffHeader for pe::ImageFileHeader {
|
||||
type ImageSymbol = pe::ImageSymbol;
|
||||
type ImageSymbolBytes = pe::ImageSymbolBytes;
|
||||
|
||||
fn is_type_bigobj() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn machine(&self) -> u16 {
|
||||
self.machine.get(LE)
|
||||
}
|
||||
|
||||
fn number_of_sections(&self) -> u32 {
|
||||
self.number_of_sections.get(LE).into()
|
||||
}
|
||||
|
||||
fn pointer_to_symbol_table(&self) -> u32 {
|
||||
self.pointer_to_symbol_table.get(LE)
|
||||
}
|
||||
|
||||
fn number_of_symbols(&self) -> u32 {
|
||||
self.number_of_symbols.get(LE)
|
||||
}
|
||||
|
||||
fn characteristics(&self) -> u16 {
|
||||
self.characteristics.get(LE)
|
||||
}
|
||||
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self> {
|
||||
let header = data
|
||||
.read::<pe::ImageFileHeader>(offset)
|
||||
.read_error("Invalid COFF file header size or alignment")?;
|
||||
|
||||
// Skip over the optional header.
|
||||
*offset = offset
|
||||
.checked_add(header.size_of_optional_header.get(LE).into())
|
||||
.read_error("Invalid COFF optional header size")?;
|
||||
|
||||
// TODO: maybe validate that the machine is known?
|
||||
Ok(header)
|
||||
}
|
||||
}
|
||||
|
||||
impl CoffHeader for pe::AnonObjectHeaderBigobj {
|
||||
type ImageSymbol = pe::ImageSymbolEx;
|
||||
type ImageSymbolBytes = pe::ImageSymbolExBytes;
|
||||
|
||||
fn is_type_bigobj() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn machine(&self) -> u16 {
|
||||
self.machine.get(LE)
|
||||
}
|
||||
|
||||
fn number_of_sections(&self) -> u32 {
|
||||
self.number_of_sections.get(LE)
|
||||
}
|
||||
|
||||
fn pointer_to_symbol_table(&self) -> u32 {
|
||||
self.pointer_to_symbol_table.get(LE)
|
||||
}
|
||||
|
||||
fn number_of_symbols(&self) -> u32 {
|
||||
self.number_of_symbols.get(LE)
|
||||
}
|
||||
|
||||
fn characteristics(&self) -> u16 {
|
||||
0
|
||||
}
|
||||
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> read::Result<&'data Self> {
|
||||
let header = data
|
||||
.read::<pe::AnonObjectHeaderBigobj>(offset)
|
||||
.read_error("Invalid COFF bigobj file header size or alignment")?;
|
||||
|
||||
if header.sig1.get(LE) != pe::IMAGE_FILE_MACHINE_UNKNOWN
|
||||
|| header.sig2.get(LE) != 0xffff
|
||||
|| header.version.get(LE) < 2
|
||||
|| header.class_id != pe::ANON_OBJECT_HEADER_BIGOBJ_CLASS_ID
|
||||
{
|
||||
return Err(read::Error("Invalid COFF bigobj header values"));
|
||||
}
|
||||
|
||||
// TODO: maybe validate that the machine is known?
|
||||
Ok(header)
|
||||
}
|
||||
}
|
||||
220
vendor/object/src/read/coff/import.rs
vendored
Normal file
@@ -0,0 +1,220 @@
|
||||
//! Support for reading short import files.
|
||||
//!
|
||||
//! These are used by some Windows linkers as a more compact way to describe
|
||||
//! dynamically imported symbols.
|
||||
|
||||
use crate::read::{Architecture, Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, ByteString, Bytes, LittleEndian as LE, SubArchitecture};
|
||||
|
||||
/// A Windows short form description of a symbol to import.
|
||||
///
|
||||
/// Used in Windows import libraries to provide a mapping from
|
||||
/// a symbol name to a DLL export. This is not an object file.
|
||||
///
|
||||
/// This is a file that starts with [`pe::ImportObjectHeader`], and corresponds
|
||||
/// to [`crate::FileKind::CoffImport`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportFile<'data> {
|
||||
header: &'data pe::ImportObjectHeader,
|
||||
kind: ImportType,
|
||||
dll: ByteString<'data>,
|
||||
symbol: ByteString<'data>,
|
||||
import: Option<ByteString<'data>>,
|
||||
}
|
||||
|
||||
impl<'data> ImportFile<'data> {
|
||||
/// Parse a COFF short import file.
|
||||
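///
/// # Example
///
/// A small sketch of reading one short import member that has already been
/// extracted from an import library; the input path is a placeholder.
///
/// ```no_run
/// use object::read::coff::ImportFile;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let data = std::fs::read("path/to/import_member")?; // placeholder path
/// let import = ImportFile::parse(&*data)?;
/// println!("symbol: {}", String::from_utf8_lossy(import.symbol()));
/// println!("dll: {}", String::from_utf8_lossy(import.dll()));
/// println!("import: {:?}", import.import());
/// # Ok(())
/// # }
/// ```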
pub fn parse<R: ReadRef<'data>>(data: R) -> Result<Self> {
|
||||
let mut offset = 0;
|
||||
let header = pe::ImportObjectHeader::parse(data, &mut offset)?;
|
||||
let data = header.parse_data(data, &mut offset)?;
|
||||
|
||||
// Unmangles a name by removing a `?`, `@` or `_` prefix.
|
||||
fn strip_prefix(s: &[u8]) -> &[u8] {
|
||||
match s.split_first() {
|
||||
Some((b, rest)) if [b'?', b'@', b'_'].contains(b) => rest,
|
||||
_ => s,
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
header,
|
||||
dll: data.dll,
|
||||
symbol: data.symbol,
|
||||
kind: match header.import_type() {
|
||||
pe::IMPORT_OBJECT_CODE => ImportType::Code,
|
||||
pe::IMPORT_OBJECT_DATA => ImportType::Data,
|
||||
pe::IMPORT_OBJECT_CONST => ImportType::Const,
|
||||
_ => return Err(Error("Invalid COFF import library import type")),
|
||||
},
|
||||
import: match header.name_type() {
|
||||
pe::IMPORT_OBJECT_ORDINAL => None,
|
||||
pe::IMPORT_OBJECT_NAME => Some(data.symbol()),
|
||||
pe::IMPORT_OBJECT_NAME_NO_PREFIX => Some(strip_prefix(data.symbol())),
|
||||
pe::IMPORT_OBJECT_NAME_UNDECORATE => Some(
|
||||
strip_prefix(data.symbol())
|
||||
.split(|&b| b == b'@')
|
||||
.next()
|
||||
.unwrap(),
|
||||
),
|
||||
pe::IMPORT_OBJECT_NAME_EXPORTAS => data.export(),
|
||||
_ => return Err(Error("Unknown COFF import library name type")),
|
||||
}
|
||||
.map(ByteString),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the machine type.
|
||||
pub fn architecture(&self) -> Architecture {
|
||||
match self.header.machine.get(LE) {
|
||||
pe::IMAGE_FILE_MACHINE_ARMNT => Architecture::Arm,
|
||||
pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => Architecture::Aarch64,
|
||||
pe::IMAGE_FILE_MACHINE_I386 => Architecture::I386,
|
||||
pe::IMAGE_FILE_MACHINE_AMD64 => Architecture::X86_64,
|
||||
_ => Architecture::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the sub machine type, if available.
|
||||
pub fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
match self.header.machine.get(LE) {
|
||||
pe::IMAGE_FILE_MACHINE_ARM64EC => Some(SubArchitecture::Arm64EC),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// The public symbol name.
|
||||
pub fn symbol(&self) -> &'data [u8] {
|
||||
self.symbol.0
|
||||
}
|
||||
|
||||
/// The name of the DLL to import the symbol from.
|
||||
pub fn dll(&self) -> &'data [u8] {
|
||||
self.dll.0
|
||||
}
|
||||
|
||||
/// The name exported from the DLL.
|
||||
pub fn import(&self) -> ImportName<'data> {
|
||||
match self.import {
|
||||
Some(name) => ImportName::Name(name.0),
|
||||
None => ImportName::Ordinal(self.header.ordinal_or_hint.get(LE)),
|
||||
}
|
||||
}
|
||||
|
||||
/// The type of import. Usually either a function or data.
|
||||
pub fn import_type(&self) -> ImportType {
|
||||
self.kind
|
||||
}
|
||||
}
|
||||
|
||||
/// The name or ordinal to import from a DLL.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ImportName<'data> {
|
||||
/// Import by ordinal. Ordinarily this is a 1-based index.
|
||||
Ordinal(u16),
|
||||
/// Import by name.
|
||||
Name(&'data [u8]),
|
||||
}
|
||||
|
||||
/// The kind of import symbol.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum ImportType {
|
||||
/// An executable code symbol.
|
||||
Code,
|
||||
/// A data symbol.
|
||||
Data,
|
||||
/// A constant value.
|
||||
Const,
|
||||
}
|
||||
|
||||
impl pe::ImportObjectHeader {
|
||||
/// Read the short import header.
|
||||
///
|
||||
/// Also checks that the signature and version are valid.
|
||||
/// Directly following this header will be the string data.
|
||||
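///
/// # Example
///
/// A low level sketch, mirroring what `ImportFile::parse` does internally;
/// the input path is a placeholder.
///
/// ```no_run
/// use object::pe;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let data = std::fs::read("path/to/import_member")?; // placeholder path
/// let mut offset = 0;
/// let header = pe::ImportObjectHeader::parse(&*data, &mut offset)?;
/// let strings = header.parse_data(&*data, &mut offset)?;
/// println!("{} from {}",
///     String::from_utf8_lossy(strings.symbol()),
///     String::from_utf8_lossy(strings.dll()));
/// # Ok(())
/// # }
/// ```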
pub fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> Result<&'data Self> {
|
||||
let header = data
|
||||
.read::<pe::ImportObjectHeader>(offset)
|
||||
.read_error("Invalid COFF import library header size")?;
|
||||
if header.sig1.get(LE) != 0 || header.sig2.get(LE) != pe::IMPORT_OBJECT_HDR_SIG2 {
|
||||
Err(Error("Invalid COFF import library header"))
|
||||
} else if header.version.get(LE) != 0 {
|
||||
Err(Error("Unknown COFF import library header version"))
|
||||
} else {
|
||||
Ok(header)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the data following the header.
|
||||
pub fn parse_data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
offset: &mut u64,
|
||||
) -> Result<ImportObjectData<'data>> {
|
||||
let mut data = Bytes(
|
||||
data.read_bytes(offset, u64::from(self.size_of_data.get(LE)))
|
||||
.read_error("Invalid COFF import library data size")?,
|
||||
);
|
||||
let symbol = data
|
||||
.read_string()
|
||||
.map(ByteString)
|
||||
.read_error("Could not read COFF import library symbol name")?;
|
||||
let dll = data
|
||||
.read_string()
|
||||
.map(ByteString)
|
||||
.read_error("Could not read COFF import library DLL name")?;
|
||||
let export = if self.name_type() == pe::IMPORT_OBJECT_NAME_EXPORTAS {
|
||||
data.read_string()
|
||||
.map(ByteString)
|
||||
.map(Some)
|
||||
.read_error("Could not read COFF import library export name")?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(ImportObjectData {
|
||||
symbol,
|
||||
dll,
|
||||
export,
|
||||
})
|
||||
}
|
||||
|
||||
/// The type of import.
|
||||
///
|
||||
/// This is one of the `IMPORT_OBJECT_*` constants.
|
||||
pub fn import_type(&self) -> u16 {
|
||||
self.name_type.get(LE) & pe::IMPORT_OBJECT_TYPE_MASK
|
||||
}
|
||||
|
||||
/// The type of import name.
|
||||
///
|
||||
/// This is one of the `IMPORT_OBJECT_*` constants.
|
||||
pub fn name_type(&self) -> u16 {
|
||||
(self.name_type.get(LE) >> pe::IMPORT_OBJECT_NAME_SHIFT) & pe::IMPORT_OBJECT_NAME_MASK
|
||||
}
|
||||
}
|
||||
|
||||
/// The data following [`pe::ImportObjectHeader`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportObjectData<'data> {
|
||||
symbol: ByteString<'data>,
|
||||
dll: ByteString<'data>,
|
||||
export: Option<ByteString<'data>>,
|
||||
}
|
||||
|
||||
impl<'data> ImportObjectData<'data> {
|
||||
/// The public symbol name.
|
||||
pub fn symbol(&self) -> &'data [u8] {
|
||||
self.symbol.0
|
||||
}
|
||||
|
||||
/// The name of the DLL to import the symbol from.
|
||||
pub fn dll(&self) -> &'data [u8] {
|
||||
self.dll.0
|
||||
}
|
||||
|
||||
/// The name exported from the DLL.
|
||||
///
|
||||
/// This is only set if the name is not derived from the symbol name.
|
||||
pub fn export(&self) -> Option<&'data [u8]> {
|
||||
self.export.map(|export| export.0)
|
||||
}
|
||||
}
|
||||
66
vendor/object/src/read/coff/mod.rs
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
//! Support for reading Windows COFF files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between COFF object files
|
||||
//! and COFF bigobj files. The primary trait for this is [`CoffHeader`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`CoffFile`] implements the [`Object`](crate::read::Object) trait for
|
||||
//! COFF files. [`CoffFile`] is parameterised by [`CoffHeader`].
|
||||
//! The default parameter allows reading regular COFF object files,
|
||||
//! while the type alias [`CoffBigFile`] allows reading COFF bigobj files.
|
||||
//!
|
||||
//! [`ImportFile`] allows reading COFF short imports that are used in import
|
||||
//! libraries. Currently these are not integrated with the unified read API.
|
||||
//!
|
||||
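//! ### Example for high level API
//!
//! A brief sketch of the high level API applied to a COFF object;
//! the input path is a placeholder.
//! ```no_run
//! use object::{Object, ObjectSection};
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a COFF file and displays the name of each section.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?; // placeholder path
//!     let file = object::read::coff::CoffFile::<&[u8]>::parse(&*data)?;
//!     for section in file.sections() {
//!         println!("{}", String::from_utf8_lossy(section.name_bytes()?));
//!     }
//! # }
//!     Ok(())
//! }
//! ```
//!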
//! ## Low level API
|
||||
//!
|
||||
//! The [`CoffHeader`] trait can be directly used to parse both COFF
|
||||
//! object files (which start with [`pe::ImageFileHeader`]) and COFF bigobj
|
||||
//! files (which start with [`pe::AnonObjectHeaderBigobj`]).
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::pe;
|
||||
//! use object::read::coff::{CoffHeader, ImageSymbol as _};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section and symbol.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let mut offset = 0;
|
||||
//! let header = pe::ImageFileHeader::parse(&*data, &mut offset)?;
|
||||
//! let sections = header.sections(&*data, offset)?;
|
||||
//! let symbols = header.symbols(&*data)?;
|
||||
//! for section in sections.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(section.name(symbols.strings())?));
|
||||
//! }
|
||||
//! for (_index, symbol) in symbols.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(symbol.name(symbols.strings())?));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::pe;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod symbol;
|
||||
pub use symbol::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
|
||||
mod comdat;
|
||||
pub use comdat::*;
|
||||
|
||||
mod import;
|
||||
pub use import::*;
|
||||
106
vendor/object/src/read/coff/relocation.rs
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
use alloc::fmt;
|
||||
use core::slice;
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::{
|
||||
ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader};
|
||||
|
||||
/// An iterator for the relocations in a [`CoffBigSection`](super::CoffBigSection).
|
||||
pub type CoffBigRelocationIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffRelocationIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the relocations in a [`CoffSection`](super::CoffSection).
|
||||
pub struct CoffRelocationIterator<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) iter: slice::Iter<'data, pe::ImageRelocation>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffRelocationIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|relocation| {
|
||||
let (kind, size, addend) = match self.file.header.machine() {
|
||||
pe::IMAGE_FILE_MACHINE_ARMNT => match relocation.typ.get(LE) {
|
||||
pe::IMAGE_REL_ARM_ADDR32 => (RelocationKind::Absolute, 32, 0),
|
||||
pe::IMAGE_REL_ARM_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
|
||||
pe::IMAGE_REL_ARM_REL32 => (RelocationKind::Relative, 32, -4),
|
||||
pe::IMAGE_REL_ARM_SECTION => (RelocationKind::SectionIndex, 16, 0),
|
||||
pe::IMAGE_REL_ARM_SECREL => (RelocationKind::SectionOffset, 32, 0),
|
||||
typ => (RelocationKind::Coff(typ), 0, 0),
|
||||
},
|
||||
pe::IMAGE_FILE_MACHINE_ARM64 | pe::IMAGE_FILE_MACHINE_ARM64EC => {
|
||||
match relocation.typ.get(LE) {
|
||||
pe::IMAGE_REL_ARM64_ADDR32 => (RelocationKind::Absolute, 32, 0),
|
||||
pe::IMAGE_REL_ARM64_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
|
||||
pe::IMAGE_REL_ARM64_SECREL => (RelocationKind::SectionOffset, 32, 0),
|
||||
pe::IMAGE_REL_ARM64_SECTION => (RelocationKind::SectionIndex, 16, 0),
|
||||
pe::IMAGE_REL_ARM64_ADDR64 => (RelocationKind::Absolute, 64, 0),
|
||||
pe::IMAGE_REL_ARM64_REL32 => (RelocationKind::Relative, 32, -4),
|
||||
typ => (RelocationKind::Coff(typ), 0, 0),
|
||||
}
|
||||
}
|
||||
pe::IMAGE_FILE_MACHINE_I386 => match relocation.typ.get(LE) {
|
||||
pe::IMAGE_REL_I386_DIR16 => (RelocationKind::Absolute, 16, 0),
|
||||
pe::IMAGE_REL_I386_REL16 => (RelocationKind::Relative, 16, 0),
|
||||
pe::IMAGE_REL_I386_DIR32 => (RelocationKind::Absolute, 32, 0),
|
||||
pe::IMAGE_REL_I386_DIR32NB => (RelocationKind::ImageOffset, 32, 0),
|
||||
pe::IMAGE_REL_I386_SECTION => (RelocationKind::SectionIndex, 16, 0),
|
||||
pe::IMAGE_REL_I386_SECREL => (RelocationKind::SectionOffset, 32, 0),
|
||||
pe::IMAGE_REL_I386_SECREL7 => (RelocationKind::SectionOffset, 7, 0),
|
||||
pe::IMAGE_REL_I386_REL32 => (RelocationKind::Relative, 32, -4),
|
||||
typ => (RelocationKind::Coff(typ), 0, 0),
|
||||
},
|
||||
pe::IMAGE_FILE_MACHINE_AMD64 => match relocation.typ.get(LE) {
|
||||
pe::IMAGE_REL_AMD64_ADDR64 => (RelocationKind::Absolute, 64, 0),
|
||||
pe::IMAGE_REL_AMD64_ADDR32 => (RelocationKind::Absolute, 32, 0),
|
||||
pe::IMAGE_REL_AMD64_ADDR32NB => (RelocationKind::ImageOffset, 32, 0),
|
||||
pe::IMAGE_REL_AMD64_REL32 => (RelocationKind::Relative, 32, -4),
|
||||
pe::IMAGE_REL_AMD64_REL32_1 => (RelocationKind::Relative, 32, -5),
|
||||
pe::IMAGE_REL_AMD64_REL32_2 => (RelocationKind::Relative, 32, -6),
|
||||
pe::IMAGE_REL_AMD64_REL32_3 => (RelocationKind::Relative, 32, -7),
|
||||
pe::IMAGE_REL_AMD64_REL32_4 => (RelocationKind::Relative, 32, -8),
|
||||
pe::IMAGE_REL_AMD64_REL32_5 => (RelocationKind::Relative, 32, -9),
|
||||
pe::IMAGE_REL_AMD64_SECTION => (RelocationKind::SectionIndex, 16, 0),
|
||||
pe::IMAGE_REL_AMD64_SECREL => (RelocationKind::SectionOffset, 32, 0),
|
||||
pe::IMAGE_REL_AMD64_SECREL7 => (RelocationKind::SectionOffset, 7, 0),
|
||||
typ => (RelocationKind::Coff(typ), 0, 0),
|
||||
},
|
||||
_ => (RelocationKind::Coff(relocation.typ.get(LE)), 0, 0),
|
||||
};
|
||||
let target = RelocationTarget::Symbol(SymbolIndex(
|
||||
relocation.symbol_table_index.get(LE) as usize,
|
||||
));
|
||||
(
|
||||
u64::from(relocation.virtual_address.get(LE)),
|
||||
Relocation {
|
||||
kind,
|
||||
encoding: RelocationEncoding::Generic,
|
||||
size,
|
||||
target,
|
||||
addend,
|
||||
implicit_addend: true,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> fmt::Debug
|
||||
for CoffRelocationIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("CoffRelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
585
vendor/object/src/read/coff/section.rs
vendored
Normal file
@@ -0,0 +1,585 @@
|
||||
use core::convert::TryFrom;
|
||||
use core::{iter, result, slice, str};
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, Error, ObjectSection, ObjectSegment, ReadError,
|
||||
ReadRef, Result, SectionFlags, SectionIndex, SectionKind, SegmentFlags,
|
||||
};
|
||||
|
||||
use super::{CoffFile, CoffHeader, CoffRelocationIterator};
|
||||
|
||||
/// The table of section headers in a COFF or PE file.
|
||||
///
|
||||
/// Returned by [`CoffHeader::sections`] and
|
||||
/// [`ImageNtHeaders::sections`](crate::read::pe::ImageNtHeaders::sections).
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct SectionTable<'data> {
|
||||
sections: &'data [pe::ImageSectionHeader],
|
||||
}
|
||||
|
||||
impl<'data> SectionTable<'data> {
|
||||
/// Parse the section table.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
/// `offset` must be after the optional file header.
|
||||
pub fn parse<Coff: CoffHeader, R: ReadRef<'data>>(
|
||||
header: &Coff,
|
||||
data: R,
|
||||
offset: u64,
|
||||
) -> Result<Self> {
|
||||
let sections = data
|
||||
.read_slice_at(offset, header.number_of_sections() as usize)
|
||||
.read_error("Invalid COFF/PE section headers")?;
|
||||
Ok(SectionTable { sections })
|
||||
}
|
||||
|
||||
/// Iterate over the section headers.
|
||||
///
|
||||
/// Warning: section indices start at 1.
|
||||
#[inline]
|
||||
pub fn iter(&self) -> slice::Iter<'data, pe::ImageSectionHeader> {
|
||||
self.sections.iter()
|
||||
}
|
||||
|
||||
/// Return true if the section table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.sections.is_empty()
|
||||
}
|
||||
|
||||
/// The number of section headers.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.sections.len()
|
||||
}
|
||||
|
||||
/// Return the section header at the given index.
|
||||
///
|
||||
/// The index is 1-based.
|
||||
pub fn section(&self, index: usize) -> read::Result<&'data pe::ImageSectionHeader> {
|
||||
self.sections
|
||||
.get(index.wrapping_sub(1))
|
||||
.read_error("Invalid COFF/PE section index")
|
||||
}
|
||||
|
||||
/// Return the section header with the given name.
|
||||
///
|
||||
/// The returned index is 1-based.
|
||||
///
|
||||
/// Ignores sections with invalid names.
|
||||
pub fn section_by_name<R: ReadRef<'data>>(
|
||||
&self,
|
||||
strings: StringTable<'data, R>,
|
||||
name: &[u8],
|
||||
) -> Option<(usize, &'data pe::ImageSectionHeader)> {
|
||||
self.sections
|
||||
.iter()
|
||||
.enumerate()
|
||||
.find(|(_, section)| section.name(strings) == Ok(name))
|
||||
.map(|(index, section)| (index + 1, section))
|
||||
}
|
||||
|
||||
    /// Compute the maximum file offset used by sections.
    ///
    /// This will usually match the end of file, unless the PE file has a
    /// [data overlay](https://security.stackexchange.com/questions/77336/how-is-the-file-overlay-read-by-an-exe-virus).
    pub fn max_section_file_offset(&self) -> u64 {
        let mut max = 0;
        for section in self.iter() {
            match (section.pointer_to_raw_data.get(LE) as u64)
                .checked_add(section.size_of_raw_data.get(LE) as u64)
            {
                None => {
                    // This cannot happen; we're summing two u32 values into a u64.
                    continue;
                }
                Some(end_of_section) => {
                    if end_of_section > max {
                        max = end_of_section;
                    }
                }
            }
        }
        max
    }
}
|
||||
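// Editor's note: a minimal usage sketch, not part of the vendored source.
// It shows how a downstream crate might drive the low-level `SectionTable`
// API above to list section names from a raw COFF object. `data` is assumed
// to be the entire file contents; the parsing order mirrors `CoffFile::parse`.
fn list_coff_sections(data: &[u8]) -> object::read::Result<()> {
    use object::pe;
    use object::read::coff::CoffHeader;

    let mut offset = 0;
    // Read the file header; this also advances `offset` past the optional header.
    let header = pe::ImageFileHeader::parse(data, &mut offset)?;
    let strings = header.symbols(data)?.strings();
    let sections = header.sections(data, offset)?;
    for section in sections.iter() {
        // Long names are stored as offsets into the symbol string table.
        let name = section.name(strings).unwrap_or(b"<invalid name>");
        println!("{}", String::from_utf8_lossy(name));
    }
    Ok(())
}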
|
||||
/// An iterator for the loadable sections in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigSegmentIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffSegmentIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the loadable sections in a [`CoffFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct CoffSegmentIterator<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) iter: slice::Iter<'data, pe::ImageSectionHeader>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSegmentIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSegment<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|section| CoffSegment {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A loadable section in a [`CoffBigFile`](super::CoffBigFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
|
||||
pub type CoffBigSegment<'data, 'file, R = &'data [u8]> =
|
||||
CoffSegment<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A loadable section in a [`CoffFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct CoffSegment<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) section: &'data pe::ImageSectionHeader,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSegment<'data, 'file, R, Coff> {
|
||||
fn bytes(&self) -> Result<&'data [u8]> {
|
||||
self.section
|
||||
.coff_data(self.file.data)
|
||||
.read_error("Invalid COFF section offset or size")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffSegment<'data, 'file, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSegment<'data>
|
||||
for CoffSegment<'data, 'file, R, Coff>
|
||||
{
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
u64::from(self.section.virtual_address.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
u64::from(self.section.virtual_size.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
self.section.coff_alignment()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
let (offset, size) = self.section.coff_file_range().unwrap_or((0, 0));
|
||||
(u64::from(offset), u64::from(size))
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
self.section
|
||||
.name(self.file.common.symbols.strings())
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
let name = self.section.name(self.file.common.symbols.strings())?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 COFF section name")
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
let characteristics = self.section.characteristics.get(LE);
|
||||
SegmentFlags::Coff { characteristics }
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigSectionIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffSectionIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the sections in a [`CoffFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct CoffSectionIterator<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) iter: iter::Enumerate<slice::Iter<'data, pe::ImageSectionHeader>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSectionIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSection<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|(index, section)| CoffSection {
|
||||
file: self.file,
|
||||
index: SectionIndex(index + 1),
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`CoffBigFile`](super::CoffBigFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
pub type CoffBigSection<'data, 'file, R = &'data [u8]> =
|
||||
CoffSection<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A section in a [`CoffFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct CoffSection<
|
||||
'data,
|
||||
'file,
|
||||
R: ReadRef<'data> = &'data [u8],
|
||||
Coff: CoffHeader = pe::ImageFileHeader,
|
||||
> {
|
||||
pub(super) file: &'file CoffFile<'data, R, Coff>,
|
||||
pub(super) index: SectionIndex,
|
||||
pub(super) section: &'data pe::ImageSectionHeader,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSection<'data, 'file, R, Coff> {
|
||||
fn bytes(&self) -> Result<&'data [u8]> {
|
||||
self.section
|
||||
.coff_data(self.file.data)
|
||||
.read_error("Invalid COFF section offset or size")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffSection<'data, 'file, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSection<'data>
|
||||
for CoffSection<'data, 'file, R, Coff>
|
||||
{
|
||||
type RelocationIterator = CoffRelocationIterator<'data, 'file, R, Coff>;
|
||||
|
||||
#[inline]
|
||||
fn index(&self) -> SectionIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
u64::from(self.section.virtual_address.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
// TODO: This may need to be the length from the auxiliary symbol for this section.
|
||||
u64::from(self.section.size_of_raw_data.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
self.section.coff_alignment()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
let (offset, size) = self.section.coff_file_range()?;
|
||||
Some((u64::from(offset), u64::from(size)))
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
self.section.name(self.file.common.symbols.strings())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 COFF section name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SectionKind {
|
||||
self.section.kind()
|
||||
}
|
||||
|
||||
fn relocations(&self) -> CoffRelocationIterator<'data, 'file, R, Coff> {
|
||||
let relocations = self.section.coff_relocations(self.file.data).unwrap_or(&[]);
|
||||
CoffRelocationIterator {
|
||||
file: self.file,
|
||||
iter: relocations.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::Coff {
|
||||
characteristics: self.section.characteristics.get(LE),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageSectionHeader {
|
||||
pub(crate) fn kind(&self) -> SectionKind {
|
||||
let characteristics = self.characteristics.get(LE);
|
||||
if characteristics & (pe::IMAGE_SCN_CNT_CODE | pe::IMAGE_SCN_MEM_EXECUTE) != 0 {
|
||||
SectionKind::Text
|
||||
} else if characteristics & pe::IMAGE_SCN_CNT_INITIALIZED_DATA != 0 {
|
||||
if characteristics & pe::IMAGE_SCN_MEM_DISCARDABLE != 0 {
|
||||
SectionKind::Other
|
||||
} else if characteristics & pe::IMAGE_SCN_MEM_WRITE != 0 {
|
||||
SectionKind::Data
|
||||
} else {
|
||||
SectionKind::ReadOnlyData
|
||||
}
|
||||
} else if characteristics & pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA != 0 {
|
||||
SectionKind::UninitializedData
|
||||
} else if characteristics & pe::IMAGE_SCN_LNK_INFO != 0 {
|
||||
SectionKind::Linker
|
||||
} else {
|
||||
SectionKind::Unknown
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageSectionHeader {
|
||||
/// Return the string table offset of the section name.
|
||||
///
|
||||
/// Returns `Ok(None)` if the name doesn't use the string table
|
||||
/// and can be obtained with `raw_name` instead.
|
||||
pub fn name_offset(&self) -> Result<Option<u32>> {
|
||||
let bytes = &self.name;
|
||||
if bytes[0] != b'/' {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if bytes[1] == b'/' {
|
||||
let mut offset = 0;
|
||||
for byte in bytes[2..].iter() {
|
||||
let digit = match byte {
|
||||
b'A'..=b'Z' => byte - b'A',
|
||||
b'a'..=b'z' => byte - b'a' + 26,
|
||||
b'0'..=b'9' => byte - b'0' + 52,
|
||||
b'+' => 62,
|
||||
b'/' => 63,
|
||||
_ => return Err(Error("Invalid COFF section name base-64 offset")),
|
||||
};
|
||||
offset = offset * 64 + digit as u64;
|
||||
}
|
||||
u32::try_from(offset)
|
||||
.ok()
|
||||
.read_error("Invalid COFF section name base-64 offset")
|
||||
.map(Some)
|
||||
} else {
|
||||
let mut offset = 0;
|
||||
for byte in bytes[1..].iter() {
|
||||
let digit = match byte {
|
||||
b'0'..=b'9' => byte - b'0',
|
||||
0 => break,
|
||||
_ => return Err(Error("Invalid COFF section name base-10 offset")),
|
||||
};
|
||||
offset = offset * 10 + digit as u32;
|
||||
}
|
||||
Ok(Some(offset))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the section name.
|
||||
///
|
||||
/// This handles decoding names that are offsets into the symbol string table.
|
||||
pub fn name<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
if let Some(offset) = self.name_offset()? {
|
||||
strings
|
||||
.get(offset)
|
||||
.read_error("Invalid COFF section name offset")
|
||||
} else {
|
||||
Ok(self.raw_name())
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the raw section name.
|
||||
pub fn raw_name(&self) -> &[u8] {
|
||||
let bytes = &self.name;
|
||||
match memchr::memchr(b'\0', bytes) {
|
||||
Some(end) => &bytes[..end],
|
||||
None => &bytes[..],
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the offset and size of the section in a COFF file.
|
||||
///
|
||||
/// Returns `None` for sections that have no data in the file.
|
||||
pub fn coff_file_range(&self) -> Option<(u32, u32)> {
|
||||
if self.characteristics.get(LE) & pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA != 0 {
|
||||
None
|
||||
} else {
|
||||
let offset = self.pointer_to_raw_data.get(LE);
|
||||
// Note: virtual size is not used for COFF.
|
||||
let size = self.size_of_raw_data.get(LE);
|
||||
Some((offset, size))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the section data in a COFF file.
|
||||
///
|
||||
/// Returns `Ok(&[])` if the section has no data.
|
||||
/// Returns `Err` for invalid values.
|
||||
pub fn coff_data<'data, R: ReadRef<'data>>(&self, data: R) -> result::Result<&'data [u8], ()> {
|
||||
if let Some((offset, size)) = self.coff_file_range() {
|
||||
data.read_bytes_at(offset.into(), size.into())
|
||||
} else {
|
||||
Ok(&[])
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the section alignment in bytes.
|
||||
///
|
||||
/// This is only valid for sections in a COFF file.
|
||||
pub fn coff_alignment(&self) -> u64 {
|
||||
match self.characteristics.get(LE) & pe::IMAGE_SCN_ALIGN_MASK {
|
||||
pe::IMAGE_SCN_ALIGN_1BYTES => 1,
|
||||
pe::IMAGE_SCN_ALIGN_2BYTES => 2,
|
||||
pe::IMAGE_SCN_ALIGN_4BYTES => 4,
|
||||
pe::IMAGE_SCN_ALIGN_8BYTES => 8,
|
||||
pe::IMAGE_SCN_ALIGN_16BYTES => 16,
|
||||
pe::IMAGE_SCN_ALIGN_32BYTES => 32,
|
||||
pe::IMAGE_SCN_ALIGN_64BYTES => 64,
|
||||
pe::IMAGE_SCN_ALIGN_128BYTES => 128,
|
||||
pe::IMAGE_SCN_ALIGN_256BYTES => 256,
|
||||
pe::IMAGE_SCN_ALIGN_512BYTES => 512,
|
||||
pe::IMAGE_SCN_ALIGN_1024BYTES => 1024,
|
||||
pe::IMAGE_SCN_ALIGN_2048BYTES => 2048,
|
||||
pe::IMAGE_SCN_ALIGN_4096BYTES => 4096,
|
||||
pe::IMAGE_SCN_ALIGN_8192BYTES => 8192,
|
||||
_ => 16,
|
||||
}
|
||||
}
|
||||
|
||||
/// Read the relocations in a COFF file.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn coff_relocations<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
) -> read::Result<&'data [pe::ImageRelocation]> {
|
||||
let mut pointer = self.pointer_to_relocations.get(LE).into();
|
||||
let mut number: usize = self.number_of_relocations.get(LE).into();
|
||||
if number == core::u16::MAX.into()
|
||||
&& self.characteristics.get(LE) & pe::IMAGE_SCN_LNK_NRELOC_OVFL != 0
|
||||
{
|
||||
// Extended relocations. Read first relocation (which contains extended count) & adjust
|
||||
// relocations pointer.
|
||||
let extended_relocation_info = data
|
||||
.read_at::<pe::ImageRelocation>(pointer)
|
||||
.read_error("Invalid COFF relocation offset or number")?;
|
||||
number = extended_relocation_info.virtual_address.get(LE) as usize;
|
||||
if number == 0 {
|
||||
return Err(Error("Invalid COFF relocation number"));
|
||||
}
|
||||
pointer += core::mem::size_of::<pe::ImageRelocation>() as u64;
|
||||
// The extended relocation entry itself does not count toward the number of relocations.
|
||||
number -= 1;
|
||||
}
|
||||
data.read_slice_at(pointer, number)
|
||||
.read_error("Invalid COFF relocation offset or number")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn name_offset() {
        let mut section = pe::ImageSectionHeader::default();
        section.name = *b"xxxxxxxx";
        assert_eq!(section.name_offset(), Ok(None));
        section.name = *b"/0\0\0\0\0\0\0";
        assert_eq!(section.name_offset(), Ok(Some(0)));
        section.name = *b"/9999999";
        assert_eq!(section.name_offset(), Ok(Some(999_9999)));
        section.name = *b"//AAAAAA";
        assert_eq!(section.name_offset(), Ok(Some(0)));
        section.name = *b"//D/////";
        assert_eq!(section.name_offset(), Ok(Some(0xffff_ffff)));
        section.name = *b"//EAAAAA";
        assert!(section.name_offset().is_err());
        section.name = *b"////////";
        assert!(section.name_offset().is_err());
    }
}
635
vendor/object/src/read/coff/symbol.rs
vendored
Normal file
@@ -0,0 +1,635 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::str;
|
||||
|
||||
use super::{CoffCommon, CoffHeader, SectionTable};
|
||||
use crate::endian::{LittleEndian as LE, U32Bytes};
|
||||
use crate::pe;
|
||||
use crate::pod::{bytes_of, bytes_of_slice, Pod};
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, Bytes, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result, SectionIndex,
|
||||
SymbolFlags, SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry, SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
/// A table of symbol entries in a COFF or PE file.
|
||||
///
|
||||
/// Also includes the string table used for the symbol names.
|
||||
///
|
||||
/// Returned by [`CoffHeader::symbols`] and
|
||||
/// [`ImageNtHeaders::symbols`](crate::read::pe::ImageNtHeaders::symbols).
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolTable<'data, R = &'data [u8], Coff = pe::ImageFileHeader>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
symbols: &'data [Coff::ImageSymbolBytes],
|
||||
strings: StringTable<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> Default for SymbolTable<'data, R, Coff> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
symbols: &[],
|
||||
strings: StringTable::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>, Coff: CoffHeader> SymbolTable<'data, R, Coff> {
|
||||
/// Read the symbol table.
|
||||
pub fn parse(header: &Coff, data: R) -> Result<Self> {
|
||||
// The symbol table may not be present.
|
||||
let mut offset = header.pointer_to_symbol_table().into();
|
||||
let (symbols, strings) = if offset != 0 {
|
||||
let symbols = data
|
||||
.read_slice(&mut offset, header.number_of_symbols() as usize)
|
||||
.read_error("Invalid COFF symbol table offset or size")?;
|
||||
|
||||
// Note: don't update data when reading length; the length includes itself.
|
||||
let length = data
|
||||
.read_at::<U32Bytes<_>>(offset)
|
||||
.read_error("Missing COFF string table")?
|
||||
.get(LE);
|
||||
let str_end = offset
|
||||
.checked_add(length as u64)
|
||||
.read_error("Invalid COFF string table length")?;
|
||||
let strings = StringTable::new(data, offset, str_end);
|
||||
|
||||
(symbols, strings)
|
||||
} else {
|
||||
(&[][..], StringTable::default())
|
||||
};
|
||||
|
||||
Ok(SymbolTable { symbols, strings })
|
||||
}
|
||||
|
||||
/// Return the string table used for the symbol names.
|
||||
#[inline]
|
||||
pub fn strings(&self) -> StringTable<'data, R> {
|
||||
self.strings
|
||||
}
|
||||
|
||||
/// Return true if the symbol table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.symbols.is_empty()
|
||||
}
|
||||
|
||||
/// The number of symbol table entries.
|
||||
///
|
||||
/// This includes auxiliary symbol table entries.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.symbols.len()
|
||||
}
|
||||
|
||||
/// Iterate over the symbols.
|
||||
#[inline]
|
||||
pub fn iter<'table>(&'table self) -> SymbolIterator<'data, 'table, R, Coff> {
|
||||
SymbolIterator {
|
||||
symbols: self,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the symbol table entry at the given index.
|
||||
#[inline]
|
||||
pub fn symbol(&self, index: usize) -> Result<&'data Coff::ImageSymbol> {
|
||||
self.get::<Coff::ImageSymbol>(index, 0)
|
||||
}
|
||||
|
||||
/// Return the auxiliary function symbol for the symbol table entry at the given index.
|
||||
///
|
||||
/// Note that the index is of the symbol, not the first auxiliary record.
|
||||
#[inline]
|
||||
pub fn aux_function(&self, index: usize) -> Result<&'data pe::ImageAuxSymbolFunction> {
|
||||
self.get::<pe::ImageAuxSymbolFunction>(index, 1)
|
||||
}
|
||||
|
||||
/// Return the auxiliary section symbol for the symbol table entry at the given index.
|
||||
///
|
||||
/// Note that the index is of the symbol, not the first auxiliary record.
|
||||
#[inline]
|
||||
pub fn aux_section(&self, index: usize) -> Result<&'data pe::ImageAuxSymbolSection> {
|
||||
self.get::<pe::ImageAuxSymbolSection>(index, 1)
|
||||
}
|
||||
|
||||
/// Return the auxiliary file name for the symbol table entry at the given index.
|
||||
///
|
||||
/// Note that the index is of the symbol, not the first auxiliary record.
|
||||
pub fn aux_file_name(&self, index: usize, aux_count: u8) -> Result<&'data [u8]> {
|
||||
let entries = index
|
||||
.checked_add(1)
|
||||
.and_then(|x| Some(x..x.checked_add(aux_count.into())?))
|
||||
.and_then(|x| self.symbols.get(x))
|
||||
.read_error("Invalid COFF symbol index")?;
|
||||
let bytes = bytes_of_slice(entries);
|
||||
// The name is padded with nulls.
|
||||
Ok(match memchr::memchr(b'\0', bytes) {
|
||||
Some(end) => &bytes[..end],
|
||||
None => bytes,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the symbol table entry or auxiliary record at the given index and offset.
|
||||
pub fn get<T: Pod>(&self, index: usize, offset: usize) -> Result<&'data T> {
|
||||
let bytes = index
|
||||
.checked_add(offset)
|
||||
.and_then(|x| self.symbols.get(x))
|
||||
.read_error("Invalid COFF symbol index")?;
|
||||
Bytes(bytes_of(bytes))
|
||||
.read()
|
||||
.read_error("Invalid COFF symbol data")
|
||||
}
|
||||
|
||||
/// Construct a map from addresses to a user-defined map entry.
|
||||
pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Coff::ImageSymbol) -> Option<Entry>>(
|
||||
&self,
|
||||
f: F,
|
||||
) -> SymbolMap<Entry> {
|
||||
let mut symbols = Vec::with_capacity(self.symbols.len());
|
||||
for (_, symbol) in self.iter() {
|
||||
if !symbol.is_definition() {
|
||||
continue;
|
||||
}
|
||||
if let Some(entry) = f(symbol) {
|
||||
symbols.push(entry);
|
||||
}
|
||||
}
|
||||
SymbolMap::new(symbols)
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for symbol entries in a COFF or PE file.
|
||||
///
|
||||
/// Yields the index and symbol structure for each symbol.
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolIterator<'data, 'table, R = &'data [u8], Coff = pe::ImageFileHeader>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
symbols: &'table SymbolTable<'data, R, Coff>,
|
||||
index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'table, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for SymbolIterator<'data, 'table, R, Coff>
|
||||
{
|
||||
type Item = (usize, &'data Coff::ImageSymbol);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
Some((index, symbol))
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol table in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigSymbolTable<'data, 'file, R = &'data [u8]> =
|
||||
CoffSymbolTable<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A symbol table in a [`CoffFile`](super::CoffFile)
|
||||
/// or [`PeFile`](crate::read::pe::PeFile).
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct CoffSymbolTable<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
pub(crate) file: &'file CoffCommon<'data, R, Coff>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffSymbolTable<'data, 'file, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSymbolTable<'data>
|
||||
for CoffSymbolTable<'data, 'file, R, Coff>
|
||||
{
|
||||
type Symbol = CoffSymbol<'data, 'file, R, Coff>;
|
||||
type SymbolIterator = CoffSymbolIterator<'data, 'file, R, Coff>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
CoffSymbolIterator {
|
||||
file: self.file,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
|
||||
let symbol = self.file.symbols.symbol(index.0)?;
|
||||
Ok(CoffSymbol {
|
||||
file: self.file,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in a [`CoffBigFile`](super::CoffBigFile).
|
||||
pub type CoffBigSymbolIterator<'data, 'file, R = &'data [u8]> =
|
||||
CoffSymbolIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// An iterator for the symbols in a [`CoffFile`](super::CoffFile)
|
||||
/// or [`PeFile`](crate::read::pe::PeFile).
|
||||
pub struct CoffSymbolIterator<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
pub(crate) file: &'file CoffCommon<'data, R, Coff>,
|
||||
pub(crate) index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> fmt::Debug
|
||||
for CoffSymbolIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("CoffSymbolIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
|
||||
for CoffSymbolIterator<'data, 'file, R, Coff>
|
||||
{
|
||||
type Item = CoffSymbol<'data, 'file, R, Coff>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.file.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.number_of_aux_symbols() as usize;
|
||||
Some(CoffSymbol {
|
||||
file: self.file,
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`CoffBigFile`](super::CoffBigFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
pub type CoffBigSymbol<'data, 'file, R = &'data [u8]> =
|
||||
CoffSymbol<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
|
||||
|
||||
/// A symbol in a [`CoffFile`](super::CoffFile) or [`PeFile`](crate::read::pe::PeFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct CoffSymbol<'data, 'file, R = &'data [u8], Coff = pe::ImageFileHeader>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
Coff: CoffHeader,
|
||||
{
|
||||
pub(crate) file: &'file CoffCommon<'data, R, Coff>,
|
||||
pub(crate) index: SymbolIndex,
|
||||
pub(crate) symbol: &'data Coff::ImageSymbol,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffSymbol<'data, 'file, R, Coff> {
|
||||
    /// Get the raw `ImageSymbol` struct.
    #[inline]
    pub fn raw_symbol(&self) -> &'data Coff::ImageSymbol {
|
||||
self.symbol
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
|
||||
for CoffSymbol<'data, 'file, R, Coff>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectSymbol<'data>
|
||||
for CoffSymbol<'data, 'file, R, Coff>
|
||||
{
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> read::Result<&'data [u8]> {
|
||||
if self.symbol.has_aux_file_name() {
|
||||
self.file
|
||||
.symbols
|
||||
.aux_file_name(self.index.0, self.symbol.number_of_aux_symbols())
|
||||
} else {
|
||||
self.symbol.name(self.file.symbols.strings())
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> read::Result<&'data str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 COFF symbol name")
|
||||
}
|
||||
|
||||
fn address(&self) -> u64 {
|
||||
// Only return an address for storage classes that we know use an address.
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC
|
||||
| pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL
|
||||
| pe::IMAGE_SYM_CLASS_LABEL => {}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL => {
|
||||
if self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED {
|
||||
// Undefined or common data, neither of which has an address.
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
_ => return 0,
|
||||
}
|
||||
self.symbol
|
||||
.address(self.file.image_base, &self.file.sections)
|
||||
.unwrap_or(0)
|
||||
}
|
||||
|
||||
fn size(&self) -> u64 {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC => {
|
||||
// Section symbols may duplicate the size from the section table.
|
||||
if self.symbol.has_aux_section() {
|
||||
if let Ok(aux) = self.file.symbols.aux_section(self.index.0) {
|
||||
u64::from(aux.length.get(LE))
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL => {
|
||||
if self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED {
|
||||
// For undefined symbols, symbol.value is 0 and the size is 0.
|
||||
// For common data, symbol.value is the size.
|
||||
u64::from(self.symbol.value())
|
||||
} else if self.symbol.has_aux_function() {
|
||||
// Function symbols may have a size.
|
||||
if let Ok(aux) = self.file.symbols.aux_function(self.index.0) {
|
||||
u64::from(aux.total_size.get(LE))
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
// Most symbols don't have sizes.
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn kind(&self) -> SymbolKind {
|
||||
let derived_kind = if self.symbol.derived_type() == pe::IMAGE_SYM_DTYPE_FUNCTION {
|
||||
SymbolKind::Text
|
||||
} else {
|
||||
SymbolKind::Data
|
||||
};
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC => {
|
||||
if self.symbol.has_aux_section() {
|
||||
SymbolKind::Section
|
||||
} else {
|
||||
derived_kind
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => derived_kind,
|
||||
pe::IMAGE_SYM_CLASS_SECTION => SymbolKind::Section,
|
||||
pe::IMAGE_SYM_CLASS_FILE => SymbolKind::File,
|
||||
pe::IMAGE_SYM_CLASS_LABEL => SymbolKind::Label,
|
||||
_ => SymbolKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn section(&self) -> SymbolSection {
|
||||
match self.symbol.section_number() {
|
||||
pe::IMAGE_SYM_UNDEFINED => {
|
||||
if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL {
|
||||
if self.symbol.value() == 0 {
|
||||
SymbolSection::Undefined
|
||||
} else {
|
||||
SymbolSection::Common
|
||||
}
|
||||
} else if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_SECTION {
|
||||
SymbolSection::Undefined
|
||||
} else {
|
||||
SymbolSection::Unknown
|
||||
}
|
||||
}
|
||||
pe::IMAGE_SYM_ABSOLUTE => SymbolSection::Absolute,
|
||||
pe::IMAGE_SYM_DEBUG => {
|
||||
if self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_FILE {
|
||||
SymbolSection::None
|
||||
} else {
|
||||
SymbolSection::Unknown
|
||||
}
|
||||
}
|
||||
index if index > 0 => SymbolSection::Section(SectionIndex(index as usize)),
|
||||
_ => SymbolSection::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL
|
||||
&& self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED
|
||||
&& self.symbol.value() == 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
self.symbol.is_definition()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_EXTERNAL
|
||||
&& self.symbol.section_number() == pe::IMAGE_SYM_UNDEFINED
|
||||
&& self.symbol.value() != 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
self.symbol.storage_class() == pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn scope(&self) -> SymbolScope {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => {
|
||||
// TODO: determine if symbol is exported
|
||||
SymbolScope::Linkage
|
||||
}
|
||||
_ => SymbolScope::Compilation,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
match self.symbol.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
!self.is_global()
|
||||
}
|
||||
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
if self.symbol.has_aux_section() {
|
||||
if let Ok(aux) = self.file.symbols.aux_section(self.index.0) {
|
||||
let number = if Coff::is_type_bigobj() {
|
||||
u32::from(aux.number.get(LE)) | (u32::from(aux.high_number.get(LE)) << 16)
|
||||
} else {
|
||||
u32::from(aux.number.get(LE))
|
||||
};
|
||||
return SymbolFlags::CoffSection {
|
||||
selection: aux.selection,
|
||||
associative_section: if number == 0 {
|
||||
None
|
||||
} else {
|
||||
Some(SectionIndex(number as usize))
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
SymbolFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageSymbol`] and [`pe::ImageSymbolEx`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait ImageSymbol: Debug + Pod {
|
||||
fn raw_name(&self) -> &[u8; 8];
|
||||
fn value(&self) -> u32;
|
||||
fn section_number(&self) -> i32;
|
||||
fn typ(&self) -> u16;
|
||||
fn storage_class(&self) -> u8;
|
||||
fn number_of_aux_symbols(&self) -> u8;
|
||||
|
||||
/// Parse a COFF symbol name.
|
||||
///
|
||||
/// `strings` must be the string table used for symbol names.
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
let name = self.raw_name();
|
||||
if name[0] == 0 {
|
||||
// If the name starts with 0 then the last 4 bytes are a string table offset.
|
||||
let offset = u32::from_le_bytes(name[4..8].try_into().unwrap());
|
||||
strings
|
||||
.get(offset)
|
||||
.read_error("Invalid COFF symbol name offset")
|
||||
} else {
|
||||
// The name is inline and padded with nulls.
|
||||
Ok(match memchr::memchr(b'\0', name) {
|
||||
Some(end) => &name[..end],
|
||||
None => &name[..],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the symbol address.
|
||||
///
|
||||
/// This takes into account the image base and the section address.
|
||||
fn address(&self, image_base: u64, sections: &SectionTable<'_>) -> Result<u64> {
|
||||
let section_number = self.section_number() as usize;
|
||||
let section = sections.section(section_number)?;
|
||||
let virtual_address = u64::from(section.virtual_address.get(LE));
|
||||
let value = u64::from(self.value());
|
||||
Ok(image_base + virtual_address + value)
|
||||
}
|
||||
|
||||
/// Return true if the symbol is a definition of a function or data object.
|
||||
fn is_definition(&self) -> bool {
|
||||
if self.section_number() <= 0 {
|
||||
return false;
|
||||
}
|
||||
match self.storage_class() {
|
||||
pe::IMAGE_SYM_CLASS_STATIC => !self.has_aux_section(),
|
||||
pe::IMAGE_SYM_CLASS_EXTERNAL | pe::IMAGE_SYM_CLASS_WEAK_EXTERNAL => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if the symbol has an auxiliary file name.
|
||||
fn has_aux_file_name(&self) -> bool {
|
||||
self.number_of_aux_symbols() > 0 && self.storage_class() == pe::IMAGE_SYM_CLASS_FILE
|
||||
}
|
||||
|
||||
/// Return true if the symbol has an auxiliary function symbol.
|
||||
fn has_aux_function(&self) -> bool {
|
||||
self.number_of_aux_symbols() > 0 && self.derived_type() == pe::IMAGE_SYM_DTYPE_FUNCTION
|
||||
}
|
||||
|
||||
/// Return true if the symbol has an auxiliary section symbol.
|
||||
fn has_aux_section(&self) -> bool {
|
||||
self.number_of_aux_symbols() > 0
|
||||
&& self.storage_class() == pe::IMAGE_SYM_CLASS_STATIC
|
||||
&& self.typ() == 0
|
||||
}
|
||||
|
||||
fn base_type(&self) -> u16 {
|
||||
self.typ() & pe::N_BTMASK
|
||||
}
|
||||
|
||||
fn derived_type(&self) -> u16 {
|
||||
(self.typ() & pe::N_TMASK) >> pe::N_BTSHFT
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageSymbol for pe::ImageSymbol {
|
||||
fn raw_name(&self) -> &[u8; 8] {
|
||||
&self.name
|
||||
}
|
||||
fn value(&self) -> u32 {
|
||||
self.value.get(LE)
|
||||
}
|
||||
fn section_number(&self) -> i32 {
|
||||
let section_number = self.section_number.get(LE);
|
||||
if section_number >= pe::IMAGE_SYM_SECTION_MAX {
|
||||
(section_number as i16) as i32
|
||||
} else {
|
||||
section_number as i32
|
||||
}
|
||||
}
|
||||
fn typ(&self) -> u16 {
|
||||
self.typ.get(LE)
|
||||
}
|
||||
fn storage_class(&self) -> u8 {
|
||||
self.storage_class
|
||||
}
|
||||
fn number_of_aux_symbols(&self) -> u8 {
|
||||
self.number_of_aux_symbols
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageSymbol for pe::ImageSymbolEx {
|
||||
fn raw_name(&self) -> &[u8; 8] {
|
||||
&self.name
|
||||
}
|
||||
fn value(&self) -> u32 {
|
||||
self.value.get(LE)
|
||||
}
|
||||
fn section_number(&self) -> i32 {
|
||||
self.section_number.get(LE)
|
||||
}
|
||||
fn typ(&self) -> u16 {
|
||||
self.typ.get(LE)
|
||||
}
|
||||
fn storage_class(&self) -> u8 {
|
||||
self.storage_class
|
||||
}
|
||||
fn number_of_aux_symbols(&self) -> u8 {
|
||||
self.number_of_aux_symbols
|
||||
}
|
||||
}
|
||||
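// Editor's note: a minimal usage sketch, not part of the vendored source.
// It shows the `ObjectSymbol` implementation above being driven through the
// generic read API; `data` is assumed to hold a COFF object file.
fn dump_coff_symbols(data: &[u8]) -> object::read::Result<()> {
    use object::read::coff::CoffFile;
    use object::{Object, ObjectSymbol};

    let file: CoffFile = CoffFile::parse(data)?;
    for symbol in file.symbols() {
        println!(
            "{:#010x} {:?} {}",
            symbol.address(),
            symbol.kind(),
            symbol.name().unwrap_or("<non-UTF-8 name>")
        );
    }
    Ok(())
}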
307
vendor/object/src/read/elf/attributes.rs
vendored
Normal file
@@ -0,0 +1,307 @@
|
||||
use core::convert::TryInto;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian;
|
||||
use crate::read::{Bytes, Error, ReadError, Result};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// An ELF attributes section.
|
||||
///
|
||||
/// This may be a GNU attributes section, or an architecture specific attributes section.
|
||||
///
|
||||
/// An attributes section contains a series of [`AttributesSubsection`].
|
||||
///
|
||||
/// Returned by [`SectionHeader::attributes`](super::SectionHeader::attributes)
|
||||
/// and [`SectionHeader::gnu_attributes`](super::SectionHeader::gnu_attributes).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSection<'data, Elf: FileHeader> {
|
||||
endian: Elf::Endian,
|
||||
version: u8,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> AttributesSection<'data, Elf> {
|
||||
/// Parse an ELF attributes section given the section data.
|
||||
pub fn new(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
|
||||
let mut data = Bytes(data);
|
||||
|
||||
// Read the one-byte version field.
|
||||
let version = *data
|
||||
.read::<u8>()
|
||||
.read_error("Invalid ELF attributes section offset or size")?;
|
||||
|
||||
Ok(AttributesSection {
|
||||
endian,
|
||||
version,
|
||||
data,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the version of the attributes section.
|
||||
pub fn version(&self) -> u8 {
|
||||
self.version
|
||||
}
|
||||
|
||||
/// Return an iterator over the subsections.
|
||||
pub fn subsections(&self) -> Result<AttributesSubsectionIterator<'data, Elf>> {
|
||||
// There is currently only one format version.
|
||||
if self.version != b'A' {
|
||||
return Err(Error("Unsupported ELF attributes section version"));
|
||||
}
|
||||
|
||||
Ok(AttributesSubsectionIterator {
|
||||
endian: self.endian,
|
||||
data: self.data,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the subsections in an [`AttributesSection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsectionIterator<'data, Elf: FileHeader> {
|
||||
endian: Elf::Endian,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> AttributesSubsectionIterator<'data, Elf> {
|
||||
/// Return the next subsection.
|
||||
pub fn next(&mut self) -> Result<Option<AttributesSubsection<'data, Elf>>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let result = self.parse();
|
||||
if result.is_err() {
|
||||
self.data = Bytes(&[]);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn parse(&mut self) -> Result<Option<AttributesSubsection<'data, Elf>>> {
|
||||
// First read the subsection length.
|
||||
let mut data = self.data;
|
||||
let length = data
|
||||
.read::<endian::U32Bytes<Elf::Endian>>()
|
||||
.read_error("ELF attributes section is too short")?
|
||||
.get(self.endian);
|
||||
|
||||
// Now read the entire subsection, updating self.data.
|
||||
let mut data = self
|
||||
.data
|
||||
.read_bytes(length as usize)
|
||||
.read_error("Invalid ELF attributes subsection length")?;
|
||||
// Skip the subsection length field.
|
||||
data.skip(4)
|
||||
.read_error("Invalid ELF attributes subsection length")?;
|
||||
|
||||
let vendor = data
|
||||
.read_string()
|
||||
.read_error("Invalid ELF attributes vendor")?;
|
||||
|
||||
Ok(Some(AttributesSubsection {
|
||||
endian: self.endian,
|
||||
length,
|
||||
vendor,
|
||||
data,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// A subsection in an [`AttributesSection`].
|
||||
///
|
||||
/// A subsection is identified by a vendor name. It contains a series of
|
||||
/// [`AttributesSubsubsection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsection<'data, Elf: FileHeader> {
|
||||
endian: Elf::Endian,
|
||||
length: u32,
|
||||
vendor: &'data [u8],
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> AttributesSubsection<'data, Elf> {
|
||||
/// Return the length of the attributes subsection.
|
||||
pub fn length(&self) -> u32 {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Return the vendor name of the attributes subsection.
|
||||
pub fn vendor(&self) -> &'data [u8] {
|
||||
self.vendor
|
||||
}
|
||||
|
||||
/// Return an iterator over the sub-subsections.
|
||||
pub fn subsubsections(&self) -> AttributesSubsubsectionIterator<'data, Elf> {
|
||||
AttributesSubsubsectionIterator {
|
||||
endian: self.endian,
|
||||
data: self.data,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sub-subsections in an [`AttributesSubsection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsubsectionIterator<'data, Elf: FileHeader> {
|
||||
endian: Elf::Endian,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> AttributesSubsubsectionIterator<'data, Elf> {
|
||||
/// Return the next sub-subsection.
|
||||
pub fn next(&mut self) -> Result<Option<AttributesSubsubsection<'data>>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let result = self.parse();
|
||||
if result.is_err() {
|
||||
self.data = Bytes(&[]);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn parse(&mut self) -> Result<Option<AttributesSubsubsection<'data>>> {
|
||||
// The format of a sub-subsection looks like this:
|
||||
//
|
||||
// <file-tag> <size> <attribute>*
|
||||
// | <section-tag> <size> <section-number>* 0 <attribute>*
|
||||
// | <symbol-tag> <size> <symbol-number>* 0 <attribute>*
|
||||
let mut data = self.data;
|
||||
let tag = *data
|
||||
.read::<u8>()
|
||||
.read_error("ELF attributes subsection is too short")?;
|
||||
let length = data
|
||||
.read::<endian::U32Bytes<Elf::Endian>>()
|
||||
.read_error("ELF attributes subsection is too short")?
|
||||
.get(self.endian);
|
||||
|
||||
// Now read the entire sub-subsection, updating self.data.
|
||||
let mut data = self
|
||||
.data
|
||||
.read_bytes(length as usize)
|
||||
.read_error("Invalid ELF attributes sub-subsection length")?;
|
||||
// Skip the tag and sub-subsection size field.
|
||||
data.skip(1 + 4)
|
||||
.read_error("Invalid ELF attributes sub-subsection length")?;
|
||||
|
||||
let indices = if tag == elf::Tag_Section || tag == elf::Tag_Symbol {
|
||||
data.read_string()
|
||||
.map(Bytes)
|
||||
.read_error("Missing ELF attributes sub-subsection indices")?
|
||||
} else if tag == elf::Tag_File {
|
||||
Bytes(&[])
|
||||
} else {
|
||||
return Err(Error("Unimplemented ELF attributes sub-subsection tag"));
|
||||
};
|
||||
|
||||
Ok(Some(AttributesSubsubsection {
|
||||
tag,
|
||||
length,
|
||||
indices,
|
||||
data,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// A sub-subsection in an [`AttributesSubsection`].
|
||||
///
|
||||
/// A sub-subsection is identified by a tag. It contains an optional series of indices,
|
||||
/// followed by a series of attributes.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributesSubsubsection<'data> {
|
||||
tag: u8,
|
||||
length: u32,
|
||||
indices: Bytes<'data>,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributesSubsubsection<'data> {
|
||||
/// Return the tag of the attributes sub-subsection.
|
||||
pub fn tag(&self) -> u8 {
|
||||
self.tag
|
||||
}
|
||||
|
||||
/// Return the length of the attributes sub-subsection.
|
||||
pub fn length(&self) -> u32 {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Return the data containing the indices.
|
||||
pub fn indices_data(&self) -> &'data [u8] {
|
||||
self.indices.0
|
||||
}
|
||||
|
||||
/// Return the indices.
|
||||
///
|
||||
/// This will be section indices if the tag is `Tag_Section`,
|
||||
/// or symbol indices if the tag is `Tag_Symbol`,
|
||||
/// and otherwise it will be empty.
|
||||
pub fn indices(&self) -> AttributeIndexIterator<'data> {
|
||||
AttributeIndexIterator { data: self.indices }
|
||||
}
|
||||
|
||||
/// Return the data containing the attributes.
|
||||
pub fn attributes_data(&self) -> &'data [u8] {
|
||||
self.data.0
|
||||
}
|
||||
|
||||
/// Return a parser for the data containing the attributes.
|
||||
pub fn attributes(&self) -> AttributeReader<'data> {
|
||||
AttributeReader { data: self.data }
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the indices in an [`AttributesSubsubsection`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributeIndexIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributeIndexIterator<'data> {
|
||||
/// Parse the next index.
|
||||
pub fn next(&mut self) -> Result<Option<u32>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let err = "Invalid ELF attribute index";
|
||||
self.data
|
||||
.read_uleb128()
|
||||
.read_error(err)?
|
||||
.try_into()
|
||||
.map_err(|_| ())
|
||||
.read_error(err)
|
||||
.map(Some)
|
||||
}
|
||||
}
|
||||
|
||||
/// A parser for the attributes in an [`AttributesSubsubsection`].
|
||||
///
|
||||
/// The parser relies on the caller to know the format of the data for each attribute tag.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttributeReader<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> AttributeReader<'data> {
|
||||
/// Parse a tag.
|
||||
pub fn read_tag(&mut self) -> Result<Option<u64>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let err = "Invalid ELF attribute tag";
|
||||
self.data.read_uleb128().read_error(err).map(Some)
|
||||
}
|
||||
|
||||
/// Parse an integer value.
|
||||
pub fn read_integer(&mut self) -> Result<u64> {
|
||||
let err = "Invalid ELF attribute integer value";
|
||||
self.data.read_uleb128().read_error(err)
|
||||
}
|
||||
|
||||
/// Parse a string value.
|
||||
pub fn read_string(&mut self) -> Result<&'data [u8]> {
|
||||
let err = "Invalid ELF attribute string value";
|
||||
self.data.read_string().read_error(err)
|
||||
}
|
||||
}
|
||||
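// Editor's note: a minimal usage sketch, not part of the vendored source.
// It walks an attributes section with the reader types above; `endian` and
// `section_data` are assumed to come from an already-parsed 64-bit ELF file
// (for example, the contents of a `.gnu.attributes` section).
fn dump_elf_attributes(
    endian: object::Endianness,
    section_data: &[u8],
) -> object::read::Result<()> {
    use object::elf::FileHeader64;
    use object::read::elf::AttributesSection;

    let attributes =
        AttributesSection::<FileHeader64<object::Endianness>>::new(endian, section_data)?;
    let mut subsections = attributes.subsections()?;
    while let Some(subsection) = subsections.next()? {
        println!("vendor: {}", String::from_utf8_lossy(subsection.vendor()));
        let mut sub_subsections = subsection.subsubsections();
        while let Some(sub) = sub_subsections.next()? {
            println!("  tag {} ({} bytes)", sub.tag(), sub.length());
        }
    }
    Ok(())
}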
162
vendor/object/src/read/elf/comdat.rs
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{iter, slice, str};
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{Endianness, U32Bytes};
|
||||
use crate::read::{self, ComdatKind, ObjectComdat, ReadError, ReadRef, SectionIndex, SymbolIndex};
|
||||
|
||||
use super::{ElfFile, FileHeader, SectionHeader, Sym};
|
||||
|
||||
/// An iterator for the COMDAT section groups in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfComdatIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdatIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the COMDAT section groups in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfComdatIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdatIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in an [`ElfFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct ElfComdatIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file ElfFile<'data, Elf, R>,
|
||||
pub(super) iter: iter::Enumerate<slice::Iter<'data, Elf::SectionHeader>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfComdatIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = ElfComdat<'data, 'file, Elf, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
for (_index, section) in self.iter.by_ref() {
|
||||
if let Some(comdat) = ElfComdat::parse(self.file, section) {
|
||||
return Some(comdat);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfComdat32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdat<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// A COMDAT section group in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfComdat64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdat<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// A COMDAT section group in an [`ElfFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectComdat`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct ElfComdat<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
file: &'file ElfFile<'data, Elf, R>,
|
||||
section: &'data Elf::SectionHeader,
|
||||
sections: &'data [U32Bytes<Elf::Endian>],
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> ElfComdat<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn parse(
|
||||
file: &'file ElfFile<'data, Elf, R>,
|
||||
section: &'data Elf::SectionHeader,
|
||||
) -> Option<ElfComdat<'data, 'file, Elf, R>> {
|
||||
let (flag, sections) = section.group(file.endian, file.data).ok()??;
|
||||
if flag != elf::GRP_COMDAT {
|
||||
return None;
|
||||
}
|
||||
Some(ElfComdat {
|
||||
file,
|
||||
section,
|
||||
sections,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> read::private::Sealed for ElfComdat<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> ObjectComdat<'data> for ElfComdat<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type SectionIterator = ElfComdatSectionIterator<'data, 'file, Elf, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
ComdatKind::Any
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
SymbolIndex(self.section.sh_info(self.file.endian) as usize)
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> read::Result<&[u8]> {
|
||||
// FIXME: check sh_link
|
||||
let index = self.section.sh_info(self.file.endian) as usize;
|
||||
let symbol = self.file.symbols.symbol(index)?;
|
||||
symbol.name(self.file.endian, self.file.symbols.strings())
|
||||
}
|
||||
|
||||
fn name(&self) -> read::Result<&str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 ELF COMDAT name")
|
||||
}
|
||||
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
ElfComdatSectionIterator {
|
||||
file: self.file,
|
||||
sections: self.sections.iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfComdatSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdatSectionIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the sections in a COMDAT section group in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfComdatSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfComdatSectionIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in an [`ElfFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct ElfComdatSectionIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
file: &'file ElfFile<'data, Elf, R>,
|
||||
sections: slice::Iter<'data, U32Bytes<Elf::Endian>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfComdatSectionIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.sections.next()?;
|
||||
Some(SectionIndex(index.get(self.file.endian) as usize))
|
||||
}
|
||||
}
|
||||
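// Editor's note: a minimal usage sketch, not part of the vendored source.
// It lists COMDAT section groups through the generic `ObjectComdat`
// implementation above; `data` is assumed to hold a 64-bit ELF object file.
fn dump_elf_comdats(data: &[u8]) -> object::read::Result<()> {
    use object::read::elf::ElfFile64;
    use object::{Object, ObjectComdat};

    let file: ElfFile64 = ElfFile64::parse(data)?;
    for comdat in file.comdats() {
        let sections: Vec<_> = comdat.sections().collect();
        println!(
            "{} ({:?}): {:?}",
            comdat.name().unwrap_or("<invalid name>"),
            comdat.kind(),
            sections
        );
    }
    Ok(())
}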
56
vendor/object/src/read/elf/compression.rs
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian;
|
||||
use crate::pod::Pod;
|
||||
|
||||
/// A trait for generic access to [`elf::CompressionHeader32`] and [`elf::CompressionHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait CompressionHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn ch_type(&self, endian: Self::Endian) -> u32;
|
||||
fn ch_size(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn ch_addralign(&self, endian: Self::Endian) -> Self::Word;
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> CompressionHeader for elf::CompressionHeader32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn ch_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.ch_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ch_size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.ch_size.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ch_addralign(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.ch_addralign.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> CompressionHeader for elf::CompressionHeader64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn ch_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.ch_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ch_size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.ch_size.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ch_addralign(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.ch_addralign.get(endian)
|
||||
}
|
||||
}
|
||||
117
vendor/object/src/read/elf/dynamic.rs
vendored
Normal file
@@ -0,0 +1,117 @@
use core::convert::TryInto;
use core::fmt::Debug;

use crate::elf;
use crate::endian;
use crate::pod::Pod;
use crate::read::{ReadError, Result, StringTable};

/// A trait for generic access to [`elf::Dyn32`] and [`elf::Dyn64`].
#[allow(missing_docs)]
pub trait Dyn: Debug + Pod {
    type Word: Into<u64>;
    type Endian: endian::Endian;

    fn d_tag(&self, endian: Self::Endian) -> Self::Word;
    fn d_val(&self, endian: Self::Endian) -> Self::Word;

    /// Try to convert the tag to a `u32`.
    fn tag32(&self, endian: Self::Endian) -> Option<u32> {
        self.d_tag(endian).into().try_into().ok()
    }

    /// Try to convert the value to a `u32`.
    fn val32(&self, endian: Self::Endian) -> Option<u32> {
        self.d_val(endian).into().try_into().ok()
    }

    /// Return true if the value is an offset in the dynamic string table.
    fn is_string(&self, endian: Self::Endian) -> bool {
        if let Some(tag) = self.tag32(endian) {
            match tag {
                elf::DT_NEEDED
                | elf::DT_SONAME
                | elf::DT_RPATH
                | elf::DT_RUNPATH
                | elf::DT_AUXILIARY
                | elf::DT_FILTER => true,
                _ => false,
            }
        } else {
            false
        }
    }

    /// Use the value to get a string in a string table.
    ///
    /// Does not check for an appropriate tag.
    fn string<'data>(
        &self,
        endian: Self::Endian,
        strings: StringTable<'data>,
    ) -> Result<&'data [u8]> {
        self.val32(endian)
            .and_then(|val| strings.get(val).ok())
            .read_error("Invalid ELF dyn string")
    }

    /// Return true if the value is an address.
    fn is_address(&self, endian: Self::Endian) -> bool {
        if let Some(tag) = self.tag32(endian) {
            match tag {
                elf::DT_PLTGOT
                | elf::DT_HASH
                | elf::DT_STRTAB
                | elf::DT_SYMTAB
                | elf::DT_RELA
                | elf::DT_INIT
                | elf::DT_FINI
                | elf::DT_SYMBOLIC
                | elf::DT_REL
                | elf::DT_DEBUG
                | elf::DT_JMPREL
                | elf::DT_FINI_ARRAY
                | elf::DT_INIT_ARRAY
                | elf::DT_PREINIT_ARRAY
                | elf::DT_SYMTAB_SHNDX
                | elf::DT_VERDEF
                | elf::DT_VERNEED
                | elf::DT_VERSYM
                | elf::DT_ADDRRNGLO..=elf::DT_ADDRRNGHI => true,
                _ => false,
            }
        } else {
            false
        }
    }
}

impl<Endian: endian::Endian> Dyn for elf::Dyn32<Endian> {
    type Word = u32;
    type Endian = Endian;

    #[inline]
    fn d_tag(&self, endian: Self::Endian) -> Self::Word {
        self.d_tag.get(endian)
    }

    #[inline]
    fn d_val(&self, endian: Self::Endian) -> Self::Word {
        self.d_val.get(endian)
    }
}

impl<Endian: endian::Endian> Dyn for elf::Dyn64<Endian> {
    type Word = u64;
    type Endian = Endian;

    #[inline]
    fn d_tag(&self, endian: Self::Endian) -> Self::Word {
        self.d_tag.get(endian)
    }

    #[inline]
    fn d_val(&self, endian: Self::Endian) -> Self::Word {
        self.d_val.get(endian)
    }
}
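A minimal usage sketch (not part of the vendored file): listing `DT_NEEDED` entries with only the generic `Dyn` trait above, assuming the caller has already located the dynamic array and its string table.

// Illustrative only: print the shared libraries a binary declares it needs.
use object::elf;
use object::read::elf::Dyn;
use object::read::StringTable;

fn print_needed<D: Dyn>(endian: D::Endian, dyns: &[D], strings: StringTable<'_>) {
    for d in dyns {
        if d.tag32(endian) == Some(elf::DT_NEEDED) {
            // `string` resolves the d_val offset in the dynamic string table.
            if let Ok(name) = d.string(endian, strings) {
                println!("NEEDED {}", String::from_utf8_lossy(name));
            }
        }
    }
}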
916
vendor/object/src/read/elf/file.rs
vendored
Normal file
@@ -0,0 +1,916 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::read::{
|
||||
self, util, Architecture, ByteString, Bytes, Error, Export, FileFlags, Import, Object,
|
||||
ObjectKind, ReadError, ReadRef, SectionIndex, StringTable, SymbolIndex,
|
||||
};
|
||||
use crate::{elf, endian, Endian, Endianness, Pod, U32};
|
||||
|
||||
use super::{
|
||||
CompressionHeader, Dyn, ElfComdat, ElfComdatIterator, ElfDynamicRelocationIterator, ElfSection,
|
||||
ElfSectionIterator, ElfSegment, ElfSegmentIterator, ElfSymbol, ElfSymbolIterator,
|
||||
ElfSymbolTable, NoteHeader, ProgramHeader, Rel, Rela, RelocationSections, SectionHeader,
|
||||
SectionTable, Sym, SymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit ELF object file.
|
||||
///
|
||||
/// This is a file that starts with [`elf::FileHeader32`], and corresponds
|
||||
/// to [`crate::FileKind::Elf32`].
|
||||
pub type ElfFile32<'data, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfFile<'data, elf::FileHeader32<Endian>, R>;
|
||||
/// A 64-bit ELF object file.
|
||||
///
|
||||
/// This is a file that starts with [`elf::FileHeader64`], and corresponds
|
||||
/// to [`crate::FileKind::Elf64`].
|
||||
pub type ElfFile64<'data, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfFile<'data, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// A partially parsed ELF file.
|
||||
///
|
||||
/// Most functionality is provided by the [`Object`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct ElfFile<'data, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) endian: Elf::Endian,
|
||||
pub(super) data: R,
|
||||
pub(super) header: &'data Elf,
|
||||
pub(super) segments: &'data [Elf::ProgramHeader],
|
||||
pub(super) sections: SectionTable<'data, Elf, R>,
|
||||
pub(super) relocations: RelocationSections,
|
||||
pub(super) symbols: SymbolTable<'data, Elf, R>,
|
||||
pub(super) dynamic_symbols: SymbolTable<'data, Elf, R>,
|
||||
}
|
||||
|
||||
impl<'data, Elf, R> ElfFile<'data, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the raw ELF file data.
|
||||
pub fn parse(data: R) -> read::Result<Self> {
|
||||
let header = Elf::parse(data)?;
|
||||
let endian = header.endian()?;
|
||||
let segments = header.program_headers(endian, data)?;
|
||||
let sections = header.sections(endian, data)?;
|
||||
let symbols = sections.symbols(endian, data, elf::SHT_SYMTAB)?;
|
||||
// TODO: get dynamic symbols from DT_SYMTAB if there are no sections
|
||||
let dynamic_symbols = sections.symbols(endian, data, elf::SHT_DYNSYM)?;
|
||||
// The API we provide requires a mapping from section to relocations, so build it now.
|
||||
let relocations = sections.relocation_sections(endian, symbols.section())?;
|
||||
|
||||
Ok(ElfFile {
|
||||
endian,
|
||||
data,
|
||||
header,
|
||||
segments,
|
||||
sections,
|
||||
relocations,
|
||||
symbols,
|
||||
dynamic_symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the endianness.
|
||||
pub fn endian(&self) -> Elf::Endian {
|
||||
self.endian
|
||||
}
|
||||
|
||||
/// Returns the raw data.
|
||||
pub fn data(&self) -> R {
|
||||
self.data
|
||||
}
|
||||
|
||||
/// Returns the raw ELF file header.
|
||||
pub fn raw_header(&self) -> &'data Elf {
|
||||
self.header
|
||||
}
|
||||
|
||||
/// Returns the raw ELF segments.
|
||||
pub fn raw_segments(&self) -> &'data [Elf::ProgramHeader] {
|
||||
self.segments
|
||||
}
|
||||
|
||||
fn raw_section_by_name<'file>(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<ElfSection<'data, 'file, Elf, R>> {
|
||||
self.sections
|
||||
.section_by_name(self.endian, section_name)
|
||||
.map(|(index, section)| ElfSection {
|
||||
file: self,
|
||||
index: SectionIndex(index),
|
||||
section,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(feature = "compression")]
|
||||
fn zdebug_section_by_name<'file>(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<ElfSection<'data, 'file, Elf, R>> {
|
||||
if !section_name.starts_with(b".debug_") {
|
||||
return None;
|
||||
}
|
||||
let mut name = Vec::with_capacity(section_name.len() + 1);
|
||||
name.extend_from_slice(b".zdebug_");
|
||||
name.extend_from_slice(&section_name[7..]);
|
||||
self.raw_section_by_name(&name)
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "compression"))]
|
||||
fn zdebug_section_by_name<'file>(
|
||||
&'file self,
|
||||
_section_name: &[u8],
|
||||
) -> Option<ElfSection<'data, 'file, Elf, R>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf, R> read::private::Sealed for ElfFile<'data, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Object<'data, 'file> for ElfFile<'data, Elf, R>
|
||||
where
|
||||
'data: 'file,
|
||||
Elf: FileHeader,
|
||||
R: 'file + ReadRef<'data>,
|
||||
{
|
||||
type Segment = ElfSegment<'data, 'file, Elf, R>;
|
||||
type SegmentIterator = ElfSegmentIterator<'data, 'file, Elf, R>;
|
||||
type Section = ElfSection<'data, 'file, Elf, R>;
|
||||
type SectionIterator = ElfSectionIterator<'data, 'file, Elf, R>;
|
||||
type Comdat = ElfComdat<'data, 'file, Elf, R>;
|
||||
type ComdatIterator = ElfComdatIterator<'data, 'file, Elf, R>;
|
||||
type Symbol = ElfSymbol<'data, 'file, Elf, R>;
|
||||
type SymbolIterator = ElfSymbolIterator<'data, 'file, Elf, R>;
|
||||
type SymbolTable = ElfSymbolTable<'data, 'file, Elf, R>;
|
||||
type DynamicRelocationIterator = ElfDynamicRelocationIterator<'data, 'file, Elf, R>;
|
||||
|
||||
fn architecture(&self) -> Architecture {
|
||||
match (
|
||||
self.header.e_machine(self.endian),
|
||||
self.header.is_class_64(),
|
||||
) {
|
||||
(elf::EM_AARCH64, true) => Architecture::Aarch64,
|
||||
(elf::EM_AARCH64, false) => Architecture::Aarch64_Ilp32,
|
||||
(elf::EM_ARM, _) => Architecture::Arm,
|
||||
(elf::EM_AVR, _) => Architecture::Avr,
|
||||
(elf::EM_BPF, _) => Architecture::Bpf,
|
||||
(elf::EM_CSKY, _) => Architecture::Csky,
|
||||
(elf::EM_386, _) => Architecture::I386,
|
||||
(elf::EM_X86_64, false) => Architecture::X86_64_X32,
|
||||
(elf::EM_X86_64, true) => Architecture::X86_64,
|
||||
(elf::EM_HEXAGON, _) => Architecture::Hexagon,
|
||||
(elf::EM_LOONGARCH, true) => Architecture::LoongArch64,
|
||||
(elf::EM_MIPS, false) => Architecture::Mips,
|
||||
(elf::EM_MIPS, true) => Architecture::Mips64,
|
||||
(elf::EM_MSP430, _) => Architecture::Msp430,
|
||||
(elf::EM_PPC, _) => Architecture::PowerPc,
|
||||
(elf::EM_PPC64, _) => Architecture::PowerPc64,
|
||||
(elf::EM_RISCV, false) => Architecture::Riscv32,
|
||||
(elf::EM_RISCV, true) => Architecture::Riscv64,
|
||||
// This is either s390 or s390x, depending on the ELF class.
|
||||
// We only support the 64-bit variant s390x here.
|
||||
(elf::EM_S390, true) => Architecture::S390x,
|
||||
(elf::EM_SBF, _) => Architecture::Sbf,
|
||||
(elf::EM_SHARC, false) => Architecture::Sharc,
|
||||
(elf::EM_SPARCV9, true) => Architecture::Sparc64,
|
||||
(elf::EM_XTENSA, false) => Architecture::Xtensa,
|
||||
_ => Architecture::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.header.is_little_endian()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
self.header.is_class_64()
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
match self.header.e_type(self.endian) {
|
||||
elf::ET_REL => ObjectKind::Relocatable,
|
||||
elf::ET_EXEC => ObjectKind::Executable,
|
||||
// TODO: check for `DF_1_PIE`?
|
||||
elf::ET_DYN => ObjectKind::Dynamic,
|
||||
elf::ET_CORE => ObjectKind::Core,
|
||||
_ => ObjectKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> ElfSegmentIterator<'data, 'file, Elf, R> {
|
||||
ElfSegmentIterator {
|
||||
file: self,
|
||||
iter: self.segments.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<ElfSection<'data, 'file, Elf, R>> {
|
||||
self.raw_section_by_name(section_name)
|
||||
.or_else(|| self.zdebug_section_by_name(section_name))
|
||||
}
|
||||
|
||||
fn section_by_index(
|
||||
&'file self,
|
||||
index: SectionIndex,
|
||||
) -> read::Result<ElfSection<'data, 'file, Elf, R>> {
|
||||
let section = self.sections.section(index)?;
|
||||
Ok(ElfSection {
|
||||
file: self,
|
||||
index,
|
||||
section,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> ElfSectionIterator<'data, 'file, Elf, R> {
|
||||
ElfSectionIterator {
|
||||
file: self,
|
||||
iter: self.sections.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> ElfComdatIterator<'data, 'file, Elf, R> {
|
||||
ElfComdatIterator {
|
||||
file: self,
|
||||
iter: self.sections.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(
|
||||
&'file self,
|
||||
index: SymbolIndex,
|
||||
) -> read::Result<ElfSymbol<'data, 'file, Elf, R>> {
|
||||
let symbol = self.symbols.symbol(index.0)?;
|
||||
Ok(ElfSymbol {
|
||||
endian: self.endian,
|
||||
symbols: &self.symbols,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> ElfSymbolIterator<'data, 'file, Elf, R> {
|
||||
ElfSymbolIterator {
|
||||
endian: self.endian,
|
||||
symbols: &self.symbols,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_table(&'file self) -> Option<ElfSymbolTable<'data, 'file, Elf, R>> {
|
||||
if self.symbols.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(ElfSymbolTable {
|
||||
endian: self.endian,
|
||||
symbols: &self.symbols,
|
||||
})
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> ElfSymbolIterator<'data, 'file, Elf, R> {
|
||||
ElfSymbolIterator {
|
||||
endian: self.endian,
|
||||
symbols: &self.dynamic_symbols,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn dynamic_symbol_table(&'file self) -> Option<ElfSymbolTable<'data, 'file, Elf, R>> {
|
||||
if self.dynamic_symbols.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(ElfSymbolTable {
|
||||
endian: self.endian,
|
||||
symbols: &self.dynamic_symbols,
|
||||
})
|
||||
}
|
||||
|
||||
fn dynamic_relocations(
|
||||
&'file self,
|
||||
) -> Option<ElfDynamicRelocationIterator<'data, 'file, Elf, R>> {
|
||||
Some(ElfDynamicRelocationIterator {
|
||||
section_index: SectionIndex(1),
|
||||
file: self,
|
||||
relocations: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn imports(&self) -> read::Result<Vec<Import<'data>>> {
|
||||
let mut imports = Vec::new();
|
||||
for symbol in self.dynamic_symbols.iter() {
|
||||
if symbol.is_undefined(self.endian) {
|
||||
let name = symbol.name(self.endian, self.dynamic_symbols.strings())?;
|
||||
if !name.is_empty() {
|
||||
// TODO: use symbol versioning to determine library
|
||||
imports.push(Import {
|
||||
name: ByteString(name),
|
||||
library: ByteString(&[]),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(imports)
|
||||
}
|
||||
|
||||
fn exports(&self) -> read::Result<Vec<Export<'data>>> {
|
||||
let mut exports = Vec::new();
|
||||
for symbol in self.dynamic_symbols.iter() {
|
||||
if symbol.is_definition(self.endian) {
|
||||
let name = symbol.name(self.endian, self.dynamic_symbols.strings())?;
|
||||
let address = symbol.st_value(self.endian).into();
|
||||
exports.push(Export {
|
||||
name: ByteString(name),
|
||||
address,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(exports)
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
for section in self.sections.iter() {
|
||||
if let Ok(name) = self.sections.section_name(self.endian, section) {
|
||||
if name == b".debug_info" || name == b".zdebug_info" {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn build_id(&self) -> read::Result<Option<&'data [u8]>> {
|
||||
let endian = self.endian;
|
||||
// Use section headers if present, otherwise use program headers.
|
||||
if !self.sections.is_empty() {
|
||||
for section in self.sections.iter() {
|
||||
if let Some(mut notes) = section.notes(endian, self.data)? {
|
||||
while let Some(note) = notes.next()? {
|
||||
if note.name() == elf::ELF_NOTE_GNU
|
||||
&& note.n_type(endian) == elf::NT_GNU_BUILD_ID
|
||||
{
|
||||
return Ok(Some(note.desc()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for segment in self.segments {
|
||||
if let Some(mut notes) = segment.notes(endian, self.data)? {
|
||||
while let Some(note) = notes.next()? {
|
||||
if note.name() == elf::ELF_NOTE_GNU
|
||||
&& note.n_type(endian) == elf::NT_GNU_BUILD_ID
|
||||
{
|
||||
return Ok(Some(note.desc()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn gnu_debuglink(&self) -> read::Result<Option<(&'data [u8], u32)>> {
|
||||
let section = match self.raw_section_by_name(b".gnu_debuglink") {
|
||||
Some(section) => section,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let data = section
|
||||
.section
|
||||
.data(self.endian, self.data)
|
||||
.read_error("Invalid ELF .gnu_debuglink section offset or size")
|
||||
.map(Bytes)?;
|
||||
let filename = data
|
||||
.read_string_at(0)
|
||||
.read_error("Missing ELF .gnu_debuglink filename")?;
|
||||
let crc_offset = util::align(filename.len() + 1, 4);
|
||||
let crc = data
|
||||
.read_at::<U32<_>>(crc_offset)
|
||||
.read_error("Missing ELF .gnu_debuglink crc")?
|
||||
.get(self.endian);
|
||||
Ok(Some((filename, crc)))
|
||||
}
|
||||
|
||||
fn gnu_debugaltlink(&self) -> read::Result<Option<(&'data [u8], &'data [u8])>> {
|
||||
let section = match self.raw_section_by_name(b".gnu_debugaltlink") {
|
||||
Some(section) => section,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let mut data = section
|
||||
.section
|
||||
.data(self.endian, self.data)
|
||||
.read_error("Invalid ELF .gnu_debugaltlink section offset or size")
|
||||
.map(Bytes)?;
|
||||
let filename = data
|
||||
.read_string()
|
||||
.read_error("Missing ELF .gnu_debugaltlink filename")?;
|
||||
let build_id = data.0;
|
||||
Ok(Some((filename, build_id)))
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn entry(&self) -> u64 {
|
||||
self.header.e_entry(self.endian).into()
|
||||
}
|
||||
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::Elf {
|
||||
os_abi: self.header.e_ident().os_abi,
|
||||
abi_version: self.header.e_ident().abi_version,
|
||||
e_flags: self.header.e_flags(self.endian),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::FileHeader32`] and [`elf::FileHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait FileHeader: Debug + Pod {
|
||||
// Ideally this would be a `u64: From<Word>`, but can't express that.
|
||||
type Word: Into<u64>;
|
||||
type Sword: Into<i64>;
|
||||
type Endian: endian::Endian;
|
||||
type ProgramHeader: ProgramHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;
|
||||
type SectionHeader: SectionHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;
|
||||
type CompressionHeader: CompressionHeader<Endian = Self::Endian, Word = Self::Word>;
|
||||
type NoteHeader: NoteHeader<Endian = Self::Endian>;
|
||||
type Dyn: Dyn<Endian = Self::Endian, Word = Self::Word>;
|
||||
type Sym: Sym<Endian = Self::Endian, Word = Self::Word>;
|
||||
type Rel: Rel<Endian = Self::Endian, Word = Self::Word>;
|
||||
type Rela: Rela<Endian = Self::Endian, Word = Self::Word> + From<Self::Rel>;
|
||||
|
||||
/// Return true if this type is a 64-bit header.
|
||||
///
|
||||
/// This is a property of the type, not a value in the header data.
|
||||
fn is_type_64(&self) -> bool;
|
||||
|
||||
/// Return true if this type is a 64-bit header.
|
||||
///
|
||||
/// This is a property of the type, not a value in the header data.
|
||||
///
|
||||
/// This is the same as [`Self::is_type_64`], but is non-dispatchable.
|
||||
fn is_type_64_sized() -> bool
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
fn e_ident(&self) -> &elf::Ident;
|
||||
fn e_type(&self, endian: Self::Endian) -> u16;
|
||||
fn e_machine(&self, endian: Self::Endian) -> u16;
|
||||
fn e_version(&self, endian: Self::Endian) -> u32;
|
||||
fn e_entry(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn e_phoff(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn e_shoff(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn e_flags(&self, endian: Self::Endian) -> u32;
|
||||
fn e_ehsize(&self, endian: Self::Endian) -> u16;
|
||||
fn e_phentsize(&self, endian: Self::Endian) -> u16;
|
||||
fn e_phnum(&self, endian: Self::Endian) -> u16;
|
||||
fn e_shentsize(&self, endian: Self::Endian) -> u16;
|
||||
fn e_shnum(&self, endian: Self::Endian) -> u16;
|
||||
fn e_shstrndx(&self, endian: Self::Endian) -> u16;
|
||||
|
||||
// Provided methods.
|
||||
|
||||
/// Read the file header.
|
||||
///
|
||||
/// Also checks that the ident field in the file header is a supported format.
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R) -> read::Result<&'data Self> {
|
||||
let header = data
|
||||
.read_at::<Self>(0)
|
||||
.read_error("Invalid ELF header size or alignment")?;
|
||||
if !header.is_supported() {
|
||||
return Err(Error("Unsupported ELF header"));
|
||||
}
|
||||
// TODO: Check self.e_ehsize?
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
/// Check that the ident field in the file header is a supported format.
|
||||
///
|
||||
/// This checks the magic number, version, class, and endianness.
|
||||
fn is_supported(&self) -> bool {
|
||||
let ident = self.e_ident();
|
||||
// TODO: Check self.e_version too? Requires endian though.
|
||||
ident.magic == elf::ELFMAG
|
||||
&& (self.is_type_64() || self.is_class_32())
|
||||
&& (!self.is_type_64() || self.is_class_64())
|
||||
&& (self.is_little_endian() || self.is_big_endian())
|
||||
&& ident.version == elf::EV_CURRENT
|
||||
}
|
||||
|
||||
fn is_class_32(&self) -> bool {
|
||||
self.e_ident().class == elf::ELFCLASS32
|
||||
}
|
||||
|
||||
fn is_class_64(&self) -> bool {
|
||||
self.e_ident().class == elf::ELFCLASS64
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.e_ident().data == elf::ELFDATA2LSB
|
||||
}
|
||||
|
||||
fn is_big_endian(&self) -> bool {
|
||||
self.e_ident().data == elf::ELFDATA2MSB
|
||||
}
|
||||
|
||||
fn endian(&self) -> read::Result<Self::Endian> {
|
||||
Self::Endian::from_big_endian(self.is_big_endian()).read_error("Unsupported ELF endian")
|
||||
}
|
||||
|
||||
/// Return the first section header, if present.
|
||||
///
|
||||
/// Section 0 is a special case because getting the section headers normally
|
||||
/// requires `shnum`, but `shnum` may be in the first section header.
|
||||
fn section_0<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<Option<&'data Self::SectionHeader>> {
|
||||
let shoff: u64 = self.e_shoff(endian).into();
|
||||
if shoff == 0 {
|
||||
// No section headers is ok.
|
||||
return Ok(None);
|
||||
}
|
||||
let shentsize = usize::from(self.e_shentsize(endian));
|
||||
if shentsize != mem::size_of::<Self::SectionHeader>() {
|
||||
// Section header size must match.
|
||||
return Err(Error("Invalid ELF section header entry size"));
|
||||
}
|
||||
data.read_at(shoff)
|
||||
.map(Some)
|
||||
.read_error("Invalid ELF section header offset or size")
|
||||
}
|
||||
|
||||
/// Return the `e_phnum` field of the header. Handles extended values.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn phnum<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<usize> {
|
||||
let e_phnum = self.e_phnum(endian);
|
||||
if e_phnum < elf::PN_XNUM {
|
||||
Ok(e_phnum as usize)
|
||||
} else if let Some(section_0) = self.section_0(endian, data)? {
|
||||
Ok(section_0.sh_info(endian) as usize)
|
||||
} else {
|
||||
// Section 0 must exist if e_phnum overflows.
|
||||
Err(Error("Missing ELF section headers for e_phnum overflow"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the `e_shnum` field of the header. Handles extended values.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn shnum<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<usize> {
|
||||
let e_shnum = self.e_shnum(endian);
|
||||
if e_shnum > 0 {
|
||||
Ok(e_shnum as usize)
|
||||
} else if let Some(section_0) = self.section_0(endian, data)? {
|
||||
section_0
|
||||
.sh_size(endian)
|
||||
.into()
|
||||
.try_into()
|
||||
.ok()
|
||||
.read_error("Invalid ELF extended e_shnum")
|
||||
} else {
|
||||
// No section headers is ok.
|
||||
Ok(0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the `e_shstrndx` field of the header. Handles extended values.
|
||||
///
|
||||
/// Returns `Err` for invalid values (including if the index is 0).
|
||||
fn shstrndx<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<u32> {
|
||||
let e_shstrndx = self.e_shstrndx(endian);
|
||||
let index = if e_shstrndx != elf::SHN_XINDEX {
|
||||
e_shstrndx.into()
|
||||
} else if let Some(section_0) = self.section_0(endian, data)? {
|
||||
section_0.sh_link(endian)
|
||||
} else {
|
||||
// Section 0 must exist if we're trying to read e_shstrndx.
|
||||
return Err(Error("Missing ELF section headers for e_shstrndx overflow"));
|
||||
};
|
||||
if index == 0 {
|
||||
return Err(Error("Missing ELF e_shstrndx"));
|
||||
}
|
||||
Ok(index)
|
||||
}
|
||||
|
||||
/// Return the slice of program headers.
|
||||
///
|
||||
/// Returns `Ok(&[])` if there are no program headers.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn program_headers<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<&'data [Self::ProgramHeader]> {
|
||||
let phoff: u64 = self.e_phoff(endian).into();
|
||||
if phoff == 0 {
|
||||
// No program headers is ok.
|
||||
return Ok(&[]);
|
||||
}
|
||||
let phnum = self.phnum(endian, data)?;
|
||||
if phnum == 0 {
|
||||
// No program headers is ok.
|
||||
return Ok(&[]);
|
||||
}
|
||||
let phentsize = self.e_phentsize(endian) as usize;
|
||||
if phentsize != mem::size_of::<Self::ProgramHeader>() {
|
||||
// Program header size must match.
|
||||
return Err(Error("Invalid ELF program header entry size"));
|
||||
}
|
||||
data.read_slice_at(phoff, phnum)
|
||||
.read_error("Invalid ELF program header size or alignment")
|
||||
}
|
||||
|
||||
/// Return the slice of section headers.
|
||||
///
|
||||
/// Returns `Ok(&[])` if there are no section headers.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn section_headers<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<&'data [Self::SectionHeader]> {
|
||||
let shoff: u64 = self.e_shoff(endian).into();
|
||||
if shoff == 0 {
|
||||
// No section headers is ok.
|
||||
return Ok(&[]);
|
||||
}
|
||||
let shnum = self.shnum(endian, data)?;
|
||||
if shnum == 0 {
|
||||
// No section headers is ok.
|
||||
return Ok(&[]);
|
||||
}
|
||||
let shentsize = usize::from(self.e_shentsize(endian));
|
||||
if shentsize != mem::size_of::<Self::SectionHeader>() {
|
||||
// Section header size must match.
|
||||
return Err(Error("Invalid ELF section header entry size"));
|
||||
}
|
||||
data.read_slice_at(shoff, shnum)
|
||||
.read_error("Invalid ELF section header offset/size/alignment")
|
||||
}
|
||||
|
||||
/// Return the string table for the section headers.
|
||||
fn section_strings<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
sections: &[Self::SectionHeader],
|
||||
) -> read::Result<StringTable<'data, R>> {
|
||||
if sections.is_empty() {
|
||||
return Ok(StringTable::default());
|
||||
}
|
||||
let index = self.shstrndx(endian, data)? as usize;
|
||||
let shstrtab = sections.get(index).read_error("Invalid ELF e_shstrndx")?;
|
||||
let strings = if let Some((shstrtab_offset, shstrtab_size)) = shstrtab.file_range(endian) {
|
||||
let shstrtab_end = shstrtab_offset
|
||||
.checked_add(shstrtab_size)
|
||||
.read_error("Invalid ELF shstrtab size")?;
|
||||
StringTable::new(data, shstrtab_offset, shstrtab_end)
|
||||
} else {
|
||||
StringTable::default()
|
||||
};
|
||||
Ok(strings)
|
||||
}
|
||||
|
||||
/// Return the section table.
|
||||
fn sections<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<SectionTable<'data, Self, R>> {
|
||||
let sections = self.section_headers(endian, data)?;
|
||||
let strings = self.section_strings(endian, data, sections)?;
|
||||
Ok(SectionTable::new(sections, strings))
|
||||
}
|
||||
|
||||
/// Returns whether this is a mips64el elf file.
|
||||
fn is_mips64el(&self, endian: Self::Endian) -> bool {
|
||||
self.is_class_64() && self.is_little_endian() && self.e_machine(endian) == elf::EM_MIPS
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> FileHeader for elf::FileHeader32<Endian> {
|
||||
type Word = u32;
|
||||
type Sword = i32;
|
||||
type Endian = Endian;
|
||||
type ProgramHeader = elf::ProgramHeader32<Endian>;
|
||||
type SectionHeader = elf::SectionHeader32<Endian>;
|
||||
type CompressionHeader = elf::CompressionHeader32<Endian>;
|
||||
type NoteHeader = elf::NoteHeader32<Endian>;
|
||||
type Dyn = elf::Dyn32<Endian>;
|
||||
type Sym = elf::Sym32<Endian>;
|
||||
type Rel = elf::Rel32<Endian>;
|
||||
type Rela = elf::Rela32<Endian>;
|
||||
|
||||
#[inline]
|
||||
fn is_type_64(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_type_64_sized() -> bool
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_ident(&self) -> &elf::Ident {
|
||||
&self.e_ident
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_type(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_machine(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_machine.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_version(&self, endian: Self::Endian) -> u32 {
|
||||
self.e_version.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_entry(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_entry.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_phoff.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_shoff.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.e_flags.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_ehsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_ehsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phentsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_phentsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phnum(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_phnum.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shentsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shentsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shnum(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shnum.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shstrndx(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shstrndx.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> FileHeader for elf::FileHeader64<Endian> {
|
||||
type Word = u64;
|
||||
type Sword = i64;
|
||||
type Endian = Endian;
|
||||
type ProgramHeader = elf::ProgramHeader64<Endian>;
|
||||
type SectionHeader = elf::SectionHeader64<Endian>;
|
||||
type CompressionHeader = elf::CompressionHeader64<Endian>;
|
||||
type NoteHeader = elf::NoteHeader32<Endian>;
|
||||
type Dyn = elf::Dyn64<Endian>;
|
||||
type Sym = elf::Sym64<Endian>;
|
||||
type Rel = elf::Rel64<Endian>;
|
||||
type Rela = elf::Rela64<Endian>;
|
||||
|
||||
#[inline]
|
||||
fn is_type_64(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_type_64_sized() -> bool
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
true
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_ident(&self) -> &elf::Ident {
|
||||
&self.e_ident
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_type(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_machine(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_machine.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_version(&self, endian: Self::Endian) -> u32 {
|
||||
self.e_version.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_entry(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_entry.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_phoff.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.e_shoff.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.e_flags.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_ehsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_ehsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phentsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_phentsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_phnum(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_phnum.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shentsize(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shentsize.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shnum(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shnum.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn e_shstrndx(&self, endian: Self::Endian) -> u16 {
|
||||
self.e_shstrndx.get(endian)
|
||||
}
|
||||
}
|
||||
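For orientation (not part of the vendored file): once an `ElfFile` is parsed, the `Object` implementation above exposes the fields read from the header and notes directly.

// Illustrative only: query a parsed 64-bit ELF file through the Object trait.
use object::Object;

fn show(file: &object::read::elf::ElfFile64<object::Endianness>) {
    println!("kind: {:?}", file.kind());
    println!("entry: {:#x}", file.entry());
    if let Ok(Some(id)) = file.build_id() {
        println!("build id: {:02x?}", id);
    }
}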
224
vendor/object/src/read/elf/hash.rs
vendored
Normal file
@@ -0,0 +1,224 @@
use core::mem;

use crate::elf;
use crate::read::{ReadError, ReadRef, Result};
use crate::{U32, U64};

use super::{FileHeader, Sym, SymbolTable, Version, VersionTable};

/// A SysV symbol hash table in an ELF file.
///
/// Returned by [`SectionHeader::hash`](super::SectionHeader::hash).
#[derive(Debug)]
pub struct HashTable<'data, Elf: FileHeader> {
    buckets: &'data [U32<Elf::Endian>],
    chains: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> HashTable<'data, Elf> {
    /// Parse a SysV hash table.
    ///
    /// `data` should be from an [`elf::SHT_HASH`] section, or from a
    /// segment pointed to via the [`elf::DT_HASH`] entry.
    ///
    /// The header is read at offset 0 in the given `data`.
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::HashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid hash header")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid hash buckets")?;
        let chains = data
            .read_slice(&mut offset, header.chain_count.get(endian) as usize)
            .read_error("Invalid hash chains")?;
        Ok(HashTable { buckets, chains })
    }

    /// Return the symbol table length.
    pub fn symbol_table_length(&self) -> u32 {
        self.chains.len() as u32
    }

    /// Use the hash table to find the symbol table entry with the given name, hash and version.
    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version<'_>>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        // Get the chain start from the bucket for this hash.
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        // Avoid infinite loop.
        let mut i = 0;
        let strings = symbols.strings();
        while index != 0 && i < self.chains.len() {
            if let Ok(symbol) = symbols.symbol(index) {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            index = self.chains.get(index)?.get(endian) as usize;
            i += 1;
        }
        None
    }
}

/// A GNU symbol hash table in an ELF file.
///
/// Returned by [`SectionHeader::gnu_hash`](super::SectionHeader::gnu_hash).
#[derive(Debug)]
pub struct GnuHashTable<'data, Elf: FileHeader> {
    symbol_base: u32,
    bloom_shift: u32,
    bloom_filters: &'data [u8],
    buckets: &'data [U32<Elf::Endian>],
    values: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> GnuHashTable<'data, Elf> {
    /// Parse a GNU hash table.
    ///
    /// `data` should be from an [`elf::SHT_GNU_HASH`] section, or from a
    /// segment pointed to via the [`elf::DT_GNU_HASH`] entry.
    ///
    /// The header is read at offset 0 in the given `data`.
    ///
    /// The header does not contain a length field, and so all of `data`
    /// will be used as the hash table values. It does not matter if this
    /// is longer than needed, and this will often be the case when accessing
    /// the hash table via the [`elf::DT_GNU_HASH`] entry.
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::GnuHashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid GNU hash header")?;
        let bloom_len =
            u64::from(header.bloom_count.get(endian)) * mem::size_of::<Elf::Word>() as u64;
        let bloom_filters = data
            .read_bytes(&mut offset, bloom_len)
            .read_error("Invalid GNU hash bloom filters")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid GNU hash buckets")?;
        let chain_count = (data.len() - offset as usize) / 4;
        let values = data
            .read_slice(&mut offset, chain_count)
            .read_error("Invalid GNU hash values")?;
        Ok(GnuHashTable {
            symbol_base: header.symbol_base.get(endian),
            bloom_shift: header.bloom_shift.get(endian),
            bloom_filters,
            buckets,
            values,
        })
    }

    /// Return the symbol table index of the first symbol in the hash table.
    pub fn symbol_base(&self) -> u32 {
        self.symbol_base
    }

    /// Determine the symbol table length by finding the last entry in the hash table.
    ///
    /// Returns `None` if the hash table is empty or invalid.
    pub fn symbol_table_length(&self, endian: Elf::Endian) -> Option<u32> {
        // Ensure we find a non-empty bucket.
        if self.symbol_base == 0 {
            return None;
        }

        // Find the highest chain index in a bucket.
        let mut max_symbol = 0;
        for bucket in self.buckets {
            let bucket = bucket.get(endian);
            if max_symbol < bucket {
                max_symbol = bucket;
            }
        }

        // Find the end of the chain.
        for value in self
            .values
            .get(max_symbol.checked_sub(self.symbol_base)? as usize..)?
        {
            max_symbol += 1;
            if value.get(endian) & 1 != 0 {
                return Some(max_symbol);
            }
        }

        None
    }

    /// Use the hash table to find the symbol table entry with the given name, hash, and version.
    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version<'_>>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        let word_bits = mem::size_of::<Elf::Word>() as u32 * 8;

        // Test against bloom filter.
        let bloom_count = self.bloom_filters.len() / mem::size_of::<Elf::Word>();
        let offset =
            ((hash / word_bits) & (bloom_count as u32 - 1)) * mem::size_of::<Elf::Word>() as u32;
        let filter = if word_bits == 64 {
            self.bloom_filters
                .read_at::<U64<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
        } else {
            self.bloom_filters
                .read_at::<U32<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
                .into()
        };
        if filter & (1 << (hash % word_bits)) == 0 {
            return None;
        }
        if filter & (1 << ((hash >> self.bloom_shift) % word_bits)) == 0 {
            return None;
        }

        // Get the chain start from the bucket for this hash.
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        if index == 0 {
            return None;
        }

        // Test symbols in the chain.
        let strings = symbols.strings();
        let symbols = symbols.symbols().get(index..)?;
        let values = self
            .values
            .get(index.checked_sub(self.symbol_base as usize)?..)?;
        for (symbol, value) in symbols.iter().zip(values.iter()) {
            let value = value.get(endian);
            if value | 1 == hash | 1 {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            if value & 1 != 0 {
                break;
            }
            index += 1;
        }
        None
    }
}
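Both `find` methods above take a precomputed `hash`. For the SysV table this is the classic ELF hash; a reference implementation is shown below purely to illustrate where that argument comes from (the GNU table uses a different hash function).

// Reference implementation of the classic SysV ELF hash that callers pass
// to `HashTable::find` (standard algorithm, shown for illustration only).
fn sysv_hash(name: &[u8]) -> u32 {
    let mut h: u32 = 0;
    for &b in name {
        h = (h << 4).wrapping_add(u32::from(b));
        let g = h & 0xf000_0000;
        if g != 0 {
            h ^= g >> 24;
        }
        // Clearing the top nibble keeps h below 2^28, so the shift never overflows.
        h &= !g;
    }
    h
}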
78
vendor/object/src/read/elf/mod.rs
vendored
Normal file
@@ -0,0 +1,78 @@
//! Support for reading ELF files.
//!
//! Traits are used to abstract over the difference between 32-bit and 64-bit ELF.
//! The primary trait for this is [`FileHeader`].
//!
//! ## High level API
//!
//! [`ElfFile`] implements the [`Object`](crate::read::Object) trait for ELF files.
//! [`ElfFile`] is parameterised by [`FileHeader`] to allow reading both 32-bit and
//! 64-bit ELF. There are type aliases for these parameters ([`ElfFile32`] and
//! [`ElfFile64`]).
//!
//! ## Low level API
//!
//! The [`FileHeader`] trait can be directly used to parse both [`elf::FileHeader32`]
//! and [`elf::FileHeader64`].
//!
//! ### Example for low level API
//! ```no_run
//! use object::elf;
//! use object::read::elf::{FileHeader, Sym};
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a file and displays the name of each symbol.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?;
//!     let elf = elf::FileHeader64::<object::Endianness>::parse(&*data)?;
//!     let endian = elf.endian()?;
//!     let sections = elf.sections(endian, &*data)?;
//!     let symbols = sections.symbols(endian, &*data, elf::SHT_SYMTAB)?;
//!     for symbol in symbols.iter() {
//!         let name = symbol.name(endian, symbols.strings())?;
//!         println!("{}", String::from_utf8_lossy(name));
//!     }
//! # }
//!     Ok(())
//! }
//! ```
#[cfg(doc)]
use crate::elf;

mod file;
pub use file::*;

mod segment;
pub use segment::*;

mod section;
pub use section::*;

mod symbol;
pub use symbol::*;

mod relocation;
pub use relocation::*;

mod comdat;
pub use comdat::*;

mod dynamic;
pub use dynamic::*;

mod compression;
pub use compression::*;

mod note;
pub use note::*;

mod hash;
pub use hash::*;

mod version;
pub use version::*;

mod attributes;
pub use attributes::*;
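A companion sketch (not part of the vendored file) for the "high level API" described in the module docs above: the same binary can be read through the `Object`/`ObjectSection` traits instead of the raw header traits.

// Illustrative only: list section names of a 64-bit ELF via the high-level API.
use object::{Object, ObjectSection};

fn list_sections(data: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
    let file = object::read::elf::ElfFile64::<object::Endianness>::parse(data)?;
    for section in file.sections() {
        println!("{}", section.name()?);
    }
    Ok(())
}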
271
vendor/object/src/read/elf/note.rs
vendored
Normal file
@@ -0,0 +1,271 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, U32};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util;
|
||||
use crate::read::{self, Bytes, Error, ReadError};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// An iterator over the notes in an ELF section or segment.
|
||||
///
|
||||
/// Returned [`ProgramHeader::notes`](super::ProgramHeader::notes)
|
||||
/// and [`SectionHeader::notes`](super::SectionHeader::notes).
|
||||
#[derive(Debug)]
|
||||
pub struct NoteIterator<'data, Elf>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
{
|
||||
endian: Elf::Endian,
|
||||
align: usize,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Elf> NoteIterator<'data, Elf>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
{
|
||||
/// An iterator over the notes in an ELF section or segment.
|
||||
///
|
||||
/// `align` should be from the `p_align` field of the segment,
|
||||
/// or the `sh_addralign` field of the section. Supported values are
|
||||
/// either 4 or 8, but values less than 4 are treated as 4.
|
||||
/// This matches the behaviour of binutils.
|
||||
///
|
||||
/// Returns `Err` if `align` is invalid.
|
||||
pub fn new(endian: Elf::Endian, align: Elf::Word, data: &'data [u8]) -> read::Result<Self> {
|
||||
let align = match align.into() {
|
||||
0u64..=4 => 4,
|
||||
8 => 8,
|
||||
_ => return Err(Error("Invalid ELF note alignment")),
|
||||
};
|
||||
// TODO: check data alignment?
|
||||
Ok(NoteIterator {
|
||||
endian,
|
||||
align,
|
||||
data: Bytes(data),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the next note.
|
||||
pub fn next(&mut self) -> read::Result<Option<Note<'data, Elf>>> {
|
||||
let mut data = self.data;
|
||||
if data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let header = data
|
||||
.read_at::<Elf::NoteHeader>(0)
|
||||
.read_error("ELF note is too short")?;
|
||||
|
||||
// The name has no alignment requirement.
|
||||
let offset = mem::size_of::<Elf::NoteHeader>();
|
||||
let namesz = header.n_namesz(self.endian) as usize;
|
||||
let name = data
|
||||
.read_bytes_at(offset, namesz)
|
||||
.read_error("Invalid ELF note namesz")?
|
||||
.0;
|
||||
|
||||
// The descriptor must be aligned.
|
||||
let offset = util::align(offset + namesz, self.align);
|
||||
let descsz = header.n_descsz(self.endian) as usize;
|
||||
let desc = data
|
||||
.read_bytes_at(offset, descsz)
|
||||
.read_error("Invalid ELF note descsz")?
|
||||
.0;
|
||||
|
||||
// The next note (if any) must be aligned.
|
||||
let offset = util::align(offset + descsz, self.align);
|
||||
if data.skip(offset).is_err() {
|
||||
data = Bytes(&[]);
|
||||
}
|
||||
self.data = data;
|
||||
|
||||
Ok(Some(Note { header, name, desc }))
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed [`NoteHeader`].
|
||||
#[derive(Debug)]
|
||||
pub struct Note<'data, Elf>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
{
|
||||
header: &'data Elf::NoteHeader,
|
||||
name: &'data [u8],
|
||||
desc: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> Note<'data, Elf> {
|
||||
/// Return the `n_type` field of the `NoteHeader`.
|
||||
///
|
||||
/// The meaning of this field is determined by `name`.
|
||||
pub fn n_type(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_type(endian)
|
||||
}
|
||||
|
||||
/// Return the `n_namesz` field of the `NoteHeader`.
|
||||
pub fn n_namesz(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_namesz(endian)
|
||||
}
|
||||
|
||||
/// Return the `n_descsz` field of the `NoteHeader`.
|
||||
pub fn n_descsz(&self, endian: Elf::Endian) -> u32 {
|
||||
self.header.n_descsz(endian)
|
||||
}
|
||||
|
||||
/// Return the bytes for the name field following the `NoteHeader`.
|
||||
///
|
||||
/// This field is usually a string including one or more trailing null bytes
|
||||
/// (but it is not required to be).
|
||||
///
|
||||
/// The length of this field is given by `n_namesz`.
|
||||
pub fn name_bytes(&self) -> &'data [u8] {
|
||||
self.name
|
||||
}
|
||||
|
||||
/// Return the bytes for the name field following the `NoteHeader`,
|
||||
/// excluding all trailing null bytes.
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
let mut name = self.name;
|
||||
while let [rest @ .., 0] = name {
|
||||
name = rest;
|
||||
}
|
||||
name
|
||||
}
|
||||
|
||||
/// Return the bytes for the desc field following the `NoteHeader`.
|
||||
///
|
||||
/// The length of this field is given by `n_descsz`. The meaning
|
||||
/// of this field is determined by `name` and `n_type`.
|
||||
pub fn desc(&self) -> &'data [u8] {
|
||||
self.desc
|
||||
}
|
||||
|
||||
/// Return an iterator for properties if this note's type is [`elf::NT_GNU_PROPERTY_TYPE_0`].
|
||||
pub fn gnu_properties(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
) -> Option<GnuPropertyIterator<'data, Elf::Endian>> {
|
||||
if self.name() != elf::ELF_NOTE_GNU || self.n_type(endian) != elf::NT_GNU_PROPERTY_TYPE_0 {
|
||||
return None;
|
||||
}
|
||||
// Use the ELF class instead of the section alignment.
|
||||
// This matches what other parsers do.
|
||||
let align = if Elf::is_type_64_sized() { 8 } else { 4 };
|
||||
Some(GnuPropertyIterator {
|
||||
endian,
|
||||
align,
|
||||
data: Bytes(self.desc),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::NoteHeader32`] and [`elf::NoteHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait NoteHeader: Debug + Pod {
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn n_namesz(&self, endian: Self::Endian) -> u32;
|
||||
fn n_descsz(&self, endian: Self::Endian) -> u32;
|
||||
fn n_type(&self, endian: Self::Endian) -> u32;
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> NoteHeader for elf::NoteHeader32<Endian> {
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn n_namesz(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_namesz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn n_descsz(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_descsz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn n_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_type.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> NoteHeader for elf::NoteHeader64<Endian> {
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn n_namesz(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_namesz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn n_descsz(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_descsz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn n_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_type.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the properties in a [`elf::NT_GNU_PROPERTY_TYPE_0`] note.
|
||||
///
|
||||
/// Returned by [`Note::gnu_properties`].
|
||||
#[derive(Debug)]
|
||||
pub struct GnuPropertyIterator<'data, Endian: endian::Endian> {
|
||||
endian: Endian,
|
||||
align: usize,
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data, Endian: endian::Endian> GnuPropertyIterator<'data, Endian> {
|
||||
/// Returns the next property.
|
||||
pub fn next(&mut self) -> read::Result<Option<GnuProperty<'data>>> {
|
||||
let mut data = self.data;
|
||||
if data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
(|| -> Result<_, ()> {
|
||||
let pr_type = data.read_at::<U32<Endian>>(0)?.get(self.endian);
|
||||
let pr_datasz = data.read_at::<U32<Endian>>(4)?.get(self.endian) as usize;
|
||||
let pr_data = data.read_bytes_at(8, pr_datasz)?.0;
|
||||
data.skip(util::align(8 + pr_datasz, self.align))?;
|
||||
self.data = data;
|
||||
Ok(Some(GnuProperty { pr_type, pr_data }))
|
||||
})()
|
||||
.read_error("Invalid ELF GNU property")
|
||||
}
|
||||
}
|
||||
|
||||
/// A property in a [`elf::NT_GNU_PROPERTY_TYPE_0`] note.
|
||||
#[derive(Debug)]
|
||||
pub struct GnuProperty<'data> {
|
||||
pr_type: u32,
|
||||
pr_data: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data> GnuProperty<'data> {
|
||||
/// Return the property type.
|
||||
///
|
||||
/// This is one of the `GNU_PROPERTY_*` constants.
|
||||
pub fn pr_type(&self) -> u32 {
|
||||
self.pr_type
|
||||
}
|
||||
|
||||
/// Return the property data.
|
||||
pub fn pr_data(&self) -> &'data [u8] {
|
||||
self.pr_data
|
||||
}
|
||||
|
||||
/// Parse the property data as an unsigned 32-bit integer.
|
||||
pub fn data_u32<E: endian::Endian>(&self, endian: E) -> read::Result<u32> {
|
||||
Bytes(self.pr_data)
|
||||
.read_at::<U32<E>>(0)
|
||||
.read_error("Invalid ELF GNU property data")
|
||||
.map(|val| val.get(endian))
|
||||
}
|
||||
}
|
||||
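A minimal sketch (not part of the vendored file) of the `NoteIterator` API above, mirroring the build-id lookup performed by `ElfFile::build_id`; `align` is assumed to come from the segment's `p_align` field and `data` from its contents.

// Illustrative only: scan note data for the GNU build id.
use object::elf;
use object::read::elf::{FileHeader, NoteIterator};

fn find_build_id<'data, Elf: FileHeader>(
    endian: Elf::Endian,
    align: Elf::Word,
    data: &'data [u8],
) -> object::read::Result<Option<&'data [u8]>> {
    let mut notes = NoteIterator::<Elf>::new(endian, align, data)?;
    while let Some(note) = notes.next()? {
        if note.name() == elf::ELF_NOTE_GNU && note.n_type(endian) == elf::NT_GNU_BUILD_ID {
            return Ok(Some(note.desc()));
        }
    }
    Ok(None)
}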
628
vendor/object/src/read/elf/relocation.rs
vendored
Normal file
@@ -0,0 +1,628 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{
|
||||
self, Error, ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget,
|
||||
SectionIndex, SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{ElfFile, FileHeader, SectionHeader, SectionTable};
|
||||
|
||||
/// A mapping from section index to associated relocation sections.
|
||||
#[derive(Debug)]
|
||||
pub struct RelocationSections {
|
||||
relocations: Vec<usize>,
|
||||
}
|
||||
|
||||
impl RelocationSections {
|
||||
/// Create a new mapping using the section table.
|
||||
///
|
||||
/// Skips relocation sections that do not use the given symbol table section.
|
||||
pub fn parse<'data, Elf: FileHeader, R: ReadRef<'data>>(
|
||||
endian: Elf::Endian,
|
||||
sections: &SectionTable<'data, Elf, R>,
|
||||
symbol_section: SectionIndex,
|
||||
) -> read::Result<Self> {
|
||||
let mut relocations = vec![0; sections.len()];
|
||||
for (index, section) in sections.iter().enumerate().rev() {
|
||||
let sh_type = section.sh_type(endian);
|
||||
if sh_type == elf::SHT_REL || sh_type == elf::SHT_RELA {
|
||||
// The symbol indices used in relocations must be for the symbol table
|
||||
// we are expecting to use.
|
||||
let sh_link = SectionIndex(section.sh_link(endian) as usize);
|
||||
if sh_link != symbol_section {
|
||||
continue;
|
||||
}
|
||||
|
||||
let sh_info = section.sh_info(endian) as usize;
|
||||
if sh_info == 0 {
|
||||
// Skip dynamic relocations.
|
||||
continue;
|
||||
}
|
||||
if sh_info >= relocations.len() {
|
||||
return Err(Error("Invalid ELF sh_info for relocation section"));
|
||||
}
|
||||
|
||||
// Handle multiple relocation sections by chaining them.
|
||||
let next = relocations[sh_info];
|
||||
relocations[sh_info] = index;
|
||||
relocations[index] = next;
|
||||
}
|
||||
}
|
||||
Ok(Self { relocations })
|
||||
}
|
||||
|
||||
/// Given a section index, return the section index of the associated relocation section.
|
||||
///
|
||||
/// This may also be called with a relocation section index, and it will return the
|
||||
/// next associated relocation section.
|
||||
pub fn get(&self, index: usize) -> Option<usize> {
|
||||
self.relocations.get(index).cloned().filter(|x| *x != 0)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) enum ElfRelaIterator<'data, Elf: FileHeader> {
|
||||
Rel(slice::Iter<'data, Elf::Rel>),
|
||||
Rela(slice::Iter<'data, Elf::Rela>),
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> ElfRelaIterator<'data, Elf> {
|
||||
fn is_rel(&self) -> bool {
|
||||
match self {
|
||||
ElfRelaIterator::Rel(_) => true,
|
||||
ElfRelaIterator::Rela(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> Iterator for ElfRelaIterator<'data, Elf> {
|
||||
type Item = Elf::Rela;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
ElfRelaIterator::Rel(ref mut i) => i.next().cloned().map(Self::Item::from),
|
||||
ElfRelaIterator::Rela(ref mut i) => i.next().cloned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the dynamic relocations in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfDynamicRelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfDynamicRelocationIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the dynamic relocations in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfDynamicRelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfDynamicRelocationIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the dynamic relocations in an [`ElfFile`].
|
||||
pub struct ElfDynamicRelocationIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// The current relocation section index.
|
||||
pub(super) section_index: SectionIndex,
|
||||
pub(super) file: &'file ElfFile<'data, Elf, R>,
|
||||
pub(super) relocations: Option<ElfRelaIterator<'data, Elf>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfDynamicRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let endian = self.file.endian;
|
||||
loop {
|
||||
if let Some(ref mut relocations) = self.relocations {
|
||||
if let Some(reloc) = relocations.next() {
|
||||
let relocation =
|
||||
parse_relocation(self.file.header, endian, reloc, relocations.is_rel());
|
||||
return Some((reloc.r_offset(endian).into(), relocation));
|
||||
}
|
||||
self.relocations = None;
|
||||
}
|
||||
|
||||
let section = self.file.sections.section(self.section_index).ok()?;
|
||||
self.section_index.0 += 1;
|
||||
|
||||
let sh_link = SectionIndex(section.sh_link(endian) as usize);
|
||||
if sh_link != self.file.dynamic_symbols.section() {
|
||||
continue;
|
||||
}
|
||||
|
||||
match section.sh_type(endian) {
|
||||
elf::SHT_REL => {
|
||||
if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
|
||||
self.relocations = Some(ElfRelaIterator::Rel(relocations.iter()));
|
||||
}
|
||||
}
|
||||
elf::SHT_RELA => {
|
||||
if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
|
||||
self.relocations = Some(ElfRelaIterator::Rela(relocations.iter()));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> fmt::Debug for ElfDynamicRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ElfDynamicRelocationIterator").finish()
|
||||
}
|
||||
}
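// A minimal usage sketch (not from the crate's docs): the same information is
// reachable through the high-level `Object::dynamic_relocations` API, which is
// backed by this iterator for ELF files. `data` is assumed to hold a valid ELF
// image.
use object::{File, Object};

fn dump_dynamic_relocations(data: &[u8]) -> Result<(), object::Error> {
    let file = File::parse(data)?;
    if let Some(relocs) = file.dynamic_relocations() {
        for (offset, reloc) in relocs {
            // Each item is an (offset, Relocation) pair, as produced above.
            println!("{:#x}: {:?} addend {:#x}", offset, reloc.kind(), reloc.addend());
        }
    }
    Ok(())
}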
|
||||
|
||||
/// An iterator for the relocations for an [`ElfSection32`](super::ElfSection32).
|
||||
pub type ElfSectionRelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSectionRelocationIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the relocations for an [`ElfSection64`](super::ElfSection64).
|
||||
pub type ElfSectionRelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSectionRelocationIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the relocations for an [`ElfSection`](super::ElfSection).
|
||||
pub struct ElfSectionRelocationIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// The current pointer in the chain of relocation sections.
|
||||
pub(super) section_index: SectionIndex,
|
||||
pub(super) file: &'file ElfFile<'data, Elf, R>,
|
||||
pub(super) relocations: Option<ElfRelaIterator<'data, Elf>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfSectionRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let endian = self.file.endian;
|
||||
loop {
|
||||
if let Some(ref mut relocations) = self.relocations {
|
||||
if let Some(reloc) = relocations.next() {
|
||||
let relocation =
|
||||
parse_relocation(self.file.header, endian, reloc, relocations.is_rel());
|
||||
return Some((reloc.r_offset(endian).into(), relocation));
|
||||
}
|
||||
self.relocations = None;
|
||||
}
|
||||
self.section_index = SectionIndex(self.file.relocations.get(self.section_index.0)?);
|
||||
// The construction of RelocationSections ensures section_index is valid.
|
||||
let section = self.file.sections.section(self.section_index).unwrap();
|
||||
match section.sh_type(endian) {
|
||||
elf::SHT_REL => {
|
||||
if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
|
||||
self.relocations = Some(ElfRelaIterator::Rel(relocations.iter()));
|
||||
}
|
||||
}
|
||||
elf::SHT_RELA => {
|
||||
if let Ok(relocations) = section.data_as_array(endian, self.file.data) {
|
||||
self.relocations = Some(ElfRelaIterator::Rela(relocations.iter()));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> fmt::Debug for ElfSectionRelocationIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ElfSectionRelocationIterator").finish()
|
||||
}
|
||||
}
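// A minimal usage sketch (not from the crate's docs): per-section relocations
// are reached through `ObjectSection::relocations`, which this iterator backs
// for ELF files. `file` is assumed to be a successfully parsed `object::File`.
use object::{Object, ObjectSection};

fn dump_section_relocations(file: &object::File) {
    for section in file.sections() {
        for (offset, reloc) in section.relocations() {
            println!(
                "{}+{:#x}: {:?}",
                section.name().unwrap_or("<invalid>"),
                offset,
                reloc.target()
            );
        }
    }
}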
|
||||
|
||||
fn parse_relocation<Elf: FileHeader>(
|
||||
header: &Elf,
|
||||
endian: Elf::Endian,
|
||||
reloc: Elf::Rela,
|
||||
implicit_addend: bool,
|
||||
) -> Relocation {
|
||||
let mut encoding = RelocationEncoding::Generic;
|
||||
let is_mips64el = header.is_mips64el(endian);
|
||||
let (kind, size) = match header.e_machine(endian) {
|
||||
elf::EM_AARCH64 => {
|
||||
if header.is_type_64() {
|
||||
match reloc.r_type(endian, false) {
|
||||
elf::R_AARCH64_ABS64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_AARCH64_ABS32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_AARCH64_ABS16 => (RelocationKind::Absolute, 16),
|
||||
elf::R_AARCH64_PREL64 => (RelocationKind::Relative, 64),
|
||||
elf::R_AARCH64_PREL32 => (RelocationKind::Relative, 32),
|
||||
elf::R_AARCH64_PREL16 => (RelocationKind::Relative, 16),
|
||||
elf::R_AARCH64_CALL26 => {
|
||||
encoding = RelocationEncoding::AArch64Call;
|
||||
(RelocationKind::PltRelative, 26)
|
||||
}
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
}
|
||||
} else {
|
||||
match reloc.r_type(endian, false) {
|
||||
elf::R_AARCH64_P32_ABS32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
}
|
||||
}
|
||||
}
|
||||
elf::EM_ARM => match reloc.r_type(endian, false) {
|
||||
elf::R_ARM_ABS32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_AVR => match reloc.r_type(endian, false) {
|
||||
elf::R_AVR_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_AVR_16 => (RelocationKind::Absolute, 16),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_BPF => match reloc.r_type(endian, false) {
|
||||
elf::R_BPF_64_64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_BPF_64_32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_CSKY => match reloc.r_type(endian, false) {
|
||||
elf::R_CKCORE_ADDR32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_CKCORE_PCREL32 => (RelocationKind::Relative, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_386 => match reloc.r_type(endian, false) {
|
||||
elf::R_386_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_386_PC32 => (RelocationKind::Relative, 32),
|
||||
elf::R_386_GOT32 => (RelocationKind::Got, 32),
|
||||
elf::R_386_PLT32 => (RelocationKind::PltRelative, 32),
|
||||
elf::R_386_GOTOFF => (RelocationKind::GotBaseOffset, 32),
|
||||
elf::R_386_GOTPC => (RelocationKind::GotBaseRelative, 32),
|
||||
elf::R_386_16 => (RelocationKind::Absolute, 16),
|
||||
elf::R_386_PC16 => (RelocationKind::Relative, 16),
|
||||
elf::R_386_8 => (RelocationKind::Absolute, 8),
|
||||
elf::R_386_PC8 => (RelocationKind::Relative, 8),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_X86_64 => match reloc.r_type(endian, false) {
|
||||
elf::R_X86_64_64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_X86_64_PC32 => (RelocationKind::Relative, 32),
|
||||
elf::R_X86_64_GOT32 => (RelocationKind::Got, 32),
|
||||
elf::R_X86_64_PLT32 => (RelocationKind::PltRelative, 32),
|
||||
elf::R_X86_64_GOTPCREL => (RelocationKind::GotRelative, 32),
|
||||
elf::R_X86_64_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_X86_64_32S => {
|
||||
encoding = RelocationEncoding::X86Signed;
|
||||
(RelocationKind::Absolute, 32)
|
||||
}
|
||||
elf::R_X86_64_16 => (RelocationKind::Absolute, 16),
|
||||
elf::R_X86_64_PC16 => (RelocationKind::Relative, 16),
|
||||
elf::R_X86_64_8 => (RelocationKind::Absolute, 8),
|
||||
elf::R_X86_64_PC8 => (RelocationKind::Relative, 8),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_HEXAGON => match reloc.r_type(endian, false) {
|
||||
elf::R_HEX_32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_LOONGARCH => match reloc.r_type(endian, false) {
|
||||
elf::R_LARCH_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_LARCH_64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_LARCH_32_PCREL => (RelocationKind::Relative, 32),
|
||||
elf::R_LARCH_64_PCREL => (RelocationKind::Relative, 64),
|
||||
elf::R_LARCH_B16 => {
|
||||
encoding = RelocationEncoding::LoongArchBranch;
|
||||
(RelocationKind::Relative, 16)
|
||||
}
|
||||
elf::R_LARCH_B21 => {
|
||||
encoding = RelocationEncoding::LoongArchBranch;
|
||||
(RelocationKind::Relative, 21)
|
||||
}
|
||||
elf::R_LARCH_B26 => {
|
||||
encoding = RelocationEncoding::LoongArchBranch;
|
||||
(RelocationKind::Relative, 26)
|
||||
}
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_MIPS => match reloc.r_type(endian, is_mips64el) {
|
||||
elf::R_MIPS_16 => (RelocationKind::Absolute, 16),
|
||||
elf::R_MIPS_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_MIPS_64 => (RelocationKind::Absolute, 64),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_MSP430 => match reloc.r_type(endian, false) {
|
||||
elf::R_MSP430_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_MSP430_16_BYTE => (RelocationKind::Absolute, 16),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_PPC => match reloc.r_type(endian, false) {
|
||||
elf::R_PPC_ADDR32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_PPC64 => match reloc.r_type(endian, false) {
|
||||
elf::R_PPC64_ADDR32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_PPC64_ADDR64 => (RelocationKind::Absolute, 64),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_RISCV => match reloc.r_type(endian, false) {
|
||||
elf::R_RISCV_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_RISCV_64 => (RelocationKind::Absolute, 64),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_S390 => match reloc.r_type(endian, false) {
|
||||
elf::R_390_8 => (RelocationKind::Absolute, 8),
|
||||
elf::R_390_16 => (RelocationKind::Absolute, 16),
|
||||
elf::R_390_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_390_64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_390_PC16 => (RelocationKind::Relative, 16),
|
||||
elf::R_390_PC32 => (RelocationKind::Relative, 32),
|
||||
elf::R_390_PC64 => (RelocationKind::Relative, 64),
|
||||
elf::R_390_PC16DBL => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::Relative, 16)
|
||||
}
|
||||
elf::R_390_PC32DBL => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::Relative, 32)
|
||||
}
|
||||
elf::R_390_PLT16DBL => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::PltRelative, 16)
|
||||
}
|
||||
elf::R_390_PLT32DBL => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::PltRelative, 32)
|
||||
}
|
||||
elf::R_390_GOT16 => (RelocationKind::Got, 16),
|
||||
elf::R_390_GOT32 => (RelocationKind::Got, 32),
|
||||
elf::R_390_GOT64 => (RelocationKind::Got, 64),
|
||||
elf::R_390_GOTENT => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::GotRelative, 32)
|
||||
}
|
||||
elf::R_390_GOTOFF16 => (RelocationKind::GotBaseOffset, 16),
|
||||
elf::R_390_GOTOFF32 => (RelocationKind::GotBaseOffset, 32),
|
||||
elf::R_390_GOTOFF64 => (RelocationKind::GotBaseOffset, 64),
|
||||
elf::R_390_GOTPC => (RelocationKind::GotBaseRelative, 64),
|
||||
elf::R_390_GOTPCDBL => {
|
||||
encoding = RelocationEncoding::S390xDbl;
|
||||
(RelocationKind::GotBaseRelative, 32)
|
||||
}
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_SBF => match reloc.r_type(endian, false) {
|
||||
elf::R_SBF_64_64 => (RelocationKind::Absolute, 64),
|
||||
elf::R_SBF_64_32 => (RelocationKind::Absolute, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_SHARC => match reloc.r_type(endian, false) {
|
||||
elf::R_SHARC_ADDR24_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Absolute, 24)
|
||||
}
|
||||
elf::R_SHARC_ADDR32_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Absolute, 32)
|
||||
}
|
||||
elf::R_SHARC_ADDR_VAR_V3 => {
|
||||
encoding = RelocationEncoding::Generic;
|
||||
(RelocationKind::Absolute, 32)
|
||||
}
|
||||
elf::R_SHARC_PCRSHORT_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Relative, 6)
|
||||
}
|
||||
elf::R_SHARC_PCRLONG_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Relative, 24)
|
||||
}
|
||||
elf::R_SHARC_DATA6_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Absolute, 6)
|
||||
}
|
||||
elf::R_SHARC_DATA16_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeA;
|
||||
(RelocationKind::Absolute, 16)
|
||||
}
|
||||
elf::R_SHARC_DATA6_VISA_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeB;
|
||||
(RelocationKind::Absolute, 6)
|
||||
}
|
||||
elf::R_SHARC_DATA7_VISA_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeB;
|
||||
(RelocationKind::Absolute, 7)
|
||||
}
|
||||
elf::R_SHARC_DATA16_VISA_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeB;
|
||||
(RelocationKind::Absolute, 16)
|
||||
}
|
||||
elf::R_SHARC_PCR6_VISA_V3 => {
|
||||
encoding = RelocationEncoding::SharcTypeB;
|
||||
(RelocationKind::Relative, 16)
|
||||
}
|
||||
elf::R_SHARC_ADDR_VAR16_V3 => {
|
||||
encoding = RelocationEncoding::Generic;
|
||||
(RelocationKind::Absolute, 16)
|
||||
}
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
elf::EM_SPARC | elf::EM_SPARC32PLUS | elf::EM_SPARCV9 => {
|
||||
match reloc.r_type(endian, false) {
|
||||
elf::R_SPARC_32 | elf::R_SPARC_UA32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_SPARC_64 | elf::R_SPARC_UA64 => (RelocationKind::Absolute, 64),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
}
|
||||
}
|
||||
elf::EM_XTENSA => match reloc.r_type(endian, false) {
|
||||
elf::R_XTENSA_32 => (RelocationKind::Absolute, 32),
|
||||
elf::R_XTENSA_32_PCREL => (RelocationKind::Relative, 32),
|
||||
r_type => (RelocationKind::Elf(r_type), 0),
|
||||
},
|
||||
_ => (RelocationKind::Elf(reloc.r_type(endian, false)), 0),
|
||||
};
|
||||
let sym = reloc.r_sym(endian, is_mips64el) as usize;
|
||||
let target = if sym == 0 {
|
||||
RelocationTarget::Absolute
|
||||
} else {
|
||||
RelocationTarget::Symbol(SymbolIndex(sym))
|
||||
};
|
||||
Relocation {
|
||||
kind,
|
||||
encoding,
|
||||
size,
|
||||
target,
|
||||
addend: reloc.r_addend(endian).into(),
|
||||
implicit_addend,
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::Rel32`] and [`elf::Rel64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Rel: Debug + Pod + Clone {
|
||||
type Word: Into<u64>;
|
||||
type Sword: Into<i64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn r_info(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn r_sym(&self, endian: Self::Endian) -> u32;
|
||||
fn r_type(&self, endian: Self::Endian) -> u32;
|
||||
}
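// For reference only (standard ELF packing, not specific to this crate):
// `r_info` combines the symbol index and relocation type. The accessors above
// correspond to the classic ELF32_R_SYM/ELF32_R_TYPE macros; the 64-bit split
// used by `Rel64` is shown alongside for comparison.
fn elf32_r_sym(r_info: u32) -> u32 { r_info >> 8 }
fn elf32_r_type(r_info: u32) -> u32 { r_info & 0xff }
fn elf64_r_sym(r_info: u64) -> u32 { (r_info >> 32) as u32 }
fn elf64_r_type(r_info: u64) -> u32 { (r_info & 0xffff_ffff) as u32 }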
|
||||
|
||||
impl<Endian: endian::Endian> Rel for elf::Rel32<Endian> {
|
||||
type Word = u32;
|
||||
type Sword = i32;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_info(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_info.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_sym(&self, endian: Self::Endian) -> u32 {
|
||||
self.r_sym(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.r_type(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Rel for elf::Rel64<Endian> {
|
||||
type Word = u64;
|
||||
type Sword = i64;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_info(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_info.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_sym(&self, endian: Self::Endian) -> u32 {
|
||||
self.r_sym(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.r_type(endian)
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::Rela32`] and [`elf::Rela64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Rela: Debug + Pod + Clone {
|
||||
type Word: Into<u64>;
|
||||
type Sword: Into<i64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn r_info(&self, endian: Self::Endian, is_mips64el: bool) -> Self::Word;
|
||||
fn r_addend(&self, endian: Self::Endian) -> Self::Sword;
|
||||
fn r_sym(&self, endian: Self::Endian, is_mips64el: bool) -> u32;
|
||||
fn r_type(&self, endian: Self::Endian, is_mips64el: bool) -> u32;
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Rela for elf::Rela32<Endian> {
|
||||
type Word = u32;
|
||||
type Sword = i32;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_info(&self, endian: Self::Endian, _is_mips64el: bool) -> Self::Word {
|
||||
self.r_info.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_addend(&self, endian: Self::Endian) -> Self::Sword {
|
||||
self.r_addend.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_sym(&self, endian: Self::Endian, _is_mips64el: bool) -> u32 {
|
||||
self.r_sym(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_type(&self, endian: Self::Endian, _is_mips64el: bool) -> u32 {
|
||||
self.r_type(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Rela for elf::Rela64<Endian> {
|
||||
type Word = u64;
|
||||
type Sword = i64;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn r_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.r_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_info(&self, endian: Self::Endian, is_mips64el: bool) -> Self::Word {
|
||||
self.get_r_info(endian, is_mips64el)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_addend(&self, endian: Self::Endian) -> Self::Sword {
|
||||
self.r_addend.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_sym(&self, endian: Self::Endian, is_mips64el: bool) -> u32 {
|
||||
self.r_sym(endian, is_mips64el)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn r_type(&self, endian: Self::Endian, is_mips64el: bool) -> u32 {
|
||||
self.r_type(endian, is_mips64el)
|
||||
}
|
||||
}
|
||||
1150
vendor/object/src/read/elf/section.rs
vendored
Normal file
File diff suppressed because it is too large
334
vendor/object/src/read/elf/segment.rs
vendored
Normal file
@@ -0,0 +1,334 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{mem, slice, str};
|
||||
|
||||
use crate::elf;
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{self, Bytes, ObjectSegment, ReadError, ReadRef, SegmentFlags};
|
||||
|
||||
use super::{ElfFile, FileHeader, NoteIterator};
|
||||
|
||||
/// An iterator for the segments in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfSegmentIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSegmentIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the segments in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfSegmentIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSegmentIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the segments in an [`ElfFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct ElfSegmentIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file ElfFile<'data, Elf, R>,
|
||||
pub(super) iter: slice::Iter<'data, Elf::ProgramHeader>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> Iterator for ElfSegmentIterator<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = ElfSegment<'data, 'file, Elf, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
for segment in self.iter.by_ref() {
|
||||
if segment.p_type(self.file.endian) == elf::PT_LOAD {
|
||||
return Some(ElfSegment {
|
||||
file: self.file,
|
||||
segment,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfSegment32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSegment<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// A segment in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfSegment64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSegment<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// A segment in an [`ElfFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct ElfSegment<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file ElfFile<'data, Elf, R>,
|
||||
pub(super) segment: &'data Elf::ProgramHeader,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ElfSegment<'data, 'file, Elf, R> {
|
||||
fn bytes(&self) -> read::Result<&'data [u8]> {
|
||||
self.segment
|
||||
.data(self.file.endian, self.file.data)
|
||||
.read_error("Invalid ELF segment size or offset")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> read::private::Sealed for ElfSegment<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf, R> ObjectSegment<'data> for ElfSegment<'data, 'file, Elf, R>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.segment.p_vaddr(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.segment.p_memsz(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
self.segment.p_align(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
self.segment.file_range(self.file.endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn data(&self) -> read::Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> read::Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> read::Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> read::Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
let p_flags = self.segment.p_flags(self.file.endian);
|
||||
SegmentFlags::Elf { p_flags }
|
||||
}
|
||||
}
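// A minimal usage sketch (not from the crate's docs): the loadable segments are
// reached through `Object::segments`, and each item implements `ObjectSegment`
// as above. `file` is assumed to be a successfully parsed `object::File`.
use object::{Object, ObjectSegment};

fn dump_segments(file: &object::File) {
    for segment in file.segments() {
        let (offset, filesz) = segment.file_range();
        println!(
            "vaddr {:#x} memsz {:#x} file {:#x}+{:#x}",
            segment.address(),
            segment.size(),
            offset,
            filesz
        );
    }
}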
|
||||
|
||||
/// A trait for generic access to [`elf::ProgramHeader32`] and [`elf::ProgramHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait ProgramHeader: Debug + Pod {
|
||||
type Elf: FileHeader<ProgramHeader = Self, Endian = Self::Endian, Word = Self::Word>;
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn p_type(&self, endian: Self::Endian) -> u32;
|
||||
fn p_flags(&self, endian: Self::Endian) -> u32;
|
||||
fn p_offset(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn p_vaddr(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn p_paddr(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn p_filesz(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn p_memsz(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn p_align(&self, endian: Self::Endian) -> Self::Word;
|
||||
|
||||
/// Return the offset and size of the segment in the file.
|
||||
fn file_range(&self, endian: Self::Endian) -> (u64, u64) {
|
||||
(self.p_offset(endian).into(), self.p_filesz(endian).into())
|
||||
}
|
||||
|
||||
/// Return the segment data.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> Result<&'data [u8], ()> {
|
||||
let (offset, size) = self.file_range(endian);
|
||||
data.read_bytes_at(offset, size)
|
||||
}
|
||||
|
||||
/// Return the segment data as a slice of the given type.
|
||||
///
|
||||
/// Allows padding at the end of the data.
|
||||
/// Returns `Ok(&[])` if the segment has no data.
|
||||
/// Returns `Err` for invalid values, including bad alignment.
|
||||
fn data_as_array<'data, T: Pod, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> Result<&'data [T], ()> {
|
||||
let mut data = self.data(endian, data).map(Bytes)?;
|
||||
data.read_slice(data.len() / mem::size_of::<T>())
|
||||
}
|
||||
|
||||
/// Return the segment data in the given virtual address range.
|
||||
///
|
||||
/// Returns `Ok(None)` if the segment does not contain the address.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn data_range<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
address: u64,
|
||||
size: u64,
|
||||
) -> Result<Option<&'data [u8]>, ()> {
|
||||
Ok(read::util::data_range(
|
||||
self.data(endian, data)?,
|
||||
self.p_vaddr(endian).into(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
/// Return entries in a dynamic segment.
|
||||
///
|
||||
/// Returns `Ok(None)` if the segment is not `PT_DYNAMIC`.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn dynamic<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<Option<&'data [<Self::Elf as FileHeader>::Dyn]>> {
|
||||
if self.p_type(endian) != elf::PT_DYNAMIC {
|
||||
return Ok(None);
|
||||
}
|
||||
let dynamic = self
|
||||
.data_as_array(endian, data)
|
||||
.read_error("Invalid ELF dynamic segment offset or size")?;
|
||||
Ok(Some(dynamic))
|
||||
}
|
||||
|
||||
/// Return a note iterator for the segment data.
|
||||
///
|
||||
/// Returns `Ok(None)` if the segment does not contain notes.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn notes<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> read::Result<Option<NoteIterator<'data, Self::Elf>>> {
|
||||
if self.p_type(endian) != elf::PT_NOTE {
|
||||
return Ok(None);
|
||||
}
|
||||
let data = self
|
||||
.data(endian, data)
|
||||
.read_error("Invalid ELF note segment offset or size")?;
|
||||
let notes = NoteIterator::new(endian, self.p_align(endian), data)?;
|
||||
Ok(Some(notes))
|
||||
}
|
||||
}
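// A low-level sketch under stated assumptions: `FileHeader::parse`,
// `FileHeader::endian` and `FileHeader::program_headers` are used with the
// signatures assumed here, and `data` holds a 64-bit ELF image. It prints the
// fields that the `ProgramHeader` trait above exposes.
use object::elf::FileHeader64;
use object::read::elf::{FileHeader, ProgramHeader};
use object::Endianness;

fn dump_program_headers(data: &[u8]) -> object::read::Result<()> {
    let header = FileHeader64::<Endianness>::parse(data)?;
    let endian = header.endian()?;
    for phdr in header.program_headers(endian, data)? {
        let (offset, filesz) = phdr.file_range(endian);
        println!(
            "p_type {:#x} vaddr {:#x} file {:#x}+{:#x}",
            phdr.p_type(endian),
            phdr.p_vaddr(endian),
            offset,
            filesz
        );
    }
    Ok(())
}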
|
||||
|
||||
impl<Endian: endian::Endian> ProgramHeader for elf::ProgramHeader32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
type Elf = elf::FileHeader32<Endian>;
|
||||
|
||||
#[inline]
|
||||
fn p_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.p_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.p_flags.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_vaddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_vaddr.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_paddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_paddr.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_filesz(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_filesz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_memsz(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_memsz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_align(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_align.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> ProgramHeader for elf::ProgramHeader64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
type Elf = elf::FileHeader64<Endian>;
|
||||
|
||||
#[inline]
|
||||
fn p_type(&self, endian: Self::Endian) -> u32 {
|
||||
self.p_type.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.p_flags.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_offset(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_offset.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_vaddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_vaddr.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_paddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_paddr.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_filesz(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_filesz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_memsz(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_memsz.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn p_align(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.p_align.get(endian)
|
||||
}
|
||||
}
|
||||
595
vendor/object/src/read/elf/symbol.rs
vendored
Normal file
@@ -0,0 +1,595 @@
|
||||
use alloc::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
use core::str;
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, SectionIndex, SymbolFlags,
|
||||
SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry, SymbolScope, SymbolSection,
|
||||
};
|
||||
use crate::{elf, U32};
|
||||
|
||||
use super::{FileHeader, SectionHeader, SectionTable};
|
||||
|
||||
/// A table of symbol entries in an ELF file.
|
||||
///
|
||||
/// Also includes the string table used for the symbol names.
|
||||
///
|
||||
/// Returned by [`SectionTable::symbols`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct SymbolTable<'data, Elf: FileHeader, R = &'data [u8]>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
section: SectionIndex,
|
||||
string_section: SectionIndex,
|
||||
shndx_section: SectionIndex,
|
||||
symbols: &'data [Elf::Sym],
|
||||
strings: StringTable<'data, R>,
|
||||
shndx: &'data [U32<Elf::Endian>],
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader, R: ReadRef<'data>> Default for SymbolTable<'data, Elf, R> {
|
||||
fn default() -> Self {
|
||||
SymbolTable {
|
||||
section: SectionIndex(0),
|
||||
string_section: SectionIndex(0),
|
||||
shndx_section: SectionIndex(0),
|
||||
symbols: &[],
|
||||
strings: Default::default(),
|
||||
shndx: &[],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader, R: ReadRef<'data>> SymbolTable<'data, Elf, R> {
|
||||
/// Parse the given symbol table section.
|
||||
pub fn parse(
|
||||
endian: Elf::Endian,
|
||||
data: R,
|
||||
sections: &SectionTable<'data, Elf, R>,
|
||||
section_index: SectionIndex,
|
||||
section: &Elf::SectionHeader,
|
||||
) -> read::Result<SymbolTable<'data, Elf, R>> {
|
||||
debug_assert!(
|
||||
section.sh_type(endian) == elf::SHT_DYNSYM
|
||||
|| section.sh_type(endian) == elf::SHT_SYMTAB
|
||||
);
|
||||
|
||||
let symbols = section
|
||||
.data_as_array(endian, data)
|
||||
.read_error("Invalid ELF symbol table data")?;
|
||||
|
||||
let link = SectionIndex(section.sh_link(endian) as usize);
|
||||
let strings = sections.strings(endian, data, link)?;
|
||||
|
||||
let mut shndx_section = SectionIndex(0);
|
||||
let mut shndx = &[][..];
|
||||
for (i, s) in sections.iter().enumerate() {
|
||||
if s.sh_type(endian) == elf::SHT_SYMTAB_SHNDX
|
||||
&& s.sh_link(endian) as usize == section_index.0
|
||||
{
|
||||
shndx_section = SectionIndex(i);
|
||||
shndx = s
|
||||
.data_as_array(endian, data)
|
||||
.read_error("Invalid ELF symtab_shndx data")?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(SymbolTable {
|
||||
section: section_index,
|
||||
string_section: link,
|
||||
symbols,
|
||||
strings,
|
||||
shndx,
|
||||
shndx_section,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the section index of this symbol table.
|
||||
#[inline]
|
||||
pub fn section(&self) -> SectionIndex {
|
||||
self.section
|
||||
}
|
||||
|
||||
/// Return the section index of the shndx table.
|
||||
#[inline]
|
||||
pub fn shndx_section(&self) -> SectionIndex {
|
||||
self.shndx_section
|
||||
}
|
||||
|
||||
/// Return the section index of the linked string table.
|
||||
#[inline]
|
||||
pub fn string_section(&self) -> SectionIndex {
|
||||
self.string_section
|
||||
}
|
||||
|
||||
/// Return the string table used for the symbol names.
|
||||
#[inline]
|
||||
pub fn strings(&self) -> StringTable<'data, R> {
|
||||
self.strings
|
||||
}
|
||||
|
||||
/// Return the symbol table.
|
||||
#[inline]
|
||||
pub fn symbols(&self) -> &'data [Elf::Sym] {
|
||||
self.symbols
|
||||
}
|
||||
|
||||
/// Iterate over the symbols.
|
||||
#[inline]
|
||||
pub fn iter(&self) -> slice::Iter<'data, Elf::Sym> {
|
||||
self.symbols.iter()
|
||||
}
|
||||
|
||||
/// Return true if the symbol table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.symbols.is_empty()
|
||||
}
|
||||
|
||||
/// The number of symbols.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.symbols.len()
|
||||
}
|
||||
|
||||
/// Return the symbol at the given index.
|
||||
pub fn symbol(&self, index: usize) -> read::Result<&'data Elf::Sym> {
|
||||
self.symbols
|
||||
.get(index)
|
||||
.read_error("Invalid ELF symbol index")
|
||||
}
|
||||
|
||||
/// Return the extended section index for the given symbol if present.
|
||||
#[inline]
|
||||
pub fn shndx(&self, endian: Elf::Endian, index: usize) -> Option<u32> {
|
||||
self.shndx.get(index).map(|x| x.get(endian))
|
||||
}
|
||||
|
||||
/// Return the section index for the given symbol.
|
||||
///
|
||||
/// This uses the extended section index if present.
|
||||
pub fn symbol_section(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
symbol: &'data Elf::Sym,
|
||||
index: usize,
|
||||
) -> read::Result<Option<SectionIndex>> {
|
||||
match symbol.st_shndx(endian) {
|
||||
elf::SHN_UNDEF => Ok(None),
|
||||
elf::SHN_XINDEX => self
|
||||
.shndx(endian, index)
|
||||
.read_error("Missing ELF symbol extended index")
|
||||
.map(|index| Some(SectionIndex(index as usize))),
|
||||
shndx if shndx < elf::SHN_LORESERVE => Ok(Some(SectionIndex(shndx.into()))),
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the symbol name for the given symbol.
|
||||
pub fn symbol_name(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
symbol: &'data Elf::Sym,
|
||||
) -> read::Result<&'data [u8]> {
|
||||
symbol.name(endian, self.strings)
|
||||
}
|
||||
|
||||
/// Construct a map from addresses to a user-defined map entry.
|
||||
pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Elf::Sym) -> Option<Entry>>(
|
||||
&self,
|
||||
endian: Elf::Endian,
|
||||
f: F,
|
||||
) -> SymbolMap<Entry> {
|
||||
let mut symbols = Vec::with_capacity(self.symbols.len());
|
||||
for symbol in self.symbols {
|
||||
if !symbol.is_definition(endian) {
|
||||
continue;
|
||||
}
|
||||
if let Some(entry) = f(symbol) {
|
||||
symbols.push(entry);
|
||||
}
|
||||
}
|
||||
SymbolMap::new(symbols)
|
||||
}
|
||||
}
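// A minimal usage sketch (not from the crate's docs): the high-level
// `Object`/`ObjectSymbol` traits expose the same data this table parses.
// `file` is assumed to be a successfully parsed `object::File`.
use object::{Object, ObjectSymbol};

fn dump_defined_symbols(file: &object::File) {
    for symbol in file.symbols() {
        if symbol.is_definition() {
            println!(
                "{:#x} {} ({:?})",
                symbol.address(),
                symbol.name().unwrap_or("<invalid>"),
                symbol.kind()
            );
        }
    }
}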
|
||||
|
||||
/// A symbol table in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfSymbolTable32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbolTable<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// A symbol table in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfSymbolTable64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbolTable<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol table in an [`ElfFile`](super::ElfFile).
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ElfSymbolTable<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) endian: Elf::Endian,
|
||||
pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> read::private::Sealed
|
||||
for ElfSymbolTable<'data, 'file, Elf, R>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ObjectSymbolTable<'data>
|
||||
for ElfSymbolTable<'data, 'file, Elf, R>
|
||||
{
|
||||
type Symbol = ElfSymbol<'data, 'file, Elf, R>;
|
||||
type SymbolIterator = ElfSymbolIterator<'data, 'file, Elf, R>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
ElfSymbolIterator {
|
||||
endian: self.endian,
|
||||
symbols: self.symbols,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> read::Result<Self::Symbol> {
|
||||
let symbol = self.symbols.symbol(index.0)?;
|
||||
Ok(ElfSymbol {
|
||||
endian: self.endian,
|
||||
symbols: self.symbols,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfSymbolIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbolIterator<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// An iterator for the symbols in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfSymbolIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbolIterator<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the symbols in an [`ElfFile`](super::ElfFile).
|
||||
pub struct ElfSymbolIterator<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) endian: Elf::Endian,
|
||||
pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
|
||||
pub(super) index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> fmt::Debug
|
||||
for ElfSymbolIterator<'data, 'file, Elf, R>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ElfSymbolIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> Iterator
|
||||
for ElfSymbolIterator<'data, 'file, Elf, R>
|
||||
{
|
||||
type Item = ElfSymbol<'data, 'file, Elf, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.symbols.symbols.get(index)?;
|
||||
self.index += 1;
|
||||
Some(ElfSymbol {
|
||||
endian: self.endian,
|
||||
symbols: self.symbols,
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in an [`ElfFile32`](super::ElfFile32).
|
||||
pub type ElfSymbol32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbol<'data, 'file, elf::FileHeader32<Endian>, R>;
|
||||
/// A symbol in an [`ElfFile64`](super::ElfFile64).
|
||||
pub type ElfSymbol64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
ElfSymbol<'data, 'file, elf::FileHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol in an [`ElfFile`](super::ElfFile).
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ElfSymbol<'data, 'file, Elf, R = &'data [u8]>
|
||||
where
|
||||
Elf: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) endian: Elf::Endian,
|
||||
pub(super) symbols: &'file SymbolTable<'data, Elf, R>,
|
||||
pub(super) index: SymbolIndex,
|
||||
pub(super) symbol: &'data Elf::Sym,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ElfSymbol<'data, 'file, Elf, R> {
|
||||
/// Return a reference to the raw symbol structure.
|
||||
#[inline]
|
||||
pub fn raw_symbol(&self) -> &'data Elf::Sym {
|
||||
self.symbol
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> read::private::Sealed
|
||||
for ElfSymbol<'data, 'file, Elf, R>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Elf: FileHeader, R: ReadRef<'data>> ObjectSymbol<'data>
|
||||
for ElfSymbol<'data, 'file, Elf, R>
|
||||
{
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> read::Result<&'data [u8]> {
|
||||
self.symbol.name(self.endian, self.symbols.strings())
|
||||
}
|
||||
|
||||
fn name(&self) -> read::Result<&'data str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 ELF symbol name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.symbol.st_value(self.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.symbol.st_size(self.endian).into()
|
||||
}
|
||||
|
||||
fn kind(&self) -> SymbolKind {
|
||||
match self.symbol.st_type() {
|
||||
elf::STT_NOTYPE if self.index.0 == 0 => SymbolKind::Null,
|
||||
elf::STT_NOTYPE => SymbolKind::Unknown,
|
||||
elf::STT_OBJECT | elf::STT_COMMON => SymbolKind::Data,
|
||||
elf::STT_FUNC | elf::STT_GNU_IFUNC => SymbolKind::Text,
|
||||
elf::STT_SECTION => SymbolKind::Section,
|
||||
elf::STT_FILE => SymbolKind::File,
|
||||
elf::STT_TLS => SymbolKind::Tls,
|
||||
_ => SymbolKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn section(&self) -> SymbolSection {
|
||||
match self.symbol.st_shndx(self.endian) {
|
||||
elf::SHN_UNDEF => SymbolSection::Undefined,
|
||||
elf::SHN_ABS => {
|
||||
if self.symbol.st_type() == elf::STT_FILE {
|
||||
SymbolSection::None
|
||||
} else {
|
||||
SymbolSection::Absolute
|
||||
}
|
||||
}
|
||||
elf::SHN_COMMON => SymbolSection::Common,
|
||||
elf::SHN_XINDEX => match self.symbols.shndx(self.endian, self.index.0) {
|
||||
Some(index) => SymbolSection::Section(SectionIndex(index as usize)),
|
||||
None => SymbolSection::Unknown,
|
||||
},
|
||||
index if index < elf::SHN_LORESERVE => {
|
||||
SymbolSection::Section(SectionIndex(index as usize))
|
||||
}
|
||||
_ => SymbolSection::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.st_shndx(self.endian) == elf::SHN_UNDEF
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
self.symbol.is_definition(self.endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.st_shndx(self.endian) == elf::SHN_COMMON
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
self.symbol.st_bind() == elf::STB_WEAK
|
||||
}
|
||||
|
||||
fn scope(&self) -> SymbolScope {
|
||||
if self.symbol.st_shndx(self.endian) == elf::SHN_UNDEF {
|
||||
SymbolScope::Unknown
|
||||
} else {
|
||||
match self.symbol.st_bind() {
|
||||
elf::STB_LOCAL => SymbolScope::Compilation,
|
||||
elf::STB_GLOBAL | elf::STB_WEAK => {
|
||||
if self.symbol.st_visibility() == elf::STV_HIDDEN {
|
||||
SymbolScope::Linkage
|
||||
} else {
|
||||
SymbolScope::Dynamic
|
||||
}
|
||||
}
|
||||
_ => SymbolScope::Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
self.symbol.st_bind() != elf::STB_LOCAL
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
self.symbol.st_bind() == elf::STB_LOCAL
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
SymbolFlags::Elf {
|
||||
st_info: self.symbol.st_info(),
|
||||
st_other: self.symbol.st_other(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`elf::Sym32`] and [`elf::Sym64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Sym: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn st_name(&self, endian: Self::Endian) -> u32;
|
||||
fn st_info(&self) -> u8;
|
||||
fn st_bind(&self) -> u8;
|
||||
fn st_type(&self) -> u8;
|
||||
fn st_other(&self) -> u8;
|
||||
fn st_visibility(&self) -> u8;
|
||||
fn st_shndx(&self, endian: Self::Endian) -> u16;
|
||||
fn st_value(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn st_size(&self, endian: Self::Endian) -> Self::Word;
|
||||
|
||||
/// Parse the symbol name from the string table.
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> read::Result<&'data [u8]> {
|
||||
strings
|
||||
.get(self.st_name(endian))
|
||||
.read_error("Invalid ELF symbol name offset")
|
||||
}
|
||||
|
||||
/// Return true if the symbol is undefined.
|
||||
#[inline]
|
||||
fn is_undefined(&self, endian: Self::Endian) -> bool {
|
||||
self.st_shndx(endian) == elf::SHN_UNDEF
|
||||
}
|
||||
|
||||
/// Return true if the symbol is a definition of a function or data object.
|
||||
fn is_definition(&self, endian: Self::Endian) -> bool {
|
||||
let shndx = self.st_shndx(endian);
|
||||
if shndx == elf::SHN_UNDEF || (shndx >= elf::SHN_LORESERVE && shndx != elf::SHN_XINDEX) {
|
||||
return false;
|
||||
}
|
||||
match self.st_type() {
|
||||
elf::STT_NOTYPE => self.st_size(endian).into() != 0,
|
||||
elf::STT_FUNC | elf::STT_OBJECT => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
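// For reference only (standard ELF packing, not specific to this crate):
// `st_info` holds the binding in its high nibble and the type in its low
// nibble, and the low two bits of `st_other` hold the visibility, which is
// what the `st_bind`, `st_type` and `st_visibility` accessors decode.
fn st_bind(st_info: u8) -> u8 { st_info >> 4 }
fn st_type(st_info: u8) -> u8 { st_info & 0xf }
fn st_visibility(st_other: u8) -> u8 { st_other & 0x3 }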
|
||||
|
||||
impl<Endian: endian::Endian> Sym for elf::Sym32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn st_name(&self, endian: Self::Endian) -> u32 {
|
||||
self.st_name.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_info(&self) -> u8 {
|
||||
self.st_info
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_bind(&self) -> u8 {
|
||||
self.st_bind()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_type(&self) -> u8 {
|
||||
self.st_type()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_other(&self) -> u8 {
|
||||
self.st_other
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_visibility(&self) -> u8 {
|
||||
self.st_visibility()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_shndx(&self, endian: Self::Endian) -> u16 {
|
||||
self.st_shndx.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_value(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.st_value.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.st_size.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Sym for elf::Sym64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
|
||||
#[inline]
|
||||
fn st_name(&self, endian: Self::Endian) -> u32 {
|
||||
self.st_name.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_info(&self) -> u8 {
|
||||
self.st_info
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_bind(&self) -> u8 {
|
||||
self.st_bind()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_type(&self) -> u8 {
|
||||
self.st_type()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_other(&self) -> u8 {
|
||||
self.st_other
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_visibility(&self) -> u8 {
|
||||
self.st_visibility()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_shndx(&self, endian: Self::Endian) -> u16 {
|
||||
self.st_shndx.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_value(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.st_value.get(endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn st_size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.st_size.get(endian)
|
||||
}
|
||||
}
|
||||
424
vendor/object/src/read/elf/version.rs
vendored
Normal file
@@ -0,0 +1,424 @@
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{Bytes, ReadError, ReadRef, Result, StringTable};
|
||||
use crate::{elf, endian};
|
||||
|
||||
use super::FileHeader;
|
||||
|
||||
/// A version index.
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct VersionIndex(pub u16);
|
||||
|
||||
impl VersionIndex {
|
||||
/// Return the version index.
|
||||
pub fn index(&self) -> u16 {
|
||||
self.0 & elf::VERSYM_VERSION
|
||||
}
|
||||
|
||||
/// Return true if it is the local index.
|
||||
pub fn is_local(&self) -> bool {
|
||||
self.index() == elf::VER_NDX_LOCAL
|
||||
}
|
||||
|
||||
/// Return true if it is the global index.
|
||||
pub fn is_global(&self) -> bool {
|
||||
self.index() == elf::VER_NDX_GLOBAL
|
||||
}
|
||||
|
||||
/// Return the hidden flag.
|
||||
pub fn is_hidden(&self) -> bool {
|
||||
self.0 & elf::VERSYM_HIDDEN != 0
|
||||
}
|
||||
}
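// For reference only (standard GNU symbol versioning, not specific to this
// crate): a versym entry is a 16-bit value whose top bit is the "hidden" flag
// and whose remaining bits are the version index, which is what `VersionIndex`
// decodes via `elf::VERSYM_HIDDEN` and `elf::VERSYM_VERSION`.
fn split_versym(versym: u16) -> (bool, u16) {
    let hidden = versym & 0x8000 != 0; // VERSYM_HIDDEN
    let index = versym & 0x7fff; // VERSYM_VERSION
    (hidden, index)
}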
|
||||
|
||||
/// A version definition or requirement.
|
||||
///
|
||||
/// This is derived from entries in the [`elf::SHT_GNU_VERDEF`] and [`elf::SHT_GNU_VERNEED`] sections.
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct Version<'data> {
|
||||
name: &'data [u8],
|
||||
hash: u32,
|
||||
// Used to keep track of valid indices in `VersionTable`.
|
||||
valid: bool,
|
||||
}
|
||||
|
||||
impl<'data> Version<'data> {
|
||||
/// Return the version name.
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
self.name
|
||||
}
|
||||
|
||||
/// Return the hash of the version name.
|
||||
pub fn hash(&self) -> u32 {
|
||||
self.hash
|
||||
}
|
||||
}
|
||||
|
||||
/// A table of version definitions and requirements.
|
||||
///
|
||||
/// It allows looking up the version information for a given symbol index.
|
||||
///
|
||||
/// This is derived from entries in the [`elf::SHT_GNU_VERSYM`], [`elf::SHT_GNU_VERDEF`]
|
||||
/// and [`elf::SHT_GNU_VERNEED`] sections.
|
||||
///
|
||||
/// Returned by [`SectionTable::versions`](super::SectionTable::versions).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VersionTable<'data, Elf: FileHeader> {
|
||||
symbols: &'data [elf::Versym<Elf::Endian>],
|
||||
versions: Vec<Version<'data>>,
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> Default for VersionTable<'data, Elf> {
|
||||
fn default() -> Self {
|
||||
VersionTable {
|
||||
symbols: &[],
|
||||
versions: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Elf: FileHeader> VersionTable<'data, Elf> {
|
||||
/// Parse the version sections.
|
||||
pub fn parse<R: ReadRef<'data>>(
|
||||
endian: Elf::Endian,
|
||||
versyms: &'data [elf::Versym<Elf::Endian>],
|
||||
verdefs: Option<VerdefIterator<'data, Elf>>,
|
||||
verneeds: Option<VerneedIterator<'data, Elf>>,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<Self> {
|
||||
let mut max_index = 0;
|
||||
if let Some(mut verdefs) = verdefs.clone() {
|
||||
while let Some((verdef, _)) = verdefs.next()? {
|
||||
if verdef.vd_flags.get(endian) & elf::VER_FLG_BASE != 0 {
|
||||
continue;
|
||||
}
|
||||
let index = verdef.vd_ndx.get(endian) & elf::VERSYM_VERSION;
|
||||
if max_index < index {
|
||||
max_index = index;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(mut verneeds) = verneeds.clone() {
|
||||
while let Some((_, mut vernauxs)) = verneeds.next()? {
|
||||
while let Some(vernaux) = vernauxs.next()? {
|
||||
let index = vernaux.vna_other.get(endian) & elf::VERSYM_VERSION;
|
||||
if max_index < index {
|
||||
max_index = index;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Indices should be sequential, but this could be up to
|
||||
// 32k * size_of::<Version>() if max_index is bad.
|
||||
let mut versions = vec![Version::default(); max_index as usize + 1];
|
||||
|
||||
if let Some(mut verdefs) = verdefs {
|
||||
while let Some((verdef, mut verdauxs)) = verdefs.next()? {
|
||||
if verdef.vd_flags.get(endian) & elf::VER_FLG_BASE != 0 {
|
||||
continue;
|
||||
}
|
||||
let index = verdef.vd_ndx.get(endian) & elf::VERSYM_VERSION;
|
||||
if index <= elf::VER_NDX_GLOBAL {
|
||||
// TODO: return error?
|
||||
continue;
|
||||
}
|
||||
if let Some(verdaux) = verdauxs.next()? {
|
||||
versions[usize::from(index)] = Version {
|
||||
name: verdaux.name(endian, strings)?,
|
||||
hash: verdef.vd_hash.get(endian),
|
||||
valid: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(mut verneeds) = verneeds {
|
||||
while let Some((_, mut vernauxs)) = verneeds.next()? {
|
||||
while let Some(vernaux) = vernauxs.next()? {
|
||||
let index = vernaux.vna_other.get(endian) & elf::VERSYM_VERSION;
|
||||
if index <= elf::VER_NDX_GLOBAL {
|
||||
// TODO: return error?
|
||||
continue;
|
||||
}
|
||||
versions[usize::from(index)] = Version {
|
||||
name: vernaux.name(endian, strings)?,
|
||||
hash: vernaux.vna_hash.get(endian),
|
||||
valid: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(VersionTable {
|
||||
symbols: versyms,
|
||||
versions,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return true if the version table is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.symbols.is_empty()
|
||||
}
|
||||
|
||||
/// Return the version index for a given symbol index.
|
||||
pub fn version_index(&self, endian: Elf::Endian, index: usize) -> VersionIndex {
|
||||
let version_index = match self.symbols.get(index) {
|
||||
Some(x) => x.0.get(endian),
|
||||
// Ideally this would be VER_NDX_LOCAL for undefined symbols,
|
||||
// but currently there are no checks that need this distinction.
|
||||
None => elf::VER_NDX_GLOBAL,
|
||||
};
|
||||
VersionIndex(version_index)
|
||||
}
|
||||
|
||||
/// Return version information for a given symbol version index.
|
||||
///
|
||||
/// Returns `Ok(None)` for local and global versions.
|
||||
/// Returns `Err(_)` if index is invalid.
|
||||
pub fn version(&self, index: VersionIndex) -> Result<Option<&Version<'data>>> {
|
||||
if index.index() <= elf::VER_NDX_GLOBAL {
|
||||
return Ok(None);
|
||||
}
|
||||
self.versions
|
||||
.get(usize::from(index.index()))
|
||||
.filter(|version| version.valid)
|
||||
.read_error("Invalid ELF symbol version index")
|
||||
            .map(Some)
    }

    /// Return true if the given symbol index satisfies the requirements of `need`.
    ///
    /// Returns false for any error.
    ///
    /// Note: this function hasn't been fully tested and is likely to be incomplete.
    pub fn matches(&self, endian: Elf::Endian, index: usize, need: Option<&Version<'_>>) -> bool {
        let version_index = self.version_index(endian, index);
        let def = match self.version(version_index) {
            Ok(def) => def,
            Err(_) => return false,
        };
        match (def, need) {
            (Some(def), Some(need)) => need.hash == def.hash && need.name == def.name,
            (None, Some(_need)) => {
                // Version must be present if needed.
                false
            }
            (Some(_def), None) => {
                // For a dlsym call, use the newest version.
                // TODO: if not a dlsym call, then use the oldest version.
                !version_index.is_hidden()
            }
            (None, None) => true,
        }
    }
}

/// An iterator for the entries in an ELF [`elf::SHT_GNU_VERDEF`] section.
#[derive(Debug, Clone)]
pub struct VerdefIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> VerdefIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8]) -> Self {
        VerdefIterator {
            endian,
            data: Bytes(data),
        }
    }

    /// Return the next `Verdef` entry.
    pub fn next(
        &mut self,
    ) -> Result<Option<(&'data elf::Verdef<Elf::Endian>, VerdauxIterator<'data, Elf>)>> {
        if self.data.is_empty() {
            return Ok(None);
        }

        let verdef = self
            .data
            .read_at::<elf::Verdef<_>>(0)
            .read_error("ELF verdef is too short")?;

        let mut verdaux_data = self.data;
        verdaux_data
            .skip(verdef.vd_aux.get(self.endian) as usize)
            .read_error("Invalid ELF vd_aux")?;
        let verdaux =
            VerdauxIterator::new(self.endian, verdaux_data.0, verdef.vd_cnt.get(self.endian));

        let next = verdef.vd_next.get(self.endian);
        if next != 0 {
            self.data
                .skip(next as usize)
                .read_error("Invalid ELF vd_next")?;
        } else {
            self.data = Bytes(&[]);
        }
        Ok(Some((verdef, verdaux)))
    }
}

/// An iterator for the auxiliary records for an entry in an ELF [`elf::SHT_GNU_VERDEF`] section.
#[derive(Debug, Clone)]
pub struct VerdauxIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
    count: u16,
}

impl<'data, Elf: FileHeader> VerdauxIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8], count: u16) -> Self {
        VerdauxIterator {
            endian,
            data: Bytes(data),
            count,
        }
    }

    /// Return the next `Verdaux` entry.
    pub fn next(&mut self) -> Result<Option<&'data elf::Verdaux<Elf::Endian>>> {
        if self.count == 0 {
            return Ok(None);
        }

        let verdaux = self
            .data
            .read_at::<elf::Verdaux<_>>(0)
            .read_error("ELF verdaux is too short")?;

        self.data
            .skip(verdaux.vda_next.get(self.endian) as usize)
            .read_error("Invalid ELF vda_next")?;
        self.count -= 1;
        Ok(Some(verdaux))
    }
}

/// An iterator for the entries in an ELF [`elf::SHT_GNU_VERNEED`] section.
#[derive(Debug, Clone)]
pub struct VerneedIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
}

impl<'data, Elf: FileHeader> VerneedIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8]) -> Self {
        VerneedIterator {
            endian,
            data: Bytes(data),
        }
    }

    /// Return the next `Verneed` entry.
    pub fn next(
        &mut self,
    ) -> Result<
        Option<(
            &'data elf::Verneed<Elf::Endian>,
            VernauxIterator<'data, Elf>,
        )>,
    > {
        if self.data.is_empty() {
            return Ok(None);
        }

        let verneed = self
            .data
            .read_at::<elf::Verneed<_>>(0)
            .read_error("ELF verneed is too short")?;

        let mut vernaux_data = self.data;
        vernaux_data
            .skip(verneed.vn_aux.get(self.endian) as usize)
            .read_error("Invalid ELF vn_aux")?;
        let vernaux =
            VernauxIterator::new(self.endian, vernaux_data.0, verneed.vn_cnt.get(self.endian));

        let next = verneed.vn_next.get(self.endian);
        if next != 0 {
            self.data
                .skip(next as usize)
                .read_error("Invalid ELF vn_next")?;
        } else {
            self.data = Bytes(&[]);
        }
        Ok(Some((verneed, vernaux)))
    }
}

/// An iterator for the auxiliary records for an entry in an ELF [`elf::SHT_GNU_VERNEED`] section.
#[derive(Debug, Clone)]
pub struct VernauxIterator<'data, Elf: FileHeader> {
    endian: Elf::Endian,
    data: Bytes<'data>,
    count: u16,
}

impl<'data, Elf: FileHeader> VernauxIterator<'data, Elf> {
    pub(super) fn new(endian: Elf::Endian, data: &'data [u8], count: u16) -> Self {
        VernauxIterator {
            endian,
            data: Bytes(data),
            count,
        }
    }

    /// Return the next `Vernaux` entry.
    pub fn next(&mut self) -> Result<Option<&'data elf::Vernaux<Elf::Endian>>> {
        if self.count == 0 {
            return Ok(None);
        }

        let vernaux = self
            .data
            .read_at::<elf::Vernaux<_>>(0)
            .read_error("ELF vernaux is too short")?;

        self.data
            .skip(vernaux.vna_next.get(self.endian) as usize)
            .read_error("Invalid ELF vna_next")?;
        self.count -= 1;
        Ok(Some(vernaux))
    }
}

impl<Endian: endian::Endian> elf::Verdaux<Endian> {
    /// Parse the version name from the string table.
    pub fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vda_name.get(endian))
            .read_error("Invalid ELF vda_name")
    }
}

impl<Endian: endian::Endian> elf::Verneed<Endian> {
    /// Parse the file from the string table.
    pub fn file<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vn_file.get(endian))
            .read_error("Invalid ELF vn_file")
    }
}

impl<Endian: endian::Endian> elf::Vernaux<Endian> {
    /// Parse the version name from the string table.
    pub fn name<'data, R: ReadRef<'data>>(
        &self,
        endian: Endian,
        strings: StringTable<'data, R>,
    ) -> Result<&'data [u8]> {
        strings
            .get(self.vna_name.get(endian))
            .read_error("Invalid ELF vna_name")
    }
}
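// Illustrative usage sketch (not part of the vendored source): walking a
// `VerdefIterator` and reading each auxiliary name via the `Verdaux::name`
// helper above. The `dump_verdefs` name is an assumption for the example; a
// `VerdefIterator` and its matching `StringTable` are normally obtained from
// the section table elsewhere in this crate.
#[allow(dead_code)]
fn dump_verdefs<'data, Elf: FileHeader>(
    mut verdefs: VerdefIterator<'data, Elf>,
    endian: Elf::Endian,
    strings: StringTable<'data>,
) -> Result<()> {
    while let Some((_verdef, mut verdauxs)) = verdefs.next()? {
        // The first `Verdaux` record names the version definition itself;
        // any further records name the versions it depends on.
        while let Some(verdaux) = verdauxs.next()? {
            let _name = verdaux.name(endian, strings)?;
        }
    }
    Ok(())
}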
344
vendor/object/src/read/macho/dyld_cache.rs
vendored
Normal file
@@ -0,0 +1,344 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::slice;
|
||||
|
||||
use crate::read::{Error, File, ReadError, ReadRef, Result};
|
||||
use crate::{macho, Architecture, Endian, Endianness};
|
||||
|
||||
/// A parsed representation of the dyld shared cache.
|
||||
#[derive(Debug)]
|
||||
pub struct DyldCache<'data, E = Endianness, R = &'data [u8]>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
endian: E,
|
||||
data: R,
|
||||
subcaches: Vec<DyldSubCache<'data, E, R>>,
|
||||
mappings: &'data [macho::DyldCacheMappingInfo<E>],
|
||||
images: &'data [macho::DyldCacheImageInfo<E>],
|
||||
arch: Architecture,
|
||||
}
|
||||
|
||||
/// Information about a subcache.
|
||||
#[derive(Debug)]
|
||||
pub struct DyldSubCache<'data, E = Endianness, R = &'data [u8]>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
data: R,
|
||||
mappings: &'data [macho::DyldCacheMappingInfo<E>],
|
||||
}
|
||||
|
||||
// This is the offset of the images_across_all_subcaches_count field.
|
||||
const MIN_HEADER_SIZE_SUBCACHES: u32 = 0x1c4;
|
||||
|
||||
impl<'data, E, R> DyldCache<'data, E, R>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the raw dyld shared cache data.
|
||||
///
|
||||
/// For shared caches from macOS 12 / iOS 15 and above, the subcache files need to be
|
||||
/// supplied as well, in the correct order, with the `.symbols` subcache last (if present).
|
||||
/// For example, `data` would be the data for `dyld_shared_cache_x86_64`,
|
||||
/// and `subcache_data` would be the data for `[dyld_shared_cache_x86_64.1, dyld_shared_cache_x86_64.2, ...]`.
|
||||
pub fn parse(data: R, subcache_data: &[R]) -> Result<Self> {
|
||||
let header = macho::DyldCacheHeader::parse(data)?;
|
||||
let (arch, endian) = header.parse_magic()?;
|
||||
let mappings = header.mappings(endian, data)?;
|
||||
|
||||
let symbols_subcache_uuid = header.symbols_subcache_uuid(endian);
|
||||
let subcaches_info = header.subcaches(endian, data)?.unwrap_or(&[]);
|
||||
|
||||
if subcache_data.len() != subcaches_info.len() + symbols_subcache_uuid.is_some() as usize {
|
||||
return Err(Error("Incorrect number of SubCaches"));
|
||||
}
|
||||
|
||||
// Split out the .symbols subcache data from the other subcaches.
|
||||
let (symbols_subcache_data_and_uuid, subcache_data) =
|
||||
if let Some(symbols_uuid) = symbols_subcache_uuid {
|
||||
let (sym_data, rest_data) = subcache_data.split_last().unwrap();
|
||||
(Some((*sym_data, symbols_uuid)), rest_data)
|
||||
} else {
|
||||
(None, subcache_data)
|
||||
};
|
||||
|
||||
// Read the regular SubCaches (.1, .2, ...), if present.
|
||||
let mut subcaches = Vec::new();
|
||||
for (&data, info) in subcache_data.iter().zip(subcaches_info.iter()) {
|
||||
let sc_header = macho::DyldCacheHeader::<E>::parse(data)?;
|
||||
if sc_header.uuid != info.uuid {
|
||||
return Err(Error("Unexpected SubCache UUID"));
|
||||
}
|
||||
let mappings = sc_header.mappings(endian, data)?;
|
||||
subcaches.push(DyldSubCache { data, mappings });
|
||||
}
|
||||
|
||||
// Read the .symbols SubCache, if present.
|
||||
// Other than the UUID verification, the symbols SubCache is currently unused.
|
||||
let _symbols_subcache = match symbols_subcache_data_and_uuid {
|
||||
Some((data, uuid)) => {
|
||||
let sc_header = macho::DyldCacheHeader::<E>::parse(data)?;
|
||||
if sc_header.uuid != uuid {
|
||||
return Err(Error("Unexpected .symbols SubCache UUID"));
|
||||
}
|
||||
let mappings = sc_header.mappings(endian, data)?;
|
||||
Some(DyldSubCache { data, mappings })
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let images = header.images(endian, data)?;
|
||||
Ok(DyldCache {
|
||||
endian,
|
||||
data,
|
||||
subcaches,
|
||||
mappings,
|
||||
images,
|
||||
arch,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the architecture type of the file.
|
||||
pub fn architecture(&self) -> Architecture {
|
||||
self.arch
|
||||
}
|
||||
|
||||
/// Get the endianness of the file.
|
||||
#[inline]
|
||||
pub fn endianness(&self) -> Endianness {
|
||||
if self.is_little_endian() {
|
||||
Endianness::Little
|
||||
} else {
|
||||
Endianness::Big
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if the file is little endian, false if it is big endian.
|
||||
pub fn is_little_endian(&self) -> bool {
|
||||
self.endian.is_little_endian()
|
||||
}
|
||||
|
||||
/// Iterate over the images in this cache.
|
||||
pub fn images<'cache>(&'cache self) -> DyldCacheImageIterator<'data, 'cache, E, R> {
|
||||
DyldCacheImageIterator {
|
||||
cache: self,
|
||||
iter: self.images.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Find the address in a mapping and return the cache or subcache data it was found in,
|
||||
/// together with the translated file offset.
|
||||
pub fn data_and_offset_for_address(&self, address: u64) -> Option<(R, u64)> {
|
||||
if let Some(file_offset) = address_to_file_offset(address, self.endian, self.mappings) {
|
||||
return Some((self.data, file_offset));
|
||||
}
|
||||
for subcache in &self.subcaches {
|
||||
if let Some(file_offset) =
|
||||
address_to_file_offset(address, self.endian, subcache.mappings)
|
||||
{
|
||||
return Some((subcache.data, file_offset));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over all the images (dylibs) in the dyld shared cache.
|
||||
#[derive(Debug)]
|
||||
pub struct DyldCacheImageIterator<'data, 'cache, E = Endianness, R = &'data [u8]>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
cache: &'cache DyldCache<'data, E, R>,
|
||||
iter: slice::Iter<'data, macho::DyldCacheImageInfo<E>>,
|
||||
}
|
||||
|
||||
impl<'data, 'cache, E, R> Iterator for DyldCacheImageIterator<'data, 'cache, E, R>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = DyldCacheImage<'data, 'cache, E, R>;
|
||||
|
||||
fn next(&mut self) -> Option<DyldCacheImage<'data, 'cache, E, R>> {
|
||||
let image_info = self.iter.next()?;
|
||||
Some(DyldCacheImage {
|
||||
cache: self.cache,
|
||||
image_info,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// One image (dylib) from inside the dyld shared cache.
|
||||
#[derive(Debug)]
|
||||
pub struct DyldCacheImage<'data, 'cache, E = Endianness, R = &'data [u8]>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(crate) cache: &'cache DyldCache<'data, E, R>,
|
||||
image_info: &'data macho::DyldCacheImageInfo<E>,
|
||||
}
|
||||
|
||||
impl<'data, 'cache, E, R> DyldCacheImage<'data, 'cache, E, R>
|
||||
where
|
||||
E: Endian,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// The file system path of this image.
|
||||
pub fn path(&self) -> Result<&'data str> {
|
||||
let path = self.image_info.path(self.cache.endian, self.cache.data)?;
|
||||
// The path should always be ascii, so from_utf8 should always succeed.
|
||||
let path = core::str::from_utf8(path).map_err(|_| Error("Path string not valid utf-8"))?;
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
/// The subcache data which contains the Mach-O header for this image,
|
||||
/// together with the file offset at which this image starts.
|
||||
pub fn image_data_and_offset(&self) -> Result<(R, u64)> {
|
||||
let address = self.image_info.address.get(self.cache.endian);
|
||||
self.cache
|
||||
.data_and_offset_for_address(address)
|
||||
.ok_or(Error("Address not found in any mapping"))
|
||||
}
|
||||
|
||||
/// Parse this image into an Object.
|
||||
pub fn parse_object(&self) -> Result<File<'data, R>> {
|
||||
File::parse_dyld_cache_image(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> macho::DyldCacheHeader<E> {
|
||||
/// Read the dyld cache header.
|
||||
pub fn parse<'data, R: ReadRef<'data>>(data: R) -> Result<&'data Self> {
|
||||
data.read_at::<macho::DyldCacheHeader<E>>(0)
|
||||
.read_error("Invalid dyld cache header size or alignment")
|
||||
}
|
||||
|
||||
/// Returns (arch, endian) based on the magic string.
|
||||
pub fn parse_magic(&self) -> Result<(Architecture, E)> {
|
||||
let (arch, is_big_endian) = match &self.magic {
|
||||
b"dyld_v1 i386\0" => (Architecture::I386, false),
|
||||
b"dyld_v1 x86_64\0" => (Architecture::X86_64, false),
|
||||
b"dyld_v1 x86_64h\0" => (Architecture::X86_64, false),
|
||||
b"dyld_v1 ppc\0" => (Architecture::PowerPc, true),
|
||||
b"dyld_v1 armv6\0" => (Architecture::Arm, false),
|
||||
b"dyld_v1 armv7\0" => (Architecture::Arm, false),
|
||||
b"dyld_v1 armv7f\0" => (Architecture::Arm, false),
|
||||
b"dyld_v1 armv7s\0" => (Architecture::Arm, false),
|
||||
b"dyld_v1 armv7k\0" => (Architecture::Arm, false),
|
||||
b"dyld_v1 arm64\0" => (Architecture::Aarch64, false),
|
||||
b"dyld_v1 arm64e\0" => (Architecture::Aarch64, false),
|
||||
_ => return Err(Error("Unrecognized dyld cache magic")),
|
||||
};
|
||||
let endian =
|
||||
E::from_big_endian(is_big_endian).read_error("Unsupported dyld cache endian")?;
|
||||
Ok((arch, endian))
|
||||
}
|
||||
|
||||
/// Return the mapping information table.
|
||||
pub fn mappings<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: E,
|
||||
data: R,
|
||||
) -> Result<&'data [macho::DyldCacheMappingInfo<E>]> {
|
||||
data.read_slice_at::<macho::DyldCacheMappingInfo<E>>(
|
||||
self.mapping_offset.get(endian).into(),
|
||||
self.mapping_count.get(endian) as usize,
|
||||
)
|
||||
.read_error("Invalid dyld cache mapping size or alignment")
|
||||
}
|
||||
|
||||
/// Return the information about subcaches, if present.
|
||||
pub fn subcaches<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: E,
|
||||
data: R,
|
||||
) -> Result<Option<&'data [macho::DyldSubCacheInfo<E>]>> {
|
||||
if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
|
||||
let subcaches = data
|
||||
.read_slice_at::<macho::DyldSubCacheInfo<E>>(
|
||||
self.subcaches_offset.get(endian).into(),
|
||||
self.subcaches_count.get(endian) as usize,
|
||||
)
|
||||
.read_error("Invalid dyld subcaches size or alignment")?;
|
||||
Ok(Some(subcaches))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the UUID for the .symbols subcache, if present.
|
||||
pub fn symbols_subcache_uuid(&self, endian: E) -> Option<[u8; 16]> {
|
||||
if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
|
||||
let uuid = self.symbols_subcache_uuid;
|
||||
if uuid != [0; 16] {
|
||||
return Some(uuid);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Return the image information table.
|
||||
pub fn images<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: E,
|
||||
data: R,
|
||||
) -> Result<&'data [macho::DyldCacheImageInfo<E>]> {
|
||||
if self.mapping_offset.get(endian) >= MIN_HEADER_SIZE_SUBCACHES {
|
||||
data.read_slice_at::<macho::DyldCacheImageInfo<E>>(
|
||||
self.images_across_all_subcaches_offset.get(endian).into(),
|
||||
self.images_across_all_subcaches_count.get(endian) as usize,
|
||||
)
|
||||
.read_error("Invalid dyld cache image size or alignment")
|
||||
} else {
|
||||
data.read_slice_at::<macho::DyldCacheImageInfo<E>>(
|
||||
self.images_offset.get(endian).into(),
|
||||
self.images_count.get(endian) as usize,
|
||||
)
|
||||
.read_error("Invalid dyld cache image size or alignment")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endian> macho::DyldCacheImageInfo<E> {
|
||||
/// The file system path of this image.
|
||||
pub fn path<'data, R: ReadRef<'data>>(&self, endian: E, data: R) -> Result<&'data [u8]> {
|
||||
let r_start = self.path_file_offset.get(endian).into();
|
||||
let r_end = data.len().read_error("Couldn't get data len()")?;
|
||||
data.read_bytes_at_until(r_start..r_end, 0)
|
||||
.read_error("Couldn't read dyld cache image path")
|
||||
}
|
||||
|
||||
/// Find the file offset of the image by looking up its address in the mappings.
|
||||
pub fn file_offset(
|
||||
&self,
|
||||
endian: E,
|
||||
mappings: &[macho::DyldCacheMappingInfo<E>],
|
||||
) -> Result<u64> {
|
||||
let address = self.address.get(endian);
|
||||
address_to_file_offset(address, endian, mappings)
|
||||
.read_error("Invalid dyld cache image address")
|
||||
}
|
||||
}
|
||||
|
||||
/// Find the file offset of the image by looking up its address in the mappings.
|
||||
pub fn address_to_file_offset<E: Endian>(
|
||||
address: u64,
|
||||
endian: E,
|
||||
mappings: &[macho::DyldCacheMappingInfo<E>],
|
||||
) -> Option<u64> {
|
||||
for mapping in mappings {
|
||||
let mapping_address = mapping.address.get(endian);
|
||||
if address >= mapping_address
|
||||
&& address < mapping_address.wrapping_add(mapping.size.get(endian))
|
||||
{
|
||||
return Some(address - mapping_address + mapping.file_offset.get(endian));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
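// Illustrative usage sketch (not part of the vendored source): parsing a dyld
// shared cache that has no subcache files and walking its images. The
// `list_cache_images` name is an assumption for the example; caches from
// macOS 12 / iOS 15 onwards would also need their subcache data passed in.
#[allow(dead_code)]
fn list_cache_images(contents: &[u8]) -> Result<()> {
    let cache = DyldCache::<Endianness, &[u8]>::parse(contents, &[])?;
    let _arch = cache.architecture();
    for image in cache.images() {
        // `path` returns the install name recorded in the image info table.
        let _path = image.path()?;
        // `parse_object` yields a `File` that implements the `Object` trait.
        let _object = image.parse_object()?;
    }
    Ok(())
}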
122
vendor/object/src/read/macho/fat.rs
vendored
Normal file
@@ -0,0 +1,122 @@
use crate::read::{Architecture, Error, ReadError, ReadRef, Result};
use crate::{macho, BigEndian, Pod};

pub use macho::{FatArch32, FatArch64, FatHeader};

impl FatHeader {
    /// Attempt to parse a fat header.
    ///
    /// Does not validate the magic value.
    pub fn parse<'data, R: ReadRef<'data>>(file: R) -> Result<&'data FatHeader> {
        file.read_at::<FatHeader>(0)
            .read_error("Invalid fat header size or alignment")
    }

    /// Attempt to parse a fat header and 32-bit fat arches.
    pub fn parse_arch32<'data, R: ReadRef<'data>>(file: R) -> Result<&'data [FatArch32]> {
        let mut offset = 0;
        let header = file
            .read::<FatHeader>(&mut offset)
            .read_error("Invalid fat header size or alignment")?;
        if header.magic.get(BigEndian) != macho::FAT_MAGIC {
            return Err(Error("Invalid 32-bit fat magic"));
        }
        file.read_slice::<FatArch32>(&mut offset, header.nfat_arch.get(BigEndian) as usize)
            .read_error("Invalid nfat_arch")
    }

    /// Attempt to parse a fat header and 64-bit fat arches.
    pub fn parse_arch64<'data, R: ReadRef<'data>>(file: R) -> Result<&'data [FatArch64]> {
        let mut offset = 0;
        let header = file
            .read::<FatHeader>(&mut offset)
            .read_error("Invalid fat header size or alignment")?;
        if header.magic.get(BigEndian) != macho::FAT_MAGIC_64 {
            return Err(Error("Invalid 64-bit fat magic"));
        }
        file.read_slice::<FatArch64>(&mut offset, header.nfat_arch.get(BigEndian) as usize)
            .read_error("Invalid nfat_arch")
    }
}

/// A trait for generic access to [`macho::FatArch32`] and [`macho::FatArch64`].
#[allow(missing_docs)]
pub trait FatArch: Pod {
    type Word: Into<u64>;

    fn cputype(&self) -> u32;
    fn cpusubtype(&self) -> u32;
    fn offset(&self) -> Self::Word;
    fn size(&self) -> Self::Word;
    fn align(&self) -> u32;

    fn architecture(&self) -> Architecture {
        match self.cputype() {
            macho::CPU_TYPE_ARM => Architecture::Arm,
            macho::CPU_TYPE_ARM64 => Architecture::Aarch64,
            macho::CPU_TYPE_X86 => Architecture::I386,
            macho::CPU_TYPE_X86_64 => Architecture::X86_64,
            macho::CPU_TYPE_MIPS => Architecture::Mips,
            macho::CPU_TYPE_POWERPC => Architecture::PowerPc,
            macho::CPU_TYPE_POWERPC64 => Architecture::PowerPc64,
            _ => Architecture::Unknown,
        }
    }

    fn file_range(&self) -> (u64, u64) {
        (self.offset().into(), self.size().into())
    }

    fn data<'data, R: ReadRef<'data>>(&self, file: R) -> Result<&'data [u8]> {
        file.read_bytes_at(self.offset().into(), self.size().into())
            .read_error("Invalid fat arch offset or size")
    }
}

impl FatArch for FatArch32 {
    type Word = u32;

    fn cputype(&self) -> u32 {
        self.cputype.get(BigEndian)
    }

    fn cpusubtype(&self) -> u32 {
        self.cpusubtype.get(BigEndian)
    }

    fn offset(&self) -> Self::Word {
        self.offset.get(BigEndian)
    }

    fn size(&self) -> Self::Word {
        self.size.get(BigEndian)
    }

    fn align(&self) -> u32 {
        self.align.get(BigEndian)
    }
}

impl FatArch for FatArch64 {
    type Word = u64;

    fn cputype(&self) -> u32 {
        self.cputype.get(BigEndian)
    }

    fn cpusubtype(&self) -> u32 {
        self.cpusubtype.get(BigEndian)
    }

    fn offset(&self) -> Self::Word {
        self.offset.get(BigEndian)
    }

    fn size(&self) -> Self::Word {
        self.size.get(BigEndian)
    }

    fn align(&self) -> u32 {
        self.align.get(BigEndian)
    }
}
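// Illustrative usage sketch (not part of the vendored source): picking the
// arm64 slice out of a 32-bit fat (universal) Mach-O file. The `arm64_slice`
// name is an assumption for the example; a complete caller would also try
// `parse_arch64` for files using `FAT_MAGIC_64`.
#[allow(dead_code)]
fn arm64_slice(file: &[u8]) -> Result<Option<&[u8]>> {
    for arch in FatHeader::parse_arch32(file)? {
        if arch.architecture() == Architecture::Aarch64 {
            // `FatArch::data` returns the bytes of the embedded Mach-O image.
            return arch.data(file).map(Some);
        }
    }
    Ok(None)
}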
781
vendor/object/src/read/macho/file.rs
vendored
Normal file
@@ -0,0 +1,781 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::{mem, str};
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, ComdatKind, Error, Export, FileFlags, Import, NoDynamicRelocationIterator,
|
||||
Object, ObjectComdat, ObjectKind, ObjectMap, ObjectSection, ReadError, ReadRef, Result,
|
||||
SectionIndex, SubArchitecture, SymbolIndex,
|
||||
};
|
||||
use crate::{endian, macho, BigEndian, ByteString, Endian, Endianness, Pod};
|
||||
|
||||
use super::{
|
||||
DyldCacheImage, LoadCommandIterator, MachOSection, MachOSectionInternal, MachOSectionIterator,
|
||||
MachOSegment, MachOSegmentInternal, MachOSegmentIterator, MachOSymbol, MachOSymbolIterator,
|
||||
MachOSymbolTable, Nlist, Section, Segment, SymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit Mach-O object file.
|
||||
///
|
||||
/// This is a file that starts with [`macho::MachHeader32`], and corresponds
|
||||
/// to [`crate::FileKind::MachO32`].
|
||||
pub type MachOFile32<'data, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOFile<'data, macho::MachHeader32<Endian>, R>;
|
||||
/// A 64-bit Mach-O object file.
|
||||
///
|
||||
/// This is a file that starts with [`macho::MachHeader64`], and corresponds
|
||||
/// to [`crate::FileKind::MachO64`].
|
||||
pub type MachOFile64<'data, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOFile<'data, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A partially parsed Mach-O file.
|
||||
///
|
||||
/// Most of the functionality of this type is provided by the [`Object`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOFile<'data, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) endian: Mach::Endian,
|
||||
pub(super) data: R,
|
||||
pub(super) header_offset: u64,
|
||||
pub(super) header: &'data Mach,
|
||||
pub(super) segments: Vec<MachOSegmentInternal<'data, Mach, R>>,
|
||||
pub(super) sections: Vec<MachOSectionInternal<'data, Mach>>,
|
||||
pub(super) symbols: SymbolTable<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, Mach, R> MachOFile<'data, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the raw Mach-O file data.
|
||||
pub fn parse(data: R) -> Result<Self> {
|
||||
let header = Mach::parse(data, 0)?;
|
||||
let endian = header.endian()?;
|
||||
|
||||
// Build a list of segments and sections to make some operations more efficient.
|
||||
let mut segments = Vec::new();
|
||||
let mut sections = Vec::new();
|
||||
let mut symbols = SymbolTable::default();
|
||||
if let Ok(mut commands) = header.load_commands(endian, data, 0) {
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Some((segment, section_data)) = Mach::Segment::from_command(command)? {
|
||||
let segment_index = segments.len();
|
||||
segments.push(MachOSegmentInternal { segment, data });
|
||||
for section in segment.sections(endian, section_data)? {
|
||||
let index = SectionIndex(sections.len() + 1);
|
||||
sections.push(MachOSectionInternal::parse(index, segment_index, section));
|
||||
}
|
||||
} else if let Some(symtab) = command.symtab()? {
|
||||
symbols = symtab.symbols(endian, data)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(MachOFile {
|
||||
endian,
|
||||
data,
|
||||
header_offset: 0,
|
||||
header,
|
||||
segments,
|
||||
sections,
|
||||
symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse the Mach-O file for the given image from the dyld shared cache.
|
||||
/// This will read different sections from different subcaches, if necessary.
|
||||
pub fn parse_dyld_cache_image<'cache, E: Endian>(
|
||||
image: &DyldCacheImage<'data, 'cache, E, R>,
|
||||
) -> Result<Self> {
|
||||
let (data, header_offset) = image.image_data_and_offset()?;
|
||||
let header = Mach::parse(data, header_offset)?;
|
||||
let endian = header.endian()?;
|
||||
|
||||
// Build a list of sections to make some operations more efficient.
|
||||
// Also build a list of segments, because we need to remember which ReadRef
|
||||
// to read each section's data from. Only the DyldCache knows this information,
|
||||
// and we won't have access to it once we've exited this function.
|
||||
let mut segments = Vec::new();
|
||||
let mut sections = Vec::new();
|
||||
let mut linkedit_data: Option<R> = None;
|
||||
let mut symtab = None;
|
||||
if let Ok(mut commands) = header.load_commands(endian, data, header_offset) {
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Some((segment, section_data)) = Mach::Segment::from_command(command)? {
|
||||
// Each segment can be stored in a different subcache. Get the segment's
|
||||
// address and look it up in the cache mappings, to find the correct cache data.
|
||||
let addr = segment.vmaddr(endian).into();
|
||||
let (data, _offset) = image
|
||||
.cache
|
||||
.data_and_offset_for_address(addr)
|
||||
.read_error("Could not find segment data in dyld shared cache")?;
|
||||
if segment.name() == macho::SEG_LINKEDIT.as_bytes() {
|
||||
linkedit_data = Some(data);
|
||||
}
|
||||
let segment_index = segments.len();
|
||||
segments.push(MachOSegmentInternal { segment, data });
|
||||
|
||||
for section in segment.sections(endian, section_data)? {
|
||||
let index = SectionIndex(sections.len() + 1);
|
||||
sections.push(MachOSectionInternal::parse(index, segment_index, section));
|
||||
}
|
||||
} else if let Some(st) = command.symtab()? {
|
||||
symtab = Some(st);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The symbols are found in the __LINKEDIT segment, so make sure to read them from the
|
||||
// correct subcache.
|
||||
let symbols = match (symtab, linkedit_data) {
|
||||
(Some(symtab), Some(linkedit_data)) => symtab.symbols(endian, linkedit_data)?,
|
||||
_ => SymbolTable::default(),
|
||||
};
|
||||
|
||||
Ok(MachOFile {
|
||||
endian,
|
||||
data,
|
||||
header_offset,
|
||||
header,
|
||||
segments,
|
||||
sections,
|
||||
symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the section at the given index.
|
||||
#[inline]
|
||||
pub(super) fn section_internal(
|
||||
&self,
|
||||
index: SectionIndex,
|
||||
) -> Result<&MachOSectionInternal<'data, Mach>> {
|
||||
index
|
||||
.0
|
||||
.checked_sub(1)
|
||||
.and_then(|index| self.sections.get(index))
|
||||
.read_error("Invalid Mach-O section index")
|
||||
}
|
||||
|
||||
pub(super) fn segment_internal(
|
||||
&self,
|
||||
index: usize,
|
||||
) -> Result<&MachOSegmentInternal<'data, Mach, R>> {
|
||||
self.segments
|
||||
.get(index)
|
||||
.read_error("Invalid Mach-O segment index")
|
||||
}
|
||||
|
||||
/// Returns the endianness.
|
||||
pub fn endian(&self) -> Mach::Endian {
|
||||
self.endian
|
||||
}
|
||||
|
||||
/// Returns the raw data.
|
||||
pub fn data(&self) -> R {
|
||||
self.data
|
||||
}
|
||||
|
||||
/// Returns the raw Mach-O file header.
|
||||
pub fn raw_header(&self) -> &'data Mach {
|
||||
self.header
|
||||
}
|
||||
|
||||
/// Return the `LC_BUILD_VERSION` load command if present.
|
||||
pub fn build_version(&self) -> Result<Option<&'data macho::BuildVersionCommand<Mach::Endian>>> {
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(build_version) = command.build_version()? {
|
||||
return Ok(Some(build_version));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Mach, R> read::private::Sealed for MachOFile<'data, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Object<'data, 'file> for MachOFile<'data, Mach, R>
|
||||
where
|
||||
'data: 'file,
|
||||
Mach: MachHeader,
|
||||
R: 'file + ReadRef<'data>,
|
||||
{
|
||||
type Segment = MachOSegment<'data, 'file, Mach, R>;
|
||||
type SegmentIterator = MachOSegmentIterator<'data, 'file, Mach, R>;
|
||||
type Section = MachOSection<'data, 'file, Mach, R>;
|
||||
type SectionIterator = MachOSectionIterator<'data, 'file, Mach, R>;
|
||||
type Comdat = MachOComdat<'data, 'file, Mach, R>;
|
||||
type ComdatIterator = MachOComdatIterator<'data, 'file, Mach, R>;
|
||||
type Symbol = MachOSymbol<'data, 'file, Mach, R>;
|
||||
type SymbolIterator = MachOSymbolIterator<'data, 'file, Mach, R>;
|
||||
type SymbolTable = MachOSymbolTable<'data, 'file, Mach, R>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
fn architecture(&self) -> Architecture {
|
||||
match self.header.cputype(self.endian) {
|
||||
macho::CPU_TYPE_ARM => Architecture::Arm,
|
||||
macho::CPU_TYPE_ARM64 => Architecture::Aarch64,
|
||||
macho::CPU_TYPE_ARM64_32 => Architecture::Aarch64_Ilp32,
|
||||
macho::CPU_TYPE_X86 => Architecture::I386,
|
||||
macho::CPU_TYPE_X86_64 => Architecture::X86_64,
|
||||
macho::CPU_TYPE_MIPS => Architecture::Mips,
|
||||
macho::CPU_TYPE_POWERPC => Architecture::PowerPc,
|
||||
macho::CPU_TYPE_POWERPC64 => Architecture::PowerPc64,
|
||||
_ => Architecture::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
match (
|
||||
self.header.cputype(self.endian),
|
||||
self.header.cpusubtype(self.endian),
|
||||
) {
|
||||
(macho::CPU_TYPE_ARM64, macho::CPU_SUBTYPE_ARM64E) => Some(SubArchitecture::Arm64E),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.header.is_little_endian()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
self.header.is_type_64()
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
match self.header.filetype(self.endian) {
|
||||
macho::MH_OBJECT => ObjectKind::Relocatable,
|
||||
macho::MH_EXECUTE => ObjectKind::Executable,
|
||||
macho::MH_CORE => ObjectKind::Core,
|
||||
macho::MH_DYLIB => ObjectKind::Dynamic,
|
||||
_ => ObjectKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> MachOSegmentIterator<'data, 'file, Mach, R> {
|
||||
MachOSegmentIterator {
|
||||
file: self,
|
||||
iter: self.segments.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<MachOSection<'data, 'file, Mach, R>> {
|
||||
// Translate the "." prefix to the "__" prefix used by OSX/Mach-O, e.g.
|
||||
// ".debug_info" to "__debug_info", and limit to 16 bytes total.
|
||||
let system_name = if section_name.starts_with(b".") {
|
||||
if section_name.len() > 15 {
|
||||
Some(&section_name[1..15])
|
||||
} else {
|
||||
Some(&section_name[1..])
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let cmp_section_name = |section: &MachOSection<'data, 'file, Mach, R>| {
|
||||
section
|
||||
.name_bytes()
|
||||
.map(|name| {
|
||||
section_name == name
|
||||
|| system_name
|
||||
.filter(|system_name| {
|
||||
name.starts_with(b"__") && name[2..] == **system_name
|
||||
})
|
||||
.is_some()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
};
|
||||
|
||||
self.sections().find(cmp_section_name)
|
||||
}
|
||||
|
||||
fn section_by_index(
|
||||
&'file self,
|
||||
index: SectionIndex,
|
||||
) -> Result<MachOSection<'data, 'file, Mach, R>> {
|
||||
let internal = *self.section_internal(index)?;
|
||||
Ok(MachOSection {
|
||||
file: self,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> MachOSectionIterator<'data, 'file, Mach, R> {
|
||||
MachOSectionIterator {
|
||||
file: self,
|
||||
iter: self.sections.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> MachOComdatIterator<'data, 'file, Mach, R> {
|
||||
MachOComdatIterator { file: self }
|
||||
}
|
||||
|
||||
fn symbol_by_index(
|
||||
&'file self,
|
||||
index: SymbolIndex,
|
||||
) -> Result<MachOSymbol<'data, 'file, Mach, R>> {
|
||||
let nlist = self.symbols.symbol(index.0)?;
|
||||
MachOSymbol::new(self, index, nlist).read_error("Unsupported Mach-O symbol index")
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> MachOSymbolIterator<'data, 'file, Mach, R> {
|
||||
MachOSymbolIterator {
|
||||
file: self,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol_table(&'file self) -> Option<MachOSymbolTable<'data, 'file, Mach, R>> {
|
||||
Some(MachOSymbolTable { file: self })
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> MachOSymbolIterator<'data, 'file, Mach, R> {
|
||||
MachOSymbolIterator {
|
||||
file: self,
|
||||
index: self.symbols.len(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_symbol_table(&'file self) -> Option<MachOSymbolTable<'data, 'file, Mach, R>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn object_map(&'file self) -> ObjectMap<'data> {
|
||||
self.symbols.object_map(self.endian)
|
||||
}
|
||||
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>> {
|
||||
let mut dysymtab = None;
|
||||
let mut libraries = Vec::new();
|
||||
let twolevel = self.header.flags(self.endian) & macho::MH_TWOLEVEL != 0;
|
||||
if twolevel {
|
||||
libraries.push(&[][..]);
|
||||
}
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(command) = command.dysymtab()? {
|
||||
dysymtab = Some(command);
|
||||
}
|
||||
if twolevel {
|
||||
if let Some(dylib) = command.dylib()? {
|
||||
libraries.push(command.string(self.endian, dylib.dylib.name)?);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut imports = Vec::new();
|
||||
if let Some(dysymtab) = dysymtab {
|
||||
let index = dysymtab.iundefsym.get(self.endian) as usize;
|
||||
let number = dysymtab.nundefsym.get(self.endian) as usize;
|
||||
for i in index..(index.wrapping_add(number)) {
|
||||
let symbol = self.symbols.symbol(i)?;
|
||||
let name = symbol.name(self.endian, self.symbols.strings())?;
|
||||
let library = if twolevel {
|
||||
libraries
|
||||
.get(symbol.library_ordinal(self.endian) as usize)
|
||||
.copied()
|
||||
.read_error("Invalid Mach-O symbol library ordinal")?
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
imports.push(Import {
|
||||
name: ByteString(name),
|
||||
library: ByteString(library),
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(imports)
|
||||
}
|
||||
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
let mut dysymtab = None;
|
||||
let mut commands = self
|
||||
.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Some(command) = command.dysymtab()? {
|
||||
dysymtab = Some(command);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let mut exports = Vec::new();
|
||||
if let Some(dysymtab) = dysymtab {
|
||||
let index = dysymtab.iextdefsym.get(self.endian) as usize;
|
||||
let number = dysymtab.nextdefsym.get(self.endian) as usize;
|
||||
for i in index..(index.wrapping_add(number)) {
|
||||
let symbol = self.symbols.symbol(i)?;
|
||||
let name = symbol.name(self.endian, self.symbols.strings())?;
|
||||
let address = symbol.n_value(self.endian).into();
|
||||
exports.push(Export {
|
||||
name: ByteString(name),
|
||||
address,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(exports)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_relocations(&'file self) -> Option<NoDynamicRelocationIterator> {
|
||||
None
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.section_by_name(".debug_info").is_some()
|
||||
}
|
||||
|
||||
fn mach_uuid(&self) -> Result<Option<[u8; 16]>> {
|
||||
self.header.uuid(self.endian, self.data, self.header_offset)
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn entry(&self) -> u64 {
|
||||
if let Ok(mut commands) =
|
||||
self.header
|
||||
.load_commands(self.endian, self.data, self.header_offset)
|
||||
{
|
||||
while let Ok(Some(command)) = commands.next() {
|
||||
if let Ok(Some(command)) = command.entry_point() {
|
||||
return command.entryoff.get(self.endian);
|
||||
}
|
||||
}
|
||||
}
|
||||
0
|
||||
}
|
||||
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::MachO {
|
||||
flags: self.header.flags(self.endian),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile32`].
|
||||
pub type MachOComdatIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile64`].
|
||||
pub type MachOComdatIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdatIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOComdatIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOComdat<'data, 'file, Mach, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile32`].
|
||||
pub type MachOComdat32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdat<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile64`].
|
||||
pub type MachOComdat64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdat<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A COMDAT section group in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdat<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOComdat<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectComdat<'data> for MachOComdat<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type SectionIterator = MachOComdatSectionIterator<'data, 'file, Mach, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile32`].
|
||||
pub type MachOComdatSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatSectionIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile64`].
|
||||
pub type MachOComdatSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOComdatSectionIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`MachOFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOComdatSectionIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOComdatSectionIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`macho::MachHeader32`] and [`macho::MachHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait MachHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
type Segment: Segment<Endian = Self::Endian, Section = Self::Section>;
|
||||
type Section: Section<Endian = Self::Endian>;
|
||||
type Nlist: Nlist<Endian = Self::Endian>;
|
||||
|
||||
/// Return true if this type is a 64-bit header.
|
||||
///
|
||||
/// This is a property of the type, not a value in the header data.
|
||||
fn is_type_64(&self) -> bool;
|
||||
|
||||
/// Return true if the `magic` field signifies big-endian.
|
||||
fn is_big_endian(&self) -> bool;
|
||||
|
||||
/// Return true if the `magic` field signifies little-endian.
|
||||
fn is_little_endian(&self) -> bool;
|
||||
|
||||
fn magic(&self) -> u32;
|
||||
fn cputype(&self, endian: Self::Endian) -> u32;
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32;
|
||||
fn filetype(&self, endian: Self::Endian) -> u32;
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32;
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32;
|
||||
fn flags(&self, endian: Self::Endian) -> u32;
|
||||
|
||||
// Provided methods.
|
||||
|
||||
/// Read the file header.
|
||||
///
|
||||
/// Also checks that the magic field in the file header is a supported format.
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: u64) -> read::Result<&'data Self> {
|
||||
let header = data
|
||||
.read_at::<Self>(offset)
|
||||
.read_error("Invalid Mach-O header size or alignment")?;
|
||||
if !header.is_supported() {
|
||||
return Err(Error("Unsupported Mach-O header"));
|
||||
}
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
fn is_supported(&self) -> bool {
|
||||
self.is_little_endian() || self.is_big_endian()
|
||||
}
|
||||
|
||||
fn endian(&self) -> Result<Self::Endian> {
|
||||
Self::Endian::from_big_endian(self.is_big_endian()).read_error("Unsupported Mach-O endian")
|
||||
}
|
||||
|
||||
fn load_commands<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
header_offset: u64,
|
||||
) -> Result<LoadCommandIterator<'data, Self::Endian>> {
|
||||
let data = data
|
||||
.read_bytes_at(
|
||||
header_offset + mem::size_of::<Self>() as u64,
|
||||
self.sizeofcmds(endian).into(),
|
||||
)
|
||||
.read_error("Invalid Mach-O load command table size")?;
|
||||
Ok(LoadCommandIterator::new(endian, data, self.ncmds(endian)))
|
||||
}
|
||||
|
||||
/// Return the UUID from the `LC_UUID` load command, if one is present.
|
||||
fn uuid<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
header_offset: u64,
|
||||
) -> Result<Option<[u8; 16]>> {
|
||||
let mut commands = self.load_commands(endian, data, header_offset)?;
|
||||
while let Some(command) = commands.next()? {
|
||||
if let Ok(Some(uuid)) = command.uuid() {
|
||||
return Ok(Some(uuid.uuid));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> MachHeader for macho::MachHeader32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
type Segment = macho::SegmentCommand32<Endian>;
|
||||
type Section = macho::Section32<Endian>;
|
||||
type Nlist = macho::Nlist32<Endian>;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn is_big_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_MAGIC
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_CIGAM
|
||||
}
|
||||
|
||||
fn magic(&self) -> u32 {
|
||||
self.magic.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cputype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cputype.get(endian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cpusubtype.get(endian)
|
||||
}
|
||||
|
||||
fn filetype(&self, endian: Self::Endian) -> u32 {
|
||||
self.filetype.get(endian)
|
||||
}
|
||||
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.ncmds.get(endian)
|
||||
}
|
||||
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.sizeofcmds.get(endian)
|
||||
}
|
||||
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> MachHeader for macho::MachHeader64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
type Segment = macho::SegmentCommand64<Endian>;
|
||||
type Section = macho::Section64<Endian>;
|
||||
type Nlist = macho::Nlist64<Endian>;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_big_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_MAGIC_64
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
self.magic() == macho::MH_CIGAM_64
|
||||
}
|
||||
|
||||
fn magic(&self) -> u32 {
|
||||
self.magic.get(BigEndian)
|
||||
}
|
||||
|
||||
fn cputype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cputype.get(endian)
|
||||
}
|
||||
|
||||
fn cpusubtype(&self, endian: Self::Endian) -> u32 {
|
||||
self.cpusubtype.get(endian)
|
||||
}
|
||||
|
||||
fn filetype(&self, endian: Self::Endian) -> u32 {
|
||||
self.filetype.get(endian)
|
||||
}
|
||||
|
||||
fn ncmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.ncmds.get(endian)
|
||||
}
|
||||
|
||||
fn sizeofcmds(&self, endian: Self::Endian) -> u32 {
|
||||
self.sizeofcmds.get(endian)
|
||||
}
|
||||
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
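// Illustrative usage sketch (not part of the vendored source): parsing a
// 64-bit Mach-O image and querying it through the `Object` impl above. The
// `describe_macho` name is an assumption for the example.
#[allow(dead_code)]
fn describe_macho(data: &[u8]) -> Result<()> {
    let file = MachOFile64::<Endianness, &[u8]>::parse(data)?;
    // Header-level queries are provided by the `Object` trait implementation.
    let _arch = file.architecture();
    let _kind = file.kind();
    // `build_version` walks the load commands looking for `LC_BUILD_VERSION`.
    if let Some(version) = file.build_version()? {
        let _platform = version.platform.get(file.endian());
    }
    Ok(())
}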
382
vendor/object/src/read/macho/load_command.rs
vendored
Normal file
@@ -0,0 +1,382 @@
|
||||
use core::marker::PhantomData;
|
||||
use core::mem;
|
||||
|
||||
use crate::endian::Endian;
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::macho::{MachHeader, SymbolTable};
|
||||
use crate::read::{Bytes, Error, ReadError, ReadRef, Result, StringTable};
|
||||
|
||||
/// An iterator for the load commands from a [`MachHeader`].
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct LoadCommandIterator<'data, E: Endian> {
|
||||
endian: E,
|
||||
data: Bytes<'data>,
|
||||
ncmds: u32,
|
||||
}
|
||||
|
||||
impl<'data, E: Endian> LoadCommandIterator<'data, E> {
|
||||
pub(super) fn new(endian: E, data: &'data [u8], ncmds: u32) -> Self {
|
||||
LoadCommandIterator {
|
||||
endian,
|
||||
data: Bytes(data),
|
||||
ncmds,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the next load command.
|
||||
pub fn next(&mut self) -> Result<Option<LoadCommandData<'data, E>>> {
|
||||
if self.ncmds == 0 {
|
||||
return Ok(None);
|
||||
}
|
||||
let header = self
|
||||
.data
|
||||
.read_at::<macho::LoadCommand<E>>(0)
|
||||
.read_error("Invalid Mach-O load command header")?;
|
||||
let cmd = header.cmd.get(self.endian);
|
||||
let cmdsize = header.cmdsize.get(self.endian) as usize;
|
||||
if cmdsize < mem::size_of::<macho::LoadCommand<E>>() {
|
||||
return Err(Error("Invalid Mach-O load command size"));
|
||||
}
|
||||
let data = self
|
||||
.data
|
||||
.read_bytes(cmdsize)
|
||||
.read_error("Invalid Mach-O load command size")?;
|
||||
self.ncmds -= 1;
|
||||
Ok(Some(LoadCommandData {
|
||||
cmd,
|
||||
data,
|
||||
marker: Default::default(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// The data for a [`macho::LoadCommand`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct LoadCommandData<'data, E: Endian> {
|
||||
cmd: u32,
|
||||
// Includes the header.
|
||||
data: Bytes<'data>,
|
||||
marker: PhantomData<E>,
|
||||
}
|
||||
|
||||
impl<'data, E: Endian> LoadCommandData<'data, E> {
|
||||
/// Return the `cmd` field of the [`macho::LoadCommand`].
|
||||
///
|
||||
/// This is one of the `LC_` constants.
|
||||
pub fn cmd(&self) -> u32 {
|
||||
self.cmd
|
||||
}
|
||||
|
||||
/// Return the `cmdsize` field of the [`macho::LoadCommand`].
|
||||
pub fn cmdsize(&self) -> u32 {
|
||||
self.data.len() as u32
|
||||
}
|
||||
|
||||
/// Parse the data as the given type.
|
||||
#[inline]
|
||||
pub fn data<T: Pod>(&self) -> Result<&'data T> {
|
||||
self.data
|
||||
.read_at(0)
|
||||
.read_error("Invalid Mach-O command size")
|
||||
}
|
||||
|
||||
/// Raw bytes of this [`macho::LoadCommand`] structure.
|
||||
pub fn raw_data(&self) -> &'data [u8] {
|
||||
self.data.0
|
||||
}
|
||||
|
||||
/// Parse a load command string value.
|
||||
///
|
||||
/// Strings used by load commands are specified by offsets that are
|
||||
/// relative to the load command header.
|
||||
pub fn string(&self, endian: E, s: macho::LcStr<E>) -> Result<&'data [u8]> {
|
||||
self.data
|
||||
.read_string_at(s.offset.get(endian) as usize)
|
||||
.read_error("Invalid load command string offset")
|
||||
}
|
||||
|
||||
/// Parse the command data according to the `cmd` field.
|
||||
pub fn variant(&self) -> Result<LoadCommandVariant<'data, E>> {
|
||||
Ok(match self.cmd {
|
||||
macho::LC_SEGMENT => {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Segment32(segment, data.0)
|
||||
}
|
||||
macho::LC_SYMTAB => LoadCommandVariant::Symtab(self.data()?),
|
||||
macho::LC_THREAD | macho::LC_UNIXTHREAD => {
|
||||
let mut data = self.data;
|
||||
let thread = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Thread(thread, data.0)
|
||||
}
|
||||
macho::LC_DYSYMTAB => LoadCommandVariant::Dysymtab(self.data()?),
|
||||
macho::LC_LOAD_DYLIB
|
||||
| macho::LC_LOAD_WEAK_DYLIB
|
||||
| macho::LC_REEXPORT_DYLIB
|
||||
| macho::LC_LAZY_LOAD_DYLIB
|
||||
| macho::LC_LOAD_UPWARD_DYLIB => LoadCommandVariant::Dylib(self.data()?),
|
||||
macho::LC_ID_DYLIB => LoadCommandVariant::IdDylib(self.data()?),
|
||||
macho::LC_LOAD_DYLINKER => LoadCommandVariant::LoadDylinker(self.data()?),
|
||||
macho::LC_ID_DYLINKER => LoadCommandVariant::IdDylinker(self.data()?),
|
||||
macho::LC_PREBOUND_DYLIB => LoadCommandVariant::PreboundDylib(self.data()?),
|
||||
macho::LC_ROUTINES => LoadCommandVariant::Routines32(self.data()?),
|
||||
macho::LC_SUB_FRAMEWORK => LoadCommandVariant::SubFramework(self.data()?),
|
||||
macho::LC_SUB_UMBRELLA => LoadCommandVariant::SubUmbrella(self.data()?),
|
||||
macho::LC_SUB_CLIENT => LoadCommandVariant::SubClient(self.data()?),
|
||||
macho::LC_SUB_LIBRARY => LoadCommandVariant::SubLibrary(self.data()?),
|
||||
macho::LC_TWOLEVEL_HINTS => LoadCommandVariant::TwolevelHints(self.data()?),
|
||||
macho::LC_PREBIND_CKSUM => LoadCommandVariant::PrebindCksum(self.data()?),
|
||||
macho::LC_SEGMENT_64 => {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
LoadCommandVariant::Segment64(segment, data.0)
|
||||
}
|
||||
macho::LC_ROUTINES_64 => LoadCommandVariant::Routines64(self.data()?),
|
||||
macho::LC_UUID => LoadCommandVariant::Uuid(self.data()?),
|
||||
macho::LC_RPATH => LoadCommandVariant::Rpath(self.data()?),
|
||||
macho::LC_CODE_SIGNATURE
|
||||
| macho::LC_SEGMENT_SPLIT_INFO
|
||||
| macho::LC_FUNCTION_STARTS
|
||||
| macho::LC_DATA_IN_CODE
|
||||
| macho::LC_DYLIB_CODE_SIGN_DRS
|
||||
| macho::LC_LINKER_OPTIMIZATION_HINT
|
||||
| macho::LC_DYLD_EXPORTS_TRIE
|
||||
| macho::LC_DYLD_CHAINED_FIXUPS => LoadCommandVariant::LinkeditData(self.data()?),
|
||||
macho::LC_ENCRYPTION_INFO => LoadCommandVariant::EncryptionInfo32(self.data()?),
|
||||
macho::LC_DYLD_INFO | macho::LC_DYLD_INFO_ONLY => {
|
||||
LoadCommandVariant::DyldInfo(self.data()?)
|
||||
}
|
||||
macho::LC_VERSION_MIN_MACOSX
|
||||
| macho::LC_VERSION_MIN_IPHONEOS
|
||||
| macho::LC_VERSION_MIN_TVOS
|
||||
| macho::LC_VERSION_MIN_WATCHOS => LoadCommandVariant::VersionMin(self.data()?),
|
||||
macho::LC_DYLD_ENVIRONMENT => LoadCommandVariant::DyldEnvironment(self.data()?),
|
||||
macho::LC_MAIN => LoadCommandVariant::EntryPoint(self.data()?),
|
||||
macho::LC_SOURCE_VERSION => LoadCommandVariant::SourceVersion(self.data()?),
|
||||
macho::LC_ENCRYPTION_INFO_64 => LoadCommandVariant::EncryptionInfo64(self.data()?),
|
||||
macho::LC_LINKER_OPTION => LoadCommandVariant::LinkerOption(self.data()?),
|
||||
macho::LC_NOTE => LoadCommandVariant::Note(self.data()?),
|
||||
macho::LC_BUILD_VERSION => LoadCommandVariant::BuildVersion(self.data()?),
|
||||
macho::LC_FILESET_ENTRY => LoadCommandVariant::FilesetEntry(self.data()?),
|
||||
_ => LoadCommandVariant::Other,
|
||||
})
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SegmentCommand32`].
|
||||
///
|
||||
/// Returns the segment command and the data containing the sections.
|
||||
pub fn segment_32(self) -> Result<Option<(&'data macho::SegmentCommand32<E>, &'data [u8])>> {
|
||||
if self.cmd == macho::LC_SEGMENT {
|
||||
let mut data = self.data;
|
||||
let segment = data.read().read_error("Invalid Mach-O command size")?;
|
||||
Ok(Some((segment, data.0)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SymtabCommand`].
|
||||
///
|
||||
/// Returns the symtab command if present.
|
||||
pub fn symtab(self) -> Result<Option<&'data macho::SymtabCommand<E>>> {
|
||||
if self.cmd == macho::LC_SYMTAB {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DysymtabCommand`].
|
||||
pub fn dysymtab(self) -> Result<Option<&'data macho::DysymtabCommand<E>>> {
|
||||
if self.cmd == macho::LC_DYSYMTAB {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DylibCommand`].
|
||||
pub fn dylib(self) -> Result<Option<&'data macho::DylibCommand<E>>> {
|
||||
if self.cmd == macho::LC_LOAD_DYLIB
|
||||
|| self.cmd == macho::LC_LOAD_WEAK_DYLIB
|
||||
|| self.cmd == macho::LC_REEXPORT_DYLIB
|
||||
|| self.cmd == macho::LC_LAZY_LOAD_DYLIB
|
||||
|| self.cmd == macho::LC_LOAD_UPWARD_DYLIB
|
||||
{
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::UuidCommand`].
|
||||
pub fn uuid(self) -> Result<Option<&'data macho::UuidCommand<E>>> {
|
||||
if self.cmd == macho::LC_UUID {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::SegmentCommand64`].
|
||||
pub fn segment_64(self) -> Result<Option<(&'data macho::SegmentCommand64<E>, &'data [u8])>> {
|
||||
if self.cmd == macho::LC_SEGMENT_64 {
|
||||
let mut data = self.data;
|
||||
let command = data.read().read_error("Invalid Mach-O command size")?;
|
||||
Ok(Some((command, data.0)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::DyldInfoCommand`].
|
||||
pub fn dyld_info(self) -> Result<Option<&'data macho::DyldInfoCommand<E>>> {
|
||||
if self.cmd == macho::LC_DYLD_INFO || self.cmd == macho::LC_DYLD_INFO_ONLY {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as an [`macho::EntryPointCommand`].
|
||||
pub fn entry_point(self) -> Result<Option<&'data macho::EntryPointCommand<E>>> {
|
||||
if self.cmd == macho::LC_MAIN {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse this command as a [`macho::BuildVersionCommand`].
|
||||
pub fn build_version(self) -> Result<Option<&'data macho::BuildVersionCommand<E>>> {
|
||||
if self.cmd == macho::LC_BUILD_VERSION {
|
||||
Some(self.data()).transpose()
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
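// Illustrative usage sketch (not part of the vendored source): scanning the
// load commands of a Mach-O header via `variant` and returning the UUID if an
// `LC_UUID` command is found. This mirrors `MachHeader::uuid` and exists only
// to demonstrate `LoadCommandVariant`; the `find_uuid` name is an assumption.
#[allow(dead_code)]
fn find_uuid<'data, Mach: MachHeader, R: ReadRef<'data>>(
    header: &Mach,
    data: R,
) -> Result<Option<[u8; 16]>> {
    let endian = header.endian()?;
    let mut commands = header.load_commands(endian, data, 0)?;
    while let Some(command) = commands.next()? {
        if let LoadCommandVariant::Uuid(uuid) = command.variant()? {
            return Ok(Some(uuid.uuid));
        }
    }
    Ok(None)
}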
|
||||
|
||||
/// A [`macho::LoadCommand`] that has been interpreted according to its `cmd` field.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[non_exhaustive]
|
||||
pub enum LoadCommandVariant<'data, E: Endian> {
|
||||
/// `LC_SEGMENT`
|
||||
Segment32(&'data macho::SegmentCommand32<E>, &'data [u8]),
|
||||
/// `LC_SYMTAB`
|
||||
Symtab(&'data macho::SymtabCommand<E>),
|
||||
// obsolete: `LC_SYMSEG`
|
||||
//Symseg(&'data macho::SymsegCommand<E>),
|
||||
/// `LC_THREAD` or `LC_UNIXTHREAD`
|
||||
Thread(&'data macho::ThreadCommand<E>, &'data [u8]),
|
||||
// obsolete: `LC_IDFVMLIB` or `LC_LOADFVMLIB`
|
||||
//Fvmlib(&'data macho::FvmlibCommand<E>),
|
||||
// obsolete: `LC_IDENT`
|
||||
//Ident(&'data macho::IdentCommand<E>),
|
||||
// internal: `LC_FVMFILE`
|
||||
//Fvmfile(&'data macho::FvmfileCommand<E>),
|
||||
// internal: `LC_PREPAGE`
|
||||
/// `LC_DYSYMTAB`
|
||||
Dysymtab(&'data macho::DysymtabCommand<E>),
|
||||
/// `LC_LOAD_DYLIB`, `LC_LOAD_WEAK_DYLIB`, `LC_REEXPORT_DYLIB`,
|
||||
/// `LC_LAZY_LOAD_DYLIB`, or `LC_LOAD_UPWARD_DYLIB`
|
||||
Dylib(&'data macho::DylibCommand<E>),
|
||||
/// `LC_ID_DYLIB`
|
||||
IdDylib(&'data macho::DylibCommand<E>),
|
||||
/// `LC_LOAD_DYLINKER`
|
||||
LoadDylinker(&'data macho::DylinkerCommand<E>),
|
||||
/// `LC_ID_DYLINKER`
|
||||
IdDylinker(&'data macho::DylinkerCommand<E>),
|
||||
/// `LC_PREBOUND_DYLIB`
|
||||
PreboundDylib(&'data macho::PreboundDylibCommand<E>),
|
||||
/// `LC_ROUTINES`
|
||||
Routines32(&'data macho::RoutinesCommand32<E>),
|
||||
/// `LC_SUB_FRAMEWORK`
|
||||
SubFramework(&'data macho::SubFrameworkCommand<E>),
|
||||
/// `LC_SUB_UMBRELLA`
|
||||
SubUmbrella(&'data macho::SubUmbrellaCommand<E>),
|
||||
/// `LC_SUB_CLIENT`
|
||||
SubClient(&'data macho::SubClientCommand<E>),
|
||||
/// `LC_SUB_LIBRARY`
|
||||
SubLibrary(&'data macho::SubLibraryCommand<E>),
|
||||
/// `LC_TWOLEVEL_HINTS`
|
||||
TwolevelHints(&'data macho::TwolevelHintsCommand<E>),
|
||||
/// `LC_PREBIND_CKSUM`
|
||||
PrebindCksum(&'data macho::PrebindCksumCommand<E>),
|
||||
/// `LC_SEGMENT_64`
|
||||
Segment64(&'data macho::SegmentCommand64<E>, &'data [u8]),
|
||||
/// `LC_ROUTINES_64`
|
||||
Routines64(&'data macho::RoutinesCommand64<E>),
|
||||
/// `LC_UUID`
|
||||
Uuid(&'data macho::UuidCommand<E>),
|
||||
/// `LC_RPATH`
|
||||
Rpath(&'data macho::RpathCommand<E>),
|
||||
/// `LC_CODE_SIGNATURE`, `LC_SEGMENT_SPLIT_INFO`, `LC_FUNCTION_STARTS`,
|
||||
/// `LC_DATA_IN_CODE`, `LC_DYLIB_CODE_SIGN_DRS`, `LC_LINKER_OPTIMIZATION_HINT`,
|
||||
/// `LC_DYLD_EXPORTS_TRIE`, or `LC_DYLD_CHAINED_FIXUPS`.
|
||||
LinkeditData(&'data macho::LinkeditDataCommand<E>),
|
||||
/// `LC_ENCRYPTION_INFO`
|
||||
EncryptionInfo32(&'data macho::EncryptionInfoCommand32<E>),
|
||||
/// `LC_DYLD_INFO` or `LC_DYLD_INFO_ONLY`
|
||||
DyldInfo(&'data macho::DyldInfoCommand<E>),
|
||||
/// `LC_VERSION_MIN_MACOSX`, `LC_VERSION_MIN_IPHONEOS`, `LC_VERSION_MIN_WATCHOS`,
|
||||
/// or `LC_VERSION_MIN_TVOS`
|
||||
VersionMin(&'data macho::VersionMinCommand<E>),
|
||||
/// `LC_DYLD_ENVIRONMENT`
|
||||
DyldEnvironment(&'data macho::DylinkerCommand<E>),
|
||||
/// `LC_MAIN`
|
||||
EntryPoint(&'data macho::EntryPointCommand<E>),
|
||||
/// `LC_SOURCE_VERSION`
|
||||
SourceVersion(&'data macho::SourceVersionCommand<E>),
|
||||
/// `LC_ENCRYPTION_INFO_64`
|
||||
EncryptionInfo64(&'data macho::EncryptionInfoCommand64<E>),
|
||||
/// `LC_LINKER_OPTION`
|
||||
LinkerOption(&'data macho::LinkerOptionCommand<E>),
|
||||
/// `LC_NOTE`
|
||||
Note(&'data macho::NoteCommand<E>),
|
||||
/// `LC_BUILD_VERSION`
|
||||
BuildVersion(&'data macho::BuildVersionCommand<E>),
|
||||
/// `LC_FILESET_ENTRY`
|
||||
FilesetEntry(&'data macho::FilesetEntryCommand<E>),
|
||||
/// An unrecognized or obsolete load command.
|
||||
Other,
|
||||
}
|
||||
|
||||
impl<E: Endian> macho::SymtabCommand<E> {
|
||||
/// Return the symbol table that this command references.
|
||||
pub fn symbols<'data, Mach: MachHeader<Endian = E>, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: E,
|
||||
data: R,
|
||||
) -> Result<SymbolTable<'data, Mach, R>> {
|
||||
let symbols = data
|
||||
.read_slice_at(
|
||||
self.symoff.get(endian).into(),
|
||||
self.nsyms.get(endian) as usize,
|
||||
)
|
||||
.read_error("Invalid Mach-O symbol table offset or size")?;
|
||||
let str_start: u64 = self.stroff.get(endian).into();
|
||||
let str_end = str_start
|
||||
.checked_add(self.strsize.get(endian).into())
|
||||
.read_error("Invalid Mach-O string table length")?;
|
||||
let strings = StringTable::new(data, str_start, str_end);
|
||||
Ok(SymbolTable::new(symbols, strings))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::LittleEndian;
|
||||
|
||||
#[test]
|
||||
fn cmd_size_invalid() {
|
||||
let mut commands = LoadCommandIterator::new(LittleEndian, &[0; 8], 10);
|
||||
assert!(commands.next().is_err());
|
||||
let mut commands = LoadCommandIterator::new(LittleEndian, &[0, 0, 0, 0, 7, 0, 0, 0, 0], 10);
|
||||
assert!(commands.next().is_err());
|
||||
let mut commands = LoadCommandIterator::new(LittleEndian, &[0, 0, 0, 0, 8, 0, 0, 0, 0], 10);
|
||||
assert!(commands.next().is_ok());
|
||||
}
|
||||
}
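The test above exercises the fixed 8-byte load command header (`cmd` and `cmdsize` as little-endian `u32`s): a `cmdsize` of 7 is rejected because it is smaller than the header itself, while 8 parses as an empty command. A small sketch (illustrative values only) of building such a minimal buffer by hand:

```rust
/// Build a single little-endian load command with no payload: cmd = 0, cmdsize = 8.
fn minimal_command() -> Vec<u8> {
    let cmd: u32 = 0;
    let cmdsize: u32 = 8; // header only; anything smaller is rejected
    let mut buf = Vec::new();
    buf.extend_from_slice(&cmd.to_le_bytes());
    buf.extend_from_slice(&cmdsize.to_le_bytes());
    buf
}
```

Feeding this buffer to `LoadCommandIterator::new(LittleEndian, &buf, 1)` should yield one empty command, mirroring the last assertion in the test.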
|
||||
72
vendor/object/src/read/macho/mod.rs
vendored
Normal file
@@ -0,0 +1,72 @@
//! Support for reading Mach-O files.
//!
//! Traits are used to abstract over the difference between 32-bit and 64-bit Mach-O
//! files. The primary trait for this is [`MachHeader`].
//!
//! ## High level API
//!
//! [`MachOFile`] implements the [`Object`](crate::read::Object) trait for Mach-O files.
//! [`MachOFile`] is parameterised by [`MachHeader`] to allow reading both 32-bit and
//! 64-bit Mach-O files. There are type aliases for these parameters ([`MachOFile32`] and
//! [`MachOFile64`]).
//!
//! ## Low level API
//!
//! The [`MachHeader`] trait can be directly used to parse both [`macho::MachHeader32`]
//! and [`macho::MachHeader64`]. Additionally, [`FatHeader`] and the [`FatArch`] trait
//! can be used to iterate images in multi-architecture binaries, and [`DyldCache`] can
//! be used to locate images in a dyld shared cache.
//!
//! ### Example for low level API
//! ```no_run
//! use object::macho;
//! use object::read::macho::{MachHeader, Nlist};
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a file and displays the name of each symbol.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?;
//!     let header = macho::MachHeader64::<object::Endianness>::parse(&*data, 0)?;
//!     let endian = header.endian()?;
//!     let mut commands = header.load_commands(endian, &*data, 0)?;
//!     while let Some(command) = commands.next()? {
//!         if let Some(symtab_command) = command.symtab()? {
//!             let symbols = symtab_command.symbols::<macho::MachHeader64<_>, _>(endian, &*data)?;
//!             for symbol in symbols.iter() {
//!                 let name = symbol.name(endian, symbols.strings())?;
//!                 println!("{}", String::from_utf8_lossy(name));
//!             }
//!         }
//!     }
//! # }
//!     Ok(())
//! }
//! ```
#[cfg(doc)]
use crate::macho;

mod dyld_cache;
pub use dyld_cache::*;

mod fat;
pub use fat::*;

mod file;
pub use file::*;

mod load_command;
pub use load_command::*;

mod segment;
pub use segment::*;

mod section;
pub use section::*;

mod symbol;
pub use symbol::*;

mod relocation;
pub use relocation::*;
158
vendor/object/src/read/macho/relocation.rs
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
use core::{fmt, slice};
|
||||
|
||||
use crate::endian::Endianness;
|
||||
use crate::macho;
|
||||
use crate::read::{
|
||||
ReadRef, Relocation, RelocationEncoding, RelocationKind, RelocationTarget, SectionIndex,
|
||||
SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile};
|
||||
|
||||
/// An iterator for the relocations in a [`MachOSection32`](super::MachOSection32).
|
||||
pub type MachORelocationIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachORelocationIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the relocations in a [`MachOSection64`](super::MachOSection64).
|
||||
pub type MachORelocationIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachORelocationIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the relocations in a [`MachOSection`](super::MachOSection).
|
||||
pub struct MachORelocationIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
pub(super) relocations: slice::Iter<'data, macho::Relocation<Mach::Endian>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachORelocationIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let mut paired_addend = 0;
|
||||
loop {
|
||||
let reloc = self.relocations.next()?;
|
||||
let endian = self.file.endian;
|
||||
let cputype = self.file.header.cputype(endian);
|
||||
if reloc.r_scattered(endian, cputype) {
|
||||
// FIXME: handle scattered relocations
|
||||
// We need to add `RelocationTarget::Address` for this.
|
||||
continue;
|
||||
}
|
||||
let reloc = reloc.info(self.file.endian);
|
||||
let mut encoding = RelocationEncoding::Generic;
|
||||
let kind = match cputype {
|
||||
macho::CPU_TYPE_ARM => match (reloc.r_type, reloc.r_pcrel) {
|
||||
(macho::ARM_RELOC_VANILLA, false) => RelocationKind::Absolute,
|
||||
_ => RelocationKind::MachO {
|
||||
value: reloc.r_type,
|
||||
relative: reloc.r_pcrel,
|
||||
},
|
||||
},
|
||||
macho::CPU_TYPE_ARM64 | macho::CPU_TYPE_ARM64_32 => {
|
||||
match (reloc.r_type, reloc.r_pcrel) {
|
||||
(macho::ARM64_RELOC_UNSIGNED, false) => RelocationKind::Absolute,
|
||||
(macho::ARM64_RELOC_ADDEND, _) => {
|
||||
paired_addend = i64::from(reloc.r_symbolnum)
|
||||
.wrapping_shl(64 - 24)
|
||||
.wrapping_shr(64 - 24);
|
||||
continue;
|
||||
}
|
||||
_ => RelocationKind::MachO {
|
||||
value: reloc.r_type,
|
||||
relative: reloc.r_pcrel,
|
||||
},
|
||||
}
|
||||
}
|
||||
macho::CPU_TYPE_X86 => match (reloc.r_type, reloc.r_pcrel) {
|
||||
(macho::GENERIC_RELOC_VANILLA, false) => RelocationKind::Absolute,
|
||||
_ => RelocationKind::MachO {
|
||||
value: reloc.r_type,
|
||||
relative: reloc.r_pcrel,
|
||||
},
|
||||
},
|
||||
macho::CPU_TYPE_X86_64 => match (reloc.r_type, reloc.r_pcrel) {
|
||||
(macho::X86_64_RELOC_UNSIGNED, false) => RelocationKind::Absolute,
|
||||
(macho::X86_64_RELOC_SIGNED, true) => {
|
||||
encoding = RelocationEncoding::X86RipRelative;
|
||||
RelocationKind::Relative
|
||||
}
|
||||
(macho::X86_64_RELOC_BRANCH, true) => {
|
||||
encoding = RelocationEncoding::X86Branch;
|
||||
RelocationKind::Relative
|
||||
}
|
||||
(macho::X86_64_RELOC_GOT, true) => RelocationKind::GotRelative,
|
||||
(macho::X86_64_RELOC_GOT_LOAD, true) => {
|
||||
encoding = RelocationEncoding::X86RipRelativeMovq;
|
||||
RelocationKind::GotRelative
|
||||
}
|
||||
_ => RelocationKind::MachO {
|
||||
value: reloc.r_type,
|
||||
relative: reloc.r_pcrel,
|
||||
},
|
||||
},
|
||||
_ => RelocationKind::MachO {
|
||||
value: reloc.r_type,
|
||||
relative: reloc.r_pcrel,
|
||||
},
|
||||
};
|
||||
let size = 8 << reloc.r_length;
|
||||
let target = if reloc.r_extern {
|
||||
RelocationTarget::Symbol(SymbolIndex(reloc.r_symbolnum as usize))
|
||||
} else {
|
||||
RelocationTarget::Section(SectionIndex(reloc.r_symbolnum as usize))
|
||||
};
|
||||
let implicit_addend = paired_addend == 0;
|
||||
let mut addend = paired_addend;
|
||||
if reloc.r_pcrel {
|
||||
// For PC relative relocations on some architectures, the
|
||||
// addend does not include the offset required due to the
|
||||
// PC being different from the place of the relocation.
|
||||
// This differs from other file formats, so adjust the
|
||||
// addend here to account for this.
|
||||
match cputype {
|
||||
macho::CPU_TYPE_X86 => {
|
||||
addend -= 1 << reloc.r_length;
|
||||
}
|
||||
macho::CPU_TYPE_X86_64 => {
|
||||
addend -= 1 << reloc.r_length;
|
||||
match reloc.r_type {
|
||||
macho::X86_64_RELOC_SIGNED_1 => addend -= 1,
|
||||
macho::X86_64_RELOC_SIGNED_2 => addend -= 2,
|
||||
macho::X86_64_RELOC_SIGNED_4 => addend -= 4,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
// TODO: maybe missing support for some architectures and relocations
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
return Some((
|
||||
reloc.r_address as u64,
|
||||
Relocation {
|
||||
kind,
|
||||
encoding,
|
||||
size,
|
||||
target,
|
||||
addend,
|
||||
implicit_addend,
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
}
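To make the PC-relative addend adjustment above concrete, here is a standalone sketch of the same arithmetic for x86-64. The relocation type numbers follow the usual `mach-o/x86_64/reloc.h` numbering and are an assumption of this example, not taken from the vendored code.

```rust
/// Mirror of the x86-64 addend adjustment in `next` above, for a stored addend of 0.
fn x86_64_adjusted_addend(r_length: u8, r_type: u8) -> i64 {
    // Relocation type numbers as in mach-o/x86_64/reloc.h (assumed here).
    const X86_64_RELOC_SIGNED_1: u8 = 6;
    const X86_64_RELOC_SIGNED_2: u8 = 7;
    const X86_64_RELOC_SIGNED_4: u8 = 8;

    let mut addend = 0i64;
    addend -= 1 << r_length; // the PC points past the relocated field
    addend -= match r_type {
        X86_64_RELOC_SIGNED_1 => 1,
        X86_64_RELOC_SIGNED_2 => 2,
        X86_64_RELOC_SIGNED_4 => 4,
        _ => 0,
    };
    addend // e.g. r_length = 2 with SIGNED_4 gives -8
}
```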
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachORelocationIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("MachORelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
389
vendor/object/src/read/macho/section.rs
vendored
Normal file
@@ -0,0 +1,389 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{fmt, result, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, ObjectSection, ReadError, ReadRef, Result,
|
||||
SectionFlags, SectionIndex, SectionKind,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile, MachORelocationIterator};
|
||||
|
||||
/// An iterator for the sections in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSectionIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSectionIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the sections in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSectionIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSectionIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the sections in a [`MachOFile`].
|
||||
pub struct MachOSectionIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
pub(super) iter: slice::Iter<'file, MachOSectionInternal<'data, Mach>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachOSectionIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// It's painful to do much better than this
|
||||
f.debug_struct("MachOSectionIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSectionIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSection<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|&internal| MachOSection {
|
||||
file: self.file,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSection32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSection<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// A section in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSection64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSection<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A section in a [`MachOFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOSection<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
pub(super) internal: MachOSectionInternal<'data, Mach>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSection<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn bytes(&self) -> Result<&'data [u8]> {
|
||||
let segment_index = self.internal.segment_index;
|
||||
let segment = self.file.segment_internal(segment_index)?;
|
||||
self.internal
|
||||
.section
|
||||
.data(self.file.endian, segment.data)
|
||||
.read_error("Invalid Mach-O section size or offset")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSection<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSection<'data> for MachOSection<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type RelocationIterator = MachORelocationIterator<'data, 'file, Mach, R>;
|
||||
|
||||
#[inline]
|
||||
fn index(&self) -> SectionIndex {
|
||||
self.internal.index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.internal.section.addr(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.internal.section.size(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
let align = self.internal.section.align(self.file.endian);
|
||||
if align < 64 {
|
||||
1 << align
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
self.internal.section.file_range(self.file.endian)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
Ok(self.internal.section.name())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
str::from_utf8(self.internal.section.name())
|
||||
.ok()
|
||||
.read_error("Non UTF-8 Mach-O section name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(Some(self.internal.section.segment_name()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(Some(
|
||||
str::from_utf8(self.internal.section.segment_name())
|
||||
.ok()
|
||||
.read_error("Non UTF-8 Mach-O segment name")?,
|
||||
))
|
||||
}
|
||||
|
||||
fn kind(&self) -> SectionKind {
|
||||
self.internal.kind
|
||||
}
|
||||
|
||||
fn relocations(&self) -> MachORelocationIterator<'data, 'file, Mach, R> {
|
||||
MachORelocationIterator {
|
||||
file: self.file,
|
||||
relocations: self
|
||||
.internal
|
||||
.section
|
||||
.relocations(self.file.endian, self.file.data)
|
||||
.unwrap_or(&[])
|
||||
.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::MachO {
|
||||
flags: self.internal.section.flags(self.file.endian),
|
||||
}
|
||||
}
|
||||
}
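The implementation above is normally consumed through the unified API; a short sketch (not part of the vendored file, with a placeholder section name and simplified error handling) that pulls the `__text` bytes out of a 64-bit Mach-O file with it:

```rust
use object::read::macho::MachOFile64;
use object::{Endianness, Object, ObjectSection};

/// Return a copy of the `__TEXT,__text` section contents, if present.
fn text_bytes(data: &[u8]) -> object::Result<Vec<u8>> {
    let file = MachOFile64::<Endianness>::parse(data)?;
    for section in file.sections() {
        if section.name()? == "__text" {
            return Ok(section.data()?.to_vec());
        }
    }
    Ok(Vec::new())
}
```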
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct MachOSectionInternal<'data, Mach: MachHeader> {
|
||||
pub index: SectionIndex,
|
||||
pub segment_index: usize,
|
||||
pub kind: SectionKind,
|
||||
pub section: &'data Mach::Section,
|
||||
}
|
||||
|
||||
impl<'data, Mach: MachHeader> MachOSectionInternal<'data, Mach> {
|
||||
pub(super) fn parse(
|
||||
index: SectionIndex,
|
||||
segment_index: usize,
|
||||
section: &'data Mach::Section,
|
||||
) -> Self {
|
||||
// TODO: we don't validate flags, should we?
|
||||
let kind = match (section.segment_name(), section.name()) {
|
||||
(b"__TEXT", b"__text") => SectionKind::Text,
|
||||
(b"__TEXT", b"__const") => SectionKind::ReadOnlyData,
|
||||
(b"__TEXT", b"__cstring") => SectionKind::ReadOnlyString,
|
||||
(b"__TEXT", b"__literal4") => SectionKind::ReadOnlyData,
|
||||
(b"__TEXT", b"__literal8") => SectionKind::ReadOnlyData,
|
||||
(b"__TEXT", b"__literal16") => SectionKind::ReadOnlyData,
|
||||
(b"__TEXT", b"__eh_frame") => SectionKind::ReadOnlyData,
|
||||
(b"__TEXT", b"__gcc_except_tab") => SectionKind::ReadOnlyData,
|
||||
(b"__DATA", b"__data") => SectionKind::Data,
|
||||
(b"__DATA", b"__const") => SectionKind::ReadOnlyData,
|
||||
(b"__DATA", b"__bss") => SectionKind::UninitializedData,
|
||||
(b"__DATA", b"__common") => SectionKind::Common,
|
||||
(b"__DATA", b"__thread_data") => SectionKind::Tls,
|
||||
(b"__DATA", b"__thread_bss") => SectionKind::UninitializedTls,
|
||||
(b"__DATA", b"__thread_vars") => SectionKind::TlsVariables,
|
||||
(b"__DWARF", _) => SectionKind::Debug,
|
||||
_ => SectionKind::Unknown,
|
||||
};
|
||||
MachOSectionInternal {
|
||||
index,
|
||||
segment_index,
|
||||
kind,
|
||||
section,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`macho::Section32`] and [`macho::Section64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Section: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn sectname(&self) -> &[u8; 16];
|
||||
fn segname(&self) -> &[u8; 16];
|
||||
fn addr(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn size(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn offset(&self, endian: Self::Endian) -> u32;
|
||||
fn align(&self, endian: Self::Endian) -> u32;
|
||||
fn reloff(&self, endian: Self::Endian) -> u32;
|
||||
fn nreloc(&self, endian: Self::Endian) -> u32;
|
||||
fn flags(&self, endian: Self::Endian) -> u32;
|
||||
|
||||
/// Return the `sectname` bytes up until the null terminator.
|
||||
fn name(&self) -> &[u8] {
|
||||
let sectname = &self.sectname()[..];
|
||||
match memchr::memchr(b'\0', sectname) {
|
||||
Some(end) => &sectname[..end],
|
||||
None => sectname,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the `segname` bytes up until the null terminator.
|
||||
fn segment_name(&self) -> &[u8] {
|
||||
let segname = &self.segname()[..];
|
||||
match memchr::memchr(b'\0', segname) {
|
||||
Some(end) => &segname[..end],
|
||||
None => segname,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the offset and size of the section in the file.
|
||||
///
|
||||
/// Returns `None` for sections that have no data in the file.
|
||||
fn file_range(&self, endian: Self::Endian) -> Option<(u64, u64)> {
|
||||
match self.flags(endian) & macho::SECTION_TYPE {
|
||||
macho::S_ZEROFILL | macho::S_GB_ZEROFILL | macho::S_THREAD_LOCAL_ZEROFILL => None,
|
||||
_ => Some((self.offset(endian).into(), self.size(endian).into())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the section data.
|
||||
///
|
||||
/// Returns `Ok(&[])` if the section has no data.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> result::Result<&'data [u8], ()> {
|
||||
if let Some((offset, size)) = self.file_range(endian) {
|
||||
data.read_bytes_at(offset, size)
|
||||
} else {
|
||||
Ok(&[])
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the relocation array.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn relocations<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> Result<&'data [macho::Relocation<Self::Endian>]> {
|
||||
data.read_slice_at(self.reloff(endian).into(), self.nreloc(endian) as usize)
|
||||
.read_error("Invalid Mach-O relocations offset or number")
|
||||
}
|
||||
}
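A small sketch of how the provided methods above combine for a single section header; the endianness and the backing file buffer are assumptions of the example.

```rust
use object::macho::Section64;
use object::read::macho::Section;
use object::LittleEndian;

/// Describe where a section's bytes live, treating zerofill sections specially.
fn describe(section: &Section64<LittleEndian>, file_data: &[u8]) {
    match section.file_range(LittleEndian) {
        Some((offset, size)) => {
            let bytes = section.data(LittleEndian, file_data).unwrap_or(&[]);
            println!(
                "{},{}: {} bytes at file offset {} (size {})",
                String::from_utf8_lossy(section.segment_name()),
                String::from_utf8_lossy(section.name()),
                bytes.len(),
                offset,
                size
            );
        }
        None => println!("zerofill section, no file data"),
    }
}
```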
|
||||
|
||||
impl<Endian: endian::Endian> Section for macho::Section32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
|
||||
fn sectname(&self) -> &[u8; 16] {
|
||||
&self.sectname
|
||||
}
|
||||
fn segname(&self) -> &[u8; 16] {
|
||||
&self.segname
|
||||
}
|
||||
fn addr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.addr.get(endian)
|
||||
}
|
||||
fn size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.size.get(endian)
|
||||
}
|
||||
fn offset(&self, endian: Self::Endian) -> u32 {
|
||||
self.offset.get(endian)
|
||||
}
|
||||
fn align(&self, endian: Self::Endian) -> u32 {
|
||||
self.align.get(endian)
|
||||
}
|
||||
fn reloff(&self, endian: Self::Endian) -> u32 {
|
||||
self.reloff.get(endian)
|
||||
}
|
||||
fn nreloc(&self, endian: Self::Endian) -> u32 {
|
||||
self.nreloc.get(endian)
|
||||
}
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Section for macho::Section64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
|
||||
fn sectname(&self) -> &[u8; 16] {
|
||||
&self.sectname
|
||||
}
|
||||
fn segname(&self) -> &[u8; 16] {
|
||||
&self.segname
|
||||
}
|
||||
fn addr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.addr.get(endian)
|
||||
}
|
||||
fn size(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.size.get(endian)
|
||||
}
|
||||
fn offset(&self, endian: Self::Endian) -> u32 {
|
||||
self.offset.get(endian)
|
||||
}
|
||||
fn align(&self, endian: Self::Endian) -> u32 {
|
||||
self.align.get(endian)
|
||||
}
|
||||
fn reloff(&self, endian: Self::Endian) -> u32 {
|
||||
self.reloff.get(endian)
|
||||
}
|
||||
fn nreloc(&self, endian: Self::Endian) -> u32 {
|
||||
self.nreloc.get(endian)
|
||||
}
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
303
vendor/object/src/read/macho/segment.rs
vendored
Normal file
@@ -0,0 +1,303 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{result, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::{self, ObjectSegment, ReadError, ReadRef, Result, SegmentFlags};
|
||||
|
||||
use super::{LoadCommandData, MachHeader, MachOFile, Section};
|
||||
|
||||
/// An iterator for the segments in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSegmentIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSegmentIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the segments in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSegmentIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSegmentIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the segments in a [`MachOFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct MachOSegmentIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
pub(super) iter: slice::Iter<'file, MachOSegmentInternal<'data, Mach, R>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSegmentIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSegment<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|internal| MachOSegment {
|
||||
file: self.file,
|
||||
internal,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSegment32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSegment<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// A segment in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSegment64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSegment<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A segment in a [`MachOFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct MachOSegment<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
internal: &'file MachOSegmentInternal<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSegment<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn bytes(&self) -> Result<&'data [u8]> {
|
||||
self.internal
|
||||
.segment
|
||||
.data(self.file.endian, self.file.data)
|
||||
.read_error("Invalid Mach-O segment size or offset")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSegment<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSegment<'data> for MachOSegment<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.internal.segment.vmaddr(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.internal.segment.vmsize(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
// Page size.
|
||||
0x1000
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
self.internal.segment.file_range(self.file.endian)
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(Some(self.internal.segment.name()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
Ok(Some(
|
||||
str::from_utf8(self.internal.segment.name())
|
||||
.ok()
|
||||
.read_error("Non UTF-8 Mach-O segment name")?,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
let flags = self.internal.segment.flags(self.file.endian);
|
||||
let maxprot = self.internal.segment.maxprot(self.file.endian);
|
||||
let initprot = self.internal.segment.initprot(self.file.endian);
|
||||
SegmentFlags::MachO {
|
||||
flags,
|
||||
maxprot,
|
||||
initprot,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct MachOSegmentInternal<'data, Mach: MachHeader, R: ReadRef<'data>> {
|
||||
pub data: R,
|
||||
pub segment: &'data Mach::Segment,
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`macho::SegmentCommand32`] and [`macho::SegmentCommand64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Segment: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
type Section: Section<Endian = Self::Endian>;
|
||||
|
||||
fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>>;
|
||||
|
||||
fn cmd(&self, endian: Self::Endian) -> u32;
|
||||
fn cmdsize(&self, endian: Self::Endian) -> u32;
|
||||
fn segname(&self) -> &[u8; 16];
|
||||
fn vmaddr(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn vmsize(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn fileoff(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn filesize(&self, endian: Self::Endian) -> Self::Word;
|
||||
fn maxprot(&self, endian: Self::Endian) -> u32;
|
||||
fn initprot(&self, endian: Self::Endian) -> u32;
|
||||
fn nsects(&self, endian: Self::Endian) -> u32;
|
||||
fn flags(&self, endian: Self::Endian) -> u32;
|
||||
|
||||
/// Return the `segname` bytes up until the null terminator.
|
||||
fn name(&self) -> &[u8] {
|
||||
let segname = &self.segname()[..];
|
||||
match memchr::memchr(b'\0', segname) {
|
||||
Some(end) => &segname[..end],
|
||||
None => segname,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the offset and size of the segment in the file.
|
||||
fn file_range(&self, endian: Self::Endian) -> (u64, u64) {
|
||||
(self.fileoff(endian).into(), self.filesize(endian).into())
|
||||
}
|
||||
|
||||
/// Get the segment data from the file data.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
data: R,
|
||||
) -> result::Result<&'data [u8], ()> {
|
||||
let (offset, size) = self.file_range(endian);
|
||||
data.read_bytes_at(offset, size)
|
||||
}
|
||||
|
||||
/// Get the array of sections from the data following the segment command.
|
||||
///
|
||||
/// Returns `Err` for invalid values.
|
||||
fn sections<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
section_data: R,
|
||||
) -> Result<&'data [Self::Section]> {
|
||||
section_data
|
||||
.read_slice_at(0, self.nsects(endian) as usize)
|
||||
.read_error("Invalid Mach-O number of sections")
|
||||
}
|
||||
}
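A sketch of driving the trait above directly from a load command: recover the typed 64-bit segment via `from_command` and list the section headers that follow it. The endianness value and printing are assumptions of the example.

```rust
use object::macho::SegmentCommand64;
use object::read::macho::{LoadCommandData, Section, Segment};
use object::Endianness;

/// Print `segment,section` for every section header following an LC_SEGMENT_64.
fn print_segment_sections(
    command: LoadCommandData<'_, Endianness>,
    endian: Endianness,
) -> object::Result<()> {
    if let Some((segment, section_data)) = SegmentCommand64::from_command(command)? {
        for section in segment.sections(endian, section_data)? {
            println!(
                "{},{}",
                String::from_utf8_lossy(section.segment_name()),
                String::from_utf8_lossy(section.name())
            );
        }
    }
    Ok(())
}
```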
|
||||
|
||||
impl<Endian: endian::Endian> Segment for macho::SegmentCommand32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
type Section = macho::Section32<Self::Endian>;
|
||||
|
||||
fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>> {
|
||||
command.segment_32()
|
||||
}
|
||||
|
||||
fn cmd(&self, endian: Self::Endian) -> u32 {
|
||||
self.cmd.get(endian)
|
||||
}
|
||||
fn cmdsize(&self, endian: Self::Endian) -> u32 {
|
||||
self.cmdsize.get(endian)
|
||||
}
|
||||
fn segname(&self) -> &[u8; 16] {
|
||||
&self.segname
|
||||
}
|
||||
fn vmaddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.vmaddr.get(endian)
|
||||
}
|
||||
fn vmsize(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.vmsize.get(endian)
|
||||
}
|
||||
fn fileoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.fileoff.get(endian)
|
||||
}
|
||||
fn filesize(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.filesize.get(endian)
|
||||
}
|
||||
fn maxprot(&self, endian: Self::Endian) -> u32 {
|
||||
self.maxprot.get(endian)
|
||||
}
|
||||
fn initprot(&self, endian: Self::Endian) -> u32 {
|
||||
self.initprot.get(endian)
|
||||
}
|
||||
fn nsects(&self, endian: Self::Endian) -> u32 {
|
||||
self.nsects.get(endian)
|
||||
}
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Segment for macho::SegmentCommand64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
type Section = macho::Section64<Self::Endian>;
|
||||
|
||||
fn from_command(command: LoadCommandData<'_, Self::Endian>) -> Result<Option<(&Self, &[u8])>> {
|
||||
command.segment_64()
|
||||
}
|
||||
|
||||
fn cmd(&self, endian: Self::Endian) -> u32 {
|
||||
self.cmd.get(endian)
|
||||
}
|
||||
fn cmdsize(&self, endian: Self::Endian) -> u32 {
|
||||
self.cmdsize.get(endian)
|
||||
}
|
||||
fn segname(&self) -> &[u8; 16] {
|
||||
&self.segname
|
||||
}
|
||||
fn vmaddr(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.vmaddr.get(endian)
|
||||
}
|
||||
fn vmsize(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.vmsize.get(endian)
|
||||
}
|
||||
fn fileoff(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.fileoff.get(endian)
|
||||
}
|
||||
fn filesize(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.filesize.get(endian)
|
||||
}
|
||||
fn maxprot(&self, endian: Self::Endian) -> u32 {
|
||||
self.maxprot.get(endian)
|
||||
}
|
||||
fn initprot(&self, endian: Self::Endian) -> u32 {
|
||||
self.initprot.get(endian)
|
||||
}
|
||||
fn nsects(&self, endian: Self::Endian) -> u32 {
|
||||
self.nsects.get(endian)
|
||||
}
|
||||
fn flags(&self, endian: Self::Endian) -> u32 {
|
||||
self.flags.get(endian)
|
||||
}
|
||||
}
|
||||
492
vendor/object/src/read/macho/symbol.rs
vendored
Normal file
@@ -0,0 +1,492 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
use core::{fmt, slice, str};
|
||||
|
||||
use crate::endian::{self, Endianness};
|
||||
use crate::macho;
|
||||
use crate::pod::Pod;
|
||||
use crate::read::util::StringTable;
|
||||
use crate::read::{
|
||||
self, ObjectMap, ObjectMapEntry, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result,
|
||||
SectionIndex, SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolMap, SymbolMapEntry,
|
||||
SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
use super::{MachHeader, MachOFile};
|
||||
|
||||
/// A table of symbol entries in a Mach-O file.
|
||||
///
|
||||
/// Also includes the string table used for the symbol names.
|
||||
///
|
||||
/// Returned by [`macho::SymtabCommand::symbols`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct SymbolTable<'data, Mach: MachHeader, R = &'data [u8]>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
symbols: &'data [Mach::Nlist],
|
||||
strings: StringTable<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, Mach: MachHeader, R: ReadRef<'data>> Default for SymbolTable<'data, Mach, R> {
|
||||
fn default() -> Self {
|
||||
SymbolTable {
|
||||
symbols: &[],
|
||||
strings: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Mach: MachHeader, R: ReadRef<'data>> SymbolTable<'data, Mach, R> {
|
||||
#[inline]
|
||||
pub(super) fn new(symbols: &'data [Mach::Nlist], strings: StringTable<'data, R>) -> Self {
|
||||
SymbolTable { symbols, strings }
|
||||
}
|
||||
|
||||
/// Return the string table used for the symbol names.
|
||||
#[inline]
|
||||
pub fn strings(&self) -> StringTable<'data, R> {
|
||||
self.strings
|
||||
}
|
||||
|
||||
/// Iterate over the symbols.
|
||||
#[inline]
|
||||
pub fn iter(&self) -> slice::Iter<'data, Mach::Nlist> {
|
||||
self.symbols.iter()
|
||||
}
|
||||
|
||||
/// Return true if the symbol table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.symbols.is_empty()
|
||||
}
|
||||
|
||||
/// The number of symbols.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.symbols.len()
|
||||
}
|
||||
|
||||
/// Return the symbol at the given index.
|
||||
pub fn symbol(&self, index: usize) -> Result<&'data Mach::Nlist> {
|
||||
self.symbols
|
||||
.get(index)
|
||||
.read_error("Invalid Mach-O symbol index")
|
||||
}
|
||||
|
||||
/// Construct a map from addresses to a user-defined map entry.
|
||||
pub fn map<Entry: SymbolMapEntry, F: Fn(&'data Mach::Nlist) -> Option<Entry>>(
|
||||
&self,
|
||||
f: F,
|
||||
) -> SymbolMap<Entry> {
|
||||
let mut symbols = Vec::new();
|
||||
for nlist in self.symbols {
|
||||
if !nlist.is_definition() {
|
||||
continue;
|
||||
}
|
||||
if let Some(entry) = f(nlist) {
|
||||
symbols.push(entry);
|
||||
}
|
||||
}
|
||||
SymbolMap::new(symbols)
|
||||
}
|
||||
|
||||
/// Construct a map from addresses to symbol names and object file names.
|
||||
pub fn object_map(&self, endian: Mach::Endian) -> ObjectMap<'data> {
|
||||
let mut symbols = Vec::new();
|
||||
let mut objects = Vec::new();
|
||||
let mut object = None;
|
||||
let mut current_function = None;
|
||||
// Each module starts with one or two N_SO symbols (path, or directory + filename)
|
||||
// and one N_OSO symbol. The module is terminated by an empty N_SO symbol.
|
||||
for nlist in self.symbols {
|
||||
let n_type = nlist.n_type();
|
||||
if n_type & macho::N_STAB == 0 {
|
||||
continue;
|
||||
}
|
||||
// TODO: includes variables too (N_GSYM, N_STSYM). These may need to get their
|
||||
// address from regular symbols though.
|
||||
match n_type {
|
||||
macho::N_SO => {
|
||||
object = None;
|
||||
}
|
||||
macho::N_OSO => {
|
||||
object = None;
|
||||
if let Ok(name) = nlist.name(endian, self.strings) {
|
||||
if !name.is_empty() {
|
||||
object = Some(objects.len());
|
||||
objects.push(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
macho::N_FUN => {
|
||||
if let Ok(name) = nlist.name(endian, self.strings) {
|
||||
if !name.is_empty() {
|
||||
current_function = Some((name, nlist.n_value(endian).into()))
|
||||
} else if let Some((name, address)) = current_function.take() {
|
||||
if let Some(object) = object {
|
||||
symbols.push(ObjectMapEntry {
|
||||
address,
|
||||
size: nlist.n_value(endian).into(),
|
||||
name,
|
||||
object,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
ObjectMap {
|
||||
symbols: SymbolMap::new(symbols),
|
||||
objects,
|
||||
}
|
||||
}
|
||||
}
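A sketch of the `map` helper above, building the address-to-name map that higher-level code typically wants. `SymbolMapName` is the entry type the crate provides for this purpose; its use here is illustrative rather than prescribed by this file.

```rust
use core::str;

use object::macho::MachHeader64;
use object::read::macho::{Nlist, SymbolTable};
use object::{Endianness, SymbolMap, SymbolMapName};

/// Map each defined symbol's address to its (UTF-8) name.
fn name_map<'data>(
    endian: Endianness,
    symtab: &SymbolTable<'data, MachHeader64<Endianness>>,
) -> SymbolMap<SymbolMapName<'data>> {
    symtab.map(|nlist| {
        let name = nlist.name(endian, symtab.strings()).ok()?;
        let name = str::from_utf8(name).ok()?;
        Some(SymbolMapName::new(nlist.n_value(endian).into(), name))
    })
}
```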
|
||||
|
||||
/// A symbol table in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSymbolTable32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbolTable<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// A symbol table in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSymbolTable64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbolTable<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol table in a [`MachOFile`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct MachOSymbolTable<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSymbolTable<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSymbolTable<'data> for MachOSymbolTable<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Symbol = MachOSymbol<'data, 'file, Mach, R>;
|
||||
type SymbolIterator = MachOSymbolIterator<'data, 'file, Mach, R>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
MachOSymbolIterator {
|
||||
file: self.file,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
|
||||
let nlist = self.file.symbols.symbol(index.0)?;
|
||||
MachOSymbol::new(self.file, index, nlist).read_error("Unsupported Mach-O symbol index")
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSymbolIterator32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbolIterator<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// An iterator for the symbols in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSymbolIterator64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbolIterator<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// An iterator for the symbols in a [`MachOFile`].
|
||||
pub struct MachOSymbolIterator<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file MachOFile<'data, Mach, R>,
|
||||
pub(super) index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> fmt::Debug for MachOSymbolIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("MachOSymbolIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> Iterator for MachOSymbolIterator<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = MachOSymbol<'data, 'file, Mach, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let index = self.index;
|
||||
let nlist = self.file.symbols.symbols.get(index)?;
|
||||
self.index += 1;
|
||||
if let Some(symbol) = MachOSymbol::new(self.file, SymbolIndex(index), nlist) {
|
||||
return Some(symbol);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`MachOFile32`](super::MachOFile32).
|
||||
pub type MachOSymbol32<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbol<'data, 'file, macho::MachHeader32<Endian>, R>;
|
||||
/// A symbol in a [`MachOFile64`](super::MachOFile64).
|
||||
pub type MachOSymbol64<'data, 'file, Endian = Endianness, R = &'data [u8]> =
|
||||
MachOSymbol<'data, 'file, macho::MachHeader64<Endian>, R>;
|
||||
|
||||
/// A symbol in a [`MachOFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct MachOSymbol<'data, 'file, Mach, R = &'data [u8]>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
index: SymbolIndex,
|
||||
nlist: &'data Mach::Nlist,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> MachOSymbol<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) fn new(
|
||||
file: &'file MachOFile<'data, Mach, R>,
|
||||
index: SymbolIndex,
|
||||
nlist: &'data Mach::Nlist,
|
||||
) -> Option<Self> {
|
||||
if nlist.n_type() & macho::N_STAB != 0 {
|
||||
return None;
|
||||
}
|
||||
Some(MachOSymbol { file, index, nlist })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> read::private::Sealed for MachOSymbol<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Mach, R> ObjectSymbol<'data> for MachOSymbol<'data, 'file, Mach, R>
|
||||
where
|
||||
Mach: MachHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> Result<&'data [u8]> {
|
||||
self.nlist.name(self.file.endian, self.file.symbols.strings)
|
||||
}
|
||||
|
||||
fn name(&self) -> Result<&'data str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 Mach-O symbol name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.nlist.n_value(self.file.endian).into()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn kind(&self) -> SymbolKind {
|
||||
self.section()
|
||||
.index()
|
||||
.and_then(|index| self.file.section_internal(index).ok())
|
||||
.map(|section| match section.kind {
|
||||
SectionKind::Text => SymbolKind::Text,
|
||||
SectionKind::Data
|
||||
| SectionKind::ReadOnlyData
|
||||
| SectionKind::ReadOnlyString
|
||||
| SectionKind::UninitializedData
|
||||
| SectionKind::Common => SymbolKind::Data,
|
||||
SectionKind::Tls | SectionKind::UninitializedTls | SectionKind::TlsVariables => {
|
||||
SymbolKind::Tls
|
||||
}
|
||||
_ => SymbolKind::Unknown,
|
||||
})
|
||||
.unwrap_or(SymbolKind::Unknown)
|
||||
}
|
||||
|
||||
fn section(&self) -> SymbolSection {
|
||||
match self.nlist.n_type() & macho::N_TYPE {
|
||||
macho::N_UNDF => SymbolSection::Undefined,
|
||||
macho::N_ABS => SymbolSection::Absolute,
|
||||
macho::N_SECT => {
|
||||
let n_sect = self.nlist.n_sect();
|
||||
if n_sect != 0 {
|
||||
SymbolSection::Section(SectionIndex(n_sect as usize))
|
||||
} else {
|
||||
SymbolSection::Unknown
|
||||
}
|
||||
}
|
||||
_ => SymbolSection::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.nlist.n_type() & macho::N_TYPE == macho::N_UNDF
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
self.nlist.is_definition()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
// Mach-O common symbols are based on section, not symbol
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
self.nlist.n_desc(self.file.endian) & (macho::N_WEAK_REF | macho::N_WEAK_DEF) != 0
|
||||
}
|
||||
|
||||
fn scope(&self) -> SymbolScope {
|
||||
let n_type = self.nlist.n_type();
|
||||
if n_type & macho::N_TYPE == macho::N_UNDF {
|
||||
SymbolScope::Unknown
|
||||
} else if n_type & macho::N_EXT == 0 {
|
||||
SymbolScope::Compilation
|
||||
} else if n_type & macho::N_PEXT != 0 {
|
||||
SymbolScope::Linkage
|
||||
} else {
|
||||
SymbolScope::Dynamic
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
self.scope() != SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
self.scope() == SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
let n_desc = self.nlist.n_desc(self.file.endian);
|
||||
SymbolFlags::MachO { n_desc }
|
||||
}
|
||||
}
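The symbol accessors above are usually reached through the unified API; a short sketch (error handling and output format assumed) listing every non-STAB symbol of a 64-bit Mach-O file:

```rust
use object::read::macho::MachOFile64;
use object::{Endianness, Object, ObjectSymbol};

/// Print address, kind and name for each symbol the iterator above yields.
fn dump_symbols(data: &[u8]) -> object::Result<()> {
    let file = MachOFile64::<Endianness>::parse(data)?;
    for symbol in file.symbols() {
        println!(
            "{:016x} {:?} {}",
            symbol.address(),
            symbol.kind(),
            symbol.name().unwrap_or("<non-UTF-8>")
        );
    }
    Ok(())
}
```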
|
||||
|
||||
/// A trait for generic access to [`macho::Nlist32`] and [`macho::Nlist64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Nlist: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type Endian: endian::Endian;
|
||||
|
||||
fn n_strx(&self, endian: Self::Endian) -> u32;
|
||||
fn n_type(&self) -> u8;
|
||||
fn n_sect(&self) -> u8;
|
||||
fn n_desc(&self, endian: Self::Endian) -> u16;
|
||||
fn n_value(&self, endian: Self::Endian) -> Self::Word;
|
||||
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
endian: Self::Endian,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
strings
|
||||
.get(self.n_strx(endian))
|
||||
.read_error("Invalid Mach-O symbol name offset")
|
||||
}
|
||||
|
||||
/// Return true if this is a STAB symbol.
|
||||
///
|
||||
/// This determines the meaning of the `n_type` field.
|
||||
fn is_stab(&self) -> bool {
|
||||
self.n_type() & macho::N_STAB != 0
|
||||
}
|
||||
|
||||
/// Return true if this is an undefined symbol.
|
||||
fn is_undefined(&self) -> bool {
|
||||
let n_type = self.n_type();
|
||||
n_type & macho::N_STAB == 0 && n_type & macho::N_TYPE == macho::N_UNDF
|
||||
}
|
||||
|
||||
/// Return true if the symbol is a definition of a function or data object.
|
||||
fn is_definition(&self) -> bool {
|
||||
let n_type = self.n_type();
|
||||
n_type & macho::N_STAB == 0 && n_type & macho::N_TYPE == macho::N_SECT
|
||||
}
|
||||
|
||||
/// Return the library ordinal.
|
||||
///
|
||||
/// This is either a 1-based index into the dylib load commands,
|
||||
/// or a special ordinal.
|
||||
#[inline]
|
||||
fn library_ordinal(&self, endian: Self::Endian) -> u8 {
|
||||
(self.n_desc(endian) >> 8) as u8
|
||||
}
|
||||
}
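A standalone sketch of the `n_type` bit layout that the provided methods above decode. The two extra type values, `N_PBUD` and `N_INDR`, come from the `macho` module and are included for completeness.

```rust
use object::macho;

/// Classify a raw `n_type` byte the same way `is_stab`/`is_undefined`/`is_definition` do.
fn classify(n_type: u8) -> &'static str {
    if n_type & macho::N_STAB != 0 {
        return "debugging (stab) entry";
    }
    match n_type & macho::N_TYPE {
        macho::N_UNDF => "undefined",
        macho::N_ABS => "absolute",
        macho::N_SECT => "defined in a section",
        macho::N_PBUD => "prebound undefined",
        macho::N_INDR => "indirect",
        _ => "unknown",
    }
}
```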
|
||||
|
||||
impl<Endian: endian::Endian> Nlist for macho::Nlist32<Endian> {
|
||||
type Word = u32;
|
||||
type Endian = Endian;
|
||||
|
||||
fn n_strx(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_strx.get(endian)
|
||||
}
|
||||
fn n_type(&self) -> u8 {
|
||||
self.n_type
|
||||
}
|
||||
fn n_sect(&self) -> u8 {
|
||||
self.n_sect
|
||||
}
|
||||
fn n_desc(&self, endian: Self::Endian) -> u16 {
|
||||
self.n_desc.get(endian)
|
||||
}
|
||||
fn n_value(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.n_value.get(endian)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Endian: endian::Endian> Nlist for macho::Nlist64<Endian> {
|
||||
type Word = u64;
|
||||
type Endian = Endian;
|
||||
|
||||
fn n_strx(&self, endian: Self::Endian) -> u32 {
|
||||
self.n_strx.get(endian)
|
||||
}
|
||||
fn n_type(&self) -> u8 {
|
||||
self.n_type
|
||||
}
|
||||
fn n_sect(&self) -> u8 {
|
||||
self.n_sect
|
||||
}
|
||||
fn n_desc(&self, endian: Self::Endian) -> u16 {
|
||||
self.n_desc.get(endian)
|
||||
}
|
||||
fn n_value(&self, endian: Self::Endian) -> Self::Word {
|
||||
self.n_value.get(endian)
|
||||
}
|
||||
}
|
||||
860
vendor/object/src/read/mod.rs
vendored
Normal file
@@ -0,0 +1,860 @@
|
||||
//! Interface for reading object files.
|
||||
//!
|
||||
//! ## Unified read API
|
||||
//!
|
||||
//! The [`Object`] trait provides a unified read API for accessing common features of
|
||||
//! object files, such as sections and symbols. There is an implementation of this
|
||||
//! trait for [`File`], which allows reading any file format, as well as implementations
|
||||
//! for each file format:
|
||||
//! [`ElfFile`](elf::ElfFile), [`MachOFile`](macho::MachOFile), [`CoffFile`](coff::CoffFile),
|
||||
//! [`PeFile`](pe::PeFile), [`WasmFile`](wasm::WasmFile), [`XcoffFile`](xcoff::XcoffFile).
|
||||
//!
|
||||
//! ## Low level read API
|
||||
//!
|
||||
//! The submodules for each file format define helpers that operate on the raw structs.
|
||||
//! These can be used instead of the unified API, or in conjunction with it to access
|
||||
//! details that are not available via the unified API.
|
||||
//!
|
||||
//! See the [submodules](#modules) for examples of the low level read API.
|
||||
//!
|
||||
//! ## Naming Convention
|
||||
//!
|
||||
//! Types that form part of the unified API for a file format are prefixed with the
|
||||
//! name of the file format.
|
||||
//!
|
||||
//! ## Example for unified read API
|
||||
//! ```no_run
|
||||
//! use object::{Object, ObjectSection};
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let file = object::File::parse(&*data)?;
|
||||
//! for section in file.sections() {
|
||||
//! println!("{}", section.name()?);
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::vec::Vec;
|
||||
use core::{fmt, result};
|
||||
|
||||
use crate::common::*;
|
||||
|
||||
mod read_ref;
|
||||
pub use read_ref::*;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod read_cache;
|
||||
#[cfg(feature = "std")]
|
||||
pub use read_cache::*;
|
||||
|
||||
mod util;
|
||||
pub use util::*;
|
||||
|
||||
#[cfg(any(
|
||||
feature = "coff",
|
||||
feature = "elf",
|
||||
feature = "macho",
|
||||
feature = "pe",
|
||||
feature = "wasm",
|
||||
feature = "xcoff"
|
||||
))]
|
||||
mod any;
|
||||
#[cfg(any(
|
||||
feature = "coff",
|
||||
feature = "elf",
|
||||
feature = "macho",
|
||||
feature = "pe",
|
||||
feature = "wasm",
|
||||
feature = "xcoff"
|
||||
))]
|
||||
pub use any::*;
|
||||
|
||||
#[cfg(feature = "archive")]
|
||||
pub mod archive;
|
||||
|
||||
#[cfg(feature = "coff")]
|
||||
pub mod coff;
|
||||
|
||||
#[cfg(feature = "elf")]
|
||||
pub mod elf;
|
||||
|
||||
#[cfg(feature = "macho")]
|
||||
pub mod macho;
|
||||
|
||||
#[cfg(feature = "pe")]
|
||||
pub mod pe;
|
||||
|
||||
#[cfg(feature = "wasm")]
|
||||
pub mod wasm;
|
||||
|
||||
#[cfg(feature = "xcoff")]
|
||||
pub mod xcoff;
|
||||
|
||||
mod traits;
|
||||
pub use traits::*;
|
||||
|
||||
mod private {
|
||||
pub trait Sealed {}
|
||||
}
|
||||
|
||||
/// The error type used within the read module.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Error(&'static str);
|
||||
|
||||
impl fmt::Display for Error {
|
||||
#[inline]
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
/// The result type used within the read module.
|
||||
pub type Result<T> = result::Result<T, Error>;
|
||||
|
||||
trait ReadError<T> {
|
||||
fn read_error(self, error: &'static str) -> Result<T>;
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for result::Result<T, ()> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.map_err(|()| Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for result::Result<T, Error> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.map_err(|_| Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for Option<T> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.ok_or(Error(error))
|
||||
}
|
||||
}
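These adapters are crate-internal, so the sketch below represents an internal call site rather than user code; the function name and message are hypothetical.

```rust
fn first_byte(data: &[u8]) -> Result<u8> {
    // Option -> Result with a static error message, as in the impl above.
    data.first().copied().read_error("Invalid empty buffer")
}
```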
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(
|
||||
unix,
|
||||
not(target_os = "macos"),
|
||||
target_pointer_width = "32",
|
||||
feature = "elf"
|
||||
))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = elf::ElfFile32<'data, crate::Endianness, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(
|
||||
unix,
|
||||
not(target_os = "macos"),
|
||||
target_pointer_width = "64",
|
||||
feature = "elf"
|
||||
))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = elf::ElfFile64<'data, crate::Endianness, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(target_os = "macos", target_pointer_width = "32", feature = "macho"))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = macho::MachOFile32<'data, crate::Endianness, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(target_os = "macos", target_pointer_width = "64", feature = "macho"))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = macho::MachOFile64<'data, crate::Endianness, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(target_os = "windows", target_pointer_width = "32", feature = "pe"))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = pe::PeFile32<'data, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(target_os = "windows", target_pointer_width = "64", feature = "pe"))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = pe::PeFile64<'data, R>;
|
||||
|
||||
/// The native executable file for the target platform.
|
||||
#[cfg(all(feature = "wasm", target_arch = "wasm32", feature = "wasm"))]
|
||||
pub type NativeFile<'data, R = &'data [u8]> = wasm::WasmFile<'data, R>;
|
||||
|
||||
/// A file format kind.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum FileKind {
|
||||
/// A Unix archive.
|
||||
///
|
||||
/// See [`archive::ArchiveFile`].
|
||||
#[cfg(feature = "archive")]
|
||||
Archive,
|
||||
/// A COFF object file.
|
||||
///
|
||||
/// See [`coff::CoffFile`].
|
||||
#[cfg(feature = "coff")]
|
||||
Coff,
|
||||
/// A COFF bigobj object file.
|
||||
///
|
||||
/// This supports a larger number of sections.
|
||||
///
|
||||
/// See [`coff::CoffBigFile`].
|
||||
#[cfg(feature = "coff")]
|
||||
CoffBig,
|
||||
/// A Windows short import file.
|
||||
///
|
||||
/// See [`coff::ImportFile`].
|
||||
#[cfg(feature = "coff")]
|
||||
CoffImport,
|
||||
/// A dyld cache file containing Mach-O images.
|
||||
///
|
||||
/// See [`macho::DyldCache`]
|
||||
#[cfg(feature = "macho")]
|
||||
DyldCache,
|
||||
/// A 32-bit ELF file.
|
||||
///
|
||||
/// See [`elf::ElfFile32`].
|
||||
#[cfg(feature = "elf")]
|
||||
Elf32,
|
||||
/// A 64-bit ELF file.
|
||||
///
|
||||
/// See [`elf::ElfFile64`].
|
||||
#[cfg(feature = "elf")]
|
||||
Elf64,
|
||||
/// A 32-bit Mach-O file.
|
||||
///
|
||||
/// See [`macho::MachOFile32`].
|
||||
#[cfg(feature = "macho")]
|
||||
MachO32,
|
||||
/// A 64-bit Mach-O file.
|
||||
///
|
||||
/// See [`macho::MachOFile64`].
|
||||
#[cfg(feature = "macho")]
|
||||
MachO64,
|
||||
/// A 32-bit Mach-O fat binary.
|
||||
///
|
||||
/// See [`macho::FatHeader::parse_arch32`].
|
||||
#[cfg(feature = "macho")]
|
||||
MachOFat32,
|
||||
/// A 64-bit Mach-O fat binary.
|
||||
///
|
||||
/// See [`macho::FatHeader::parse_arch64`].
|
||||
#[cfg(feature = "macho")]
|
||||
MachOFat64,
|
||||
/// A 32-bit PE file.
|
||||
///
|
||||
/// See [`pe::PeFile32`].
|
||||
#[cfg(feature = "pe")]
|
||||
Pe32,
|
||||
/// A 64-bit PE file.
|
||||
///
|
||||
/// See [`pe::PeFile64`].
|
||||
#[cfg(feature = "pe")]
|
||||
Pe64,
|
||||
/// A Wasm file.
|
||||
///
|
||||
/// See [`wasm::WasmFile`].
|
||||
#[cfg(feature = "wasm")]
|
||||
Wasm,
|
||||
/// A 32-bit XCOFF file.
|
||||
///
|
||||
/// See [`xcoff::XcoffFile32`].
|
||||
#[cfg(feature = "xcoff")]
|
||||
Xcoff32,
|
||||
/// A 64-bit XCOFF file.
|
||||
///
|
||||
/// See [`xcoff::XcoffFile64`].
|
||||
#[cfg(feature = "xcoff")]
|
||||
Xcoff64,
|
||||
}
|
||||
|
||||
impl FileKind {
|
||||
/// Determine a file kind by parsing the start of the file.
|
||||
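///
/// A minimal `no_run` sketch; the path is illustrative and the `std`
/// feature is assumed for reading the file.
/// ```no_run
/// # #[cfg(feature = "std")] {
/// let data = std::fs::read("path/to/binary").unwrap();
/// let kind = object::FileKind::parse(&*data).unwrap();
/// println!("{:?}", kind);
/// # }
/// ```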
pub fn parse<'data, R: ReadRef<'data>>(data: R) -> Result<FileKind> {
|
||||
Self::parse_at(data, 0)
|
||||
}
|
||||
|
||||
/// Determine a file kind by parsing at the given offset.
|
||||
pub fn parse_at<'data, R: ReadRef<'data>>(data: R, offset: u64) -> Result<FileKind> {
|
||||
let magic = data
|
||||
.read_bytes_at(offset, 16)
|
||||
.read_error("Could not read file magic")?;
|
||||
if magic.len() < 16 {
|
||||
return Err(Error("File too short"));
|
||||
}
|
||||
|
||||
let kind = match [magic[0], magic[1], magic[2], magic[3], magic[4], magic[5], magic[6], magic[7]] {
|
||||
#[cfg(feature = "archive")]
|
||||
[b'!', b'<', b'a', b'r', b'c', b'h', b'>', b'\n'] => FileKind::Archive,
|
||||
#[cfg(feature = "macho")]
|
||||
[b'd', b'y', b'l', b'd', b'_', b'v', b'1', b' '] => FileKind::DyldCache,
|
||||
#[cfg(feature = "elf")]
|
||||
[0x7f, b'E', b'L', b'F', 1, ..] => FileKind::Elf32,
|
||||
#[cfg(feature = "elf")]
|
||||
[0x7f, b'E', b'L', b'F', 2, ..] => FileKind::Elf64,
|
||||
#[cfg(feature = "macho")]
|
||||
[0xfe, 0xed, 0xfa, 0xce, ..]
|
||||
| [0xce, 0xfa, 0xed, 0xfe, ..] => FileKind::MachO32,
|
||||
#[cfg(feature = "macho")]
|
||||
| [0xfe, 0xed, 0xfa, 0xcf, ..]
|
||||
| [0xcf, 0xfa, 0xed, 0xfe, ..] => FileKind::MachO64,
|
||||
#[cfg(feature = "macho")]
|
||||
[0xca, 0xfe, 0xba, 0xbe, ..] => FileKind::MachOFat32,
|
||||
#[cfg(feature = "macho")]
|
||||
[0xca, 0xfe, 0xba, 0xbf, ..] => FileKind::MachOFat64,
|
||||
#[cfg(feature = "wasm")]
|
||||
[0x00, b'a', b's', b'm', ..] => FileKind::Wasm,
|
||||
#[cfg(feature = "pe")]
|
||||
[b'M', b'Z', ..] if offset == 0 => {
|
||||
// offset == 0 restriction is because optional_header_magic only looks at offset 0
|
||||
match pe::optional_header_magic(data) {
|
||||
Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR32_MAGIC) => {
|
||||
FileKind::Pe32
|
||||
}
|
||||
Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR64_MAGIC) => {
|
||||
FileKind::Pe64
|
||||
}
|
||||
_ => return Err(Error("Unknown MS-DOS file")),
|
||||
}
|
||||
}
|
||||
// TODO: more COFF machines
|
||||
#[cfg(feature = "coff")]
|
||||
// COFF arm
|
||||
[0xc4, 0x01, ..]
|
||||
// COFF arm64
|
||||
| [0x64, 0xaa, ..]
|
||||
// COFF arm64ec
|
||||
| [0x41, 0xa6, ..]
|
||||
// COFF x86
|
||||
| [0x4c, 0x01, ..]
|
||||
// COFF x86-64
|
||||
| [0x64, 0x86, ..] => FileKind::Coff,
|
||||
#[cfg(feature = "coff")]
|
||||
[0x00, 0x00, 0xff, 0xff, 0x00, 0x00, ..] => FileKind::CoffImport,
|
||||
#[cfg(feature = "coff")]
|
||||
[0x00, 0x00, 0xff, 0xff, 0x02, 0x00, ..] if offset == 0 => {
|
||||
// offset == 0 restriction is because anon_object_class_id only looks at offset 0
|
||||
match coff::anon_object_class_id(data) {
|
||||
Ok(crate::pe::ANON_OBJECT_HEADER_BIGOBJ_CLASS_ID) => FileKind::CoffBig,
|
||||
_ => return Err(Error("Unknown anon object file")),
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "xcoff")]
|
||||
[0x01, 0xdf, ..] => FileKind::Xcoff32,
|
||||
#[cfg(feature = "xcoff")]
|
||||
[0x01, 0xf7, ..] => FileKind::Xcoff64,
|
||||
_ => return Err(Error("Unknown file magic")),
|
||||
};
|
||||
Ok(kind)
|
||||
}
|
||||
}
|
||||
|
||||
/// An object kind.
|
||||
///
|
||||
/// Returned by [`Object::kind`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum ObjectKind {
|
||||
/// The object kind is unknown.
|
||||
Unknown,
|
||||
/// Relocatable object.
|
||||
Relocatable,
|
||||
/// Executable.
|
||||
Executable,
|
||||
/// Dynamic shared object.
|
||||
Dynamic,
|
||||
/// Core.
|
||||
Core,
|
||||
}
|
||||
|
||||
/// The index used to identify a section in a file.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct SectionIndex(pub usize);
|
||||
|
||||
/// The index used to identify a symbol in a symbol table.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct SymbolIndex(pub usize);
|
||||
|
||||
/// The section where an [`ObjectSymbol`] is defined.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SymbolSection {
|
||||
/// The section is unknown.
|
||||
Unknown,
|
||||
/// The section is not applicable for this symbol (such as file symbols).
|
||||
None,
|
||||
/// The symbol is undefined.
|
||||
Undefined,
|
||||
/// The symbol has an absolute value.
|
||||
Absolute,
|
||||
/// The symbol is a zero-initialized symbol that will be combined with duplicate definitions.
|
||||
Common,
|
||||
/// The symbol is defined in the given section.
|
||||
Section(SectionIndex),
|
||||
}
|
||||
|
||||
impl SymbolSection {
|
||||
/// Returns the section index for the section where the symbol is defined.
|
||||
///
|
||||
/// May return `None` if the symbol is not defined in a section.
|
||||
#[inline]
|
||||
pub fn index(self) -> Option<SectionIndex> {
|
||||
if let SymbolSection::Section(index) = self {
|
||||
Some(index)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An entry in a [`SymbolMap`].
|
||||
pub trait SymbolMapEntry {
|
||||
/// The symbol address.
|
||||
fn address(&self) -> u64;
|
||||
}
|
||||
|
||||
/// A map from addresses to symbol information.
|
||||
///
|
||||
/// The symbol information depends on the chosen entry type, such as [`SymbolMapName`].
|
||||
///
|
||||
/// Returned by [`Object::symbol_map`].
|
||||
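///
/// A small self-contained sketch of address lookup; the addresses and
/// names here are purely illustrative.
/// ```
/// use object::read::{SymbolMap, SymbolMapName};
///
/// let map = SymbolMap::new(vec![
///     SymbolMapName::new(0x1000, "start"),
///     SymbolMapName::new(0x2000, "main"),
/// ]);
/// assert_eq!(map.get(0x1500).map(|s| s.name()), Some("start"));
/// assert_eq!(map.get(0x0fff), None);
/// ```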
#[derive(Debug, Default, Clone)]
|
||||
pub struct SymbolMap<T: SymbolMapEntry> {
|
||||
symbols: Vec<T>,
|
||||
}
|
||||
|
||||
impl<T: SymbolMapEntry> SymbolMap<T> {
|
||||
/// Construct a new symbol map.
|
||||
///
|
||||
/// This function will sort the symbols by address.
|
||||
pub fn new(mut symbols: Vec<T>) -> Self {
|
||||
symbols.sort_by_key(|s| s.address());
|
||||
SymbolMap { symbols }
|
||||
}
|
||||
|
||||
/// Get the symbol at or before the given address.
|
||||
pub fn get(&self, address: u64) -> Option<&T> {
|
||||
let index = match self
|
||||
.symbols
|
||||
.binary_search_by_key(&address, |symbol| symbol.address())
|
||||
{
|
||||
Ok(index) => index,
|
||||
Err(index) => index.checked_sub(1)?,
|
||||
};
|
||||
self.symbols.get(index)
|
||||
}
|
||||
|
||||
/// Get all symbols in the map.
|
||||
#[inline]
|
||||
pub fn symbols(&self) -> &[T] {
|
||||
&self.symbols
|
||||
}
|
||||
}
|
||||
|
||||
/// The type used for entries in a [`SymbolMap`] that maps from addresses to names.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct SymbolMapName<'data> {
|
||||
address: u64,
|
||||
name: &'data str,
|
||||
}
|
||||
|
||||
impl<'data> SymbolMapName<'data> {
|
||||
/// Construct a `SymbolMapName`.
|
||||
pub fn new(address: u64, name: &'data str) -> Self {
|
||||
SymbolMapName { address, name }
|
||||
}
|
||||
|
||||
/// The symbol address.
|
||||
#[inline]
|
||||
pub fn address(&self) -> u64 {
|
||||
self.address
|
||||
}
|
||||
|
||||
/// The symbol name.
|
||||
#[inline]
|
||||
pub fn name(&self) -> &'data str {
|
||||
self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> SymbolMapEntry for SymbolMapName<'data> {
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.address
|
||||
}
|
||||
}
|
||||
|
||||
/// A map from addresses to symbol names and object files.
|
||||
///
|
||||
/// This is derived from STAB entries in Mach-O files.
|
||||
///
|
||||
/// Returned by [`Object::object_map`].
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct ObjectMap<'data> {
|
||||
symbols: SymbolMap<ObjectMapEntry<'data>>,
|
||||
objects: Vec<&'data [u8]>,
|
||||
}
|
||||
|
||||
impl<'data> ObjectMap<'data> {
|
||||
/// Get the entry containing the given address.
|
||||
pub fn get(&self, address: u64) -> Option<&ObjectMapEntry<'data>> {
|
||||
self.symbols
|
||||
.get(address)
|
||||
.filter(|entry| entry.size == 0 || address.wrapping_sub(entry.address) < entry.size)
|
||||
}
|
||||
|
||||
/// Get all symbols in the map.
|
||||
#[inline]
|
||||
pub fn symbols(&self) -> &[ObjectMapEntry<'data>] {
|
||||
self.symbols.symbols()
|
||||
}
|
||||
|
||||
/// Get all objects in the map.
|
||||
#[inline]
|
||||
pub fn objects(&self) -> &[&'data [u8]] {
|
||||
&self.objects
|
||||
}
|
||||
}
|
||||
|
||||
/// An [`ObjectMap`] entry.
|
||||
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ObjectMapEntry<'data> {
|
||||
address: u64,
|
||||
size: u64,
|
||||
name: &'data [u8],
|
||||
object: usize,
|
||||
}
|
||||
|
||||
impl<'data> ObjectMapEntry<'data> {
|
||||
/// Get the symbol address.
|
||||
#[inline]
|
||||
pub fn address(&self) -> u64 {
|
||||
self.address
|
||||
}
|
||||
|
||||
/// Get the symbol size.
|
||||
///
|
||||
/// This may be 0 if the size is unknown.
|
||||
#[inline]
|
||||
pub fn size(&self) -> u64 {
|
||||
self.size
|
||||
}
|
||||
|
||||
/// Get the symbol name.
|
||||
#[inline]
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
self.name
|
||||
}
|
||||
|
||||
/// Get the index of the object file name.
|
||||
#[inline]
|
||||
pub fn object_index(&self) -> usize {
|
||||
self.object
|
||||
}
|
||||
|
||||
/// Get the object file name.
|
||||
#[inline]
|
||||
pub fn object(&self, map: &ObjectMap<'data>) -> &'data [u8] {
|
||||
map.objects[self.object]
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> SymbolMapEntry for ObjectMapEntry<'data> {
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.address
|
||||
}
|
||||
}
|
||||
|
||||
/// An imported symbol.
|
||||
///
|
||||
/// Returned by [`Object::imports`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Import<'data> {
|
||||
library: ByteString<'data>,
|
||||
// TODO: or ordinal
|
||||
name: ByteString<'data>,
|
||||
}
|
||||
|
||||
impl<'data> Import<'data> {
|
||||
/// The symbol name.
|
||||
#[inline]
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
self.name.0
|
||||
}
|
||||
|
||||
/// The name of the library to import the symbol from.
|
||||
#[inline]
|
||||
pub fn library(&self) -> &'data [u8] {
|
||||
self.library.0
|
||||
}
|
||||
}
|
||||
|
||||
/// An exported symbol.
|
||||
///
|
||||
/// Returned by [`Object::exports`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Export<'data> {
|
||||
// TODO: and ordinal?
|
||||
name: ByteString<'data>,
|
||||
address: u64,
|
||||
}
|
||||
|
||||
impl<'data> Export<'data> {
|
||||
/// The symbol name.
|
||||
#[inline]
|
||||
pub fn name(&self) -> &'data [u8] {
|
||||
self.name.0
|
||||
}
|
||||
|
||||
/// The virtual address of the symbol.
|
||||
#[inline]
|
||||
pub fn address(&self) -> u64 {
|
||||
self.address
|
||||
}
|
||||
}
|
||||
|
||||
/// PDB information from the debug directory in a PE file.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct CodeView<'data> {
|
||||
guid: [u8; 16],
|
||||
path: ByteString<'data>,
|
||||
age: u32,
|
||||
}
|
||||
|
||||
impl<'data> CodeView<'data> {
|
||||
/// The path to the PDB as stored in CodeView.
|
||||
#[inline]
|
||||
pub fn path(&self) -> &'data [u8] {
|
||||
self.path.0
|
||||
}
|
||||
|
||||
/// The age of the PDB.
|
||||
#[inline]
|
||||
pub fn age(&self) -> u32 {
|
||||
self.age
|
||||
}
|
||||
|
||||
/// The GUID of the PDB.
|
||||
#[inline]
|
||||
pub fn guid(&self) -> [u8; 16] {
|
||||
self.guid
|
||||
}
|
||||
}
|
||||
|
||||
/// The target referenced by a [`Relocation`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum RelocationTarget {
|
||||
/// The target is a symbol.
|
||||
Symbol(SymbolIndex),
|
||||
/// The target is a section.
|
||||
Section(SectionIndex),
|
||||
/// The offset is an absolute address.
|
||||
Absolute,
|
||||
}
|
||||
|
||||
/// A relocation entry.
|
||||
///
|
||||
/// Returned by [`Object::dynamic_relocations`] or [`ObjectSection::relocations`].
|
||||
#[derive(Debug)]
|
||||
pub struct Relocation {
|
||||
kind: RelocationKind,
|
||||
encoding: RelocationEncoding,
|
||||
size: u8,
|
||||
target: RelocationTarget,
|
||||
addend: i64,
|
||||
implicit_addend: bool,
|
||||
}
|
||||
|
||||
impl Relocation {
|
||||
/// The operation used to calculate the result of the relocation.
|
||||
#[inline]
|
||||
pub fn kind(&self) -> RelocationKind {
|
||||
self.kind
|
||||
}
|
||||
|
||||
/// Information about how the result of the relocation operation is encoded in the place.
|
||||
#[inline]
|
||||
pub fn encoding(&self) -> RelocationEncoding {
|
||||
self.encoding
|
||||
}
|
||||
|
||||
/// The size in bits of the place of the relocation.
|
||||
///
|
||||
/// If 0, then the size is determined by the relocation kind.
|
||||
#[inline]
|
||||
pub fn size(&self) -> u8 {
|
||||
self.size
|
||||
}
|
||||
|
||||
/// The target of the relocation.
|
||||
#[inline]
|
||||
pub fn target(&self) -> RelocationTarget {
|
||||
self.target
|
||||
}
|
||||
|
||||
/// The addend to use in the relocation calculation.
|
||||
#[inline]
|
||||
pub fn addend(&self) -> i64 {
|
||||
self.addend
|
||||
}
|
||||
|
||||
/// Set the addend to use in the relocation calculation.
|
||||
#[inline]
|
||||
pub fn set_addend(&mut self, addend: i64) {
|
||||
self.addend = addend
|
||||
}
|
||||
|
||||
/// Returns true if there is an implicit addend stored in the data at the offset
|
||||
/// to be relocated.
|
||||
#[inline]
|
||||
pub fn has_implicit_addend(&self) -> bool {
|
||||
self.implicit_addend
|
||||
}
|
||||
}
|
||||
|
||||
/// A data compression format.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum CompressionFormat {
|
||||
/// The data is uncompressed.
|
||||
None,
|
||||
/// The data is compressed, but the compression format is unknown.
|
||||
Unknown,
|
||||
/// ZLIB/DEFLATE.
|
||||
///
|
||||
/// Used for ELF compression and GNU compressed debug information.
|
||||
Zlib,
|
||||
/// Zstandard.
|
||||
///
|
||||
/// Used for ELF compression.
|
||||
Zstandard,
|
||||
}
|
||||
|
||||
/// A range in a file that may be compressed.
|
||||
///
|
||||
/// Returned by [`ObjectSection::compressed_file_range`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct CompressedFileRange {
|
||||
/// The data compression format.
|
||||
pub format: CompressionFormat,
|
||||
/// The file offset of the compressed data.
|
||||
pub offset: u64,
|
||||
/// The compressed data size.
|
||||
pub compressed_size: u64,
|
||||
/// The uncompressed data size.
|
||||
pub uncompressed_size: u64,
|
||||
}
|
||||
|
||||
impl CompressedFileRange {
|
||||
/// Data that is uncompressed.
|
||||
#[inline]
|
||||
pub fn none(range: Option<(u64, u64)>) -> Self {
|
||||
if let Some((offset, size)) = range {
|
||||
CompressedFileRange {
|
||||
format: CompressionFormat::None,
|
||||
offset,
|
||||
compressed_size: size,
|
||||
uncompressed_size: size,
|
||||
}
|
||||
} else {
|
||||
CompressedFileRange {
|
||||
format: CompressionFormat::None,
|
||||
offset: 0,
|
||||
compressed_size: 0,
|
||||
uncompressed_size: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert to [`CompressedData`] by reading from the file.
|
||||
pub fn data<'data, R: ReadRef<'data>>(self, file: R) -> Result<CompressedData<'data>> {
|
||||
let data = file
|
||||
.read_bytes_at(self.offset, self.compressed_size)
|
||||
.read_error("Invalid compressed data size or offset")?;
|
||||
Ok(CompressedData {
|
||||
format: self.format,
|
||||
data,
|
||||
uncompressed_size: self.uncompressed_size,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Data that may be compressed.
|
||||
///
|
||||
/// Returned by [`ObjectSection::compressed_data`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct CompressedData<'data> {
|
||||
/// The data compression format.
|
||||
pub format: CompressionFormat,
|
||||
/// The compressed data.
|
||||
pub data: &'data [u8],
|
||||
/// The uncompressed data size.
|
||||
pub uncompressed_size: u64,
|
||||
}
|
||||
|
||||
impl<'data> CompressedData<'data> {
|
||||
/// Data that is uncompressed.
|
||||
#[inline]
|
||||
pub fn none(data: &'data [u8]) -> Self {
|
||||
CompressedData {
|
||||
format: CompressionFormat::None,
|
||||
data,
|
||||
uncompressed_size: data.len() as u64,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the uncompressed data.
|
||||
///
|
||||
/// Returns an error for invalid data or unsupported compression.
|
||||
/// This includes if the data is compressed but the `compression` feature
|
||||
/// for this crate is disabled.
|
||||
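///
/// A minimal sketch for the uncompressed case; the bytes are illustrative.
/// ```
/// use object::CompressedData;
///
/// let data = CompressedData::none(b"hello");
/// assert_eq!(data.decompress().unwrap().as_ref(), &b"hello"[..]);
/// ```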
pub fn decompress(self) -> Result<Cow<'data, [u8]>> {
|
||||
match self.format {
|
||||
CompressionFormat::None => Ok(Cow::Borrowed(self.data)),
|
||||
#[cfg(feature = "compression")]
|
||||
CompressionFormat::Zlib => {
|
||||
use core::convert::TryInto;
|
||||
let size = self
|
||||
.uncompressed_size
|
||||
.try_into()
|
||||
.ok()
|
||||
.read_error("Uncompressed data size is too large.")?;
|
||||
let mut decompressed = Vec::with_capacity(size);
|
||||
let mut decompress = flate2::Decompress::new(true);
|
||||
decompress
|
||||
.decompress_vec(
|
||||
self.data,
|
||||
&mut decompressed,
|
||||
flate2::FlushDecompress::Finish,
|
||||
)
|
||||
.ok()
|
||||
.read_error("Invalid zlib compressed data")?;
|
||||
Ok(Cow::Owned(decompressed))
|
||||
}
|
||||
#[cfg(feature = "compression")]
|
||||
CompressionFormat::Zstandard => {
|
||||
use core::convert::TryInto;
|
||||
use std::io::Read;
|
||||
let size = self
|
||||
.uncompressed_size
|
||||
.try_into()
|
||||
.ok()
|
||||
.read_error("Uncompressed data size is too large.")?;
|
||||
let mut decompressed = Vec::with_capacity(size);
|
||||
let mut decoder = ruzstd::StreamingDecoder::new(self.data)
|
||||
.ok()
|
||||
.read_error("Invalid zstd compressed data")?;
|
||||
decoder
|
||||
.read_to_end(&mut decompressed)
|
||||
.ok()
|
||||
.read_error("Invalid zstd compressed data")?;
|
||||
Ok(Cow::Owned(decompressed))
|
||||
}
|
||||
_ => Err(Error("Unsupported compressed data.")),
|
||||
}
|
||||
}
|
||||
}
|
||||
213
vendor/object/src/read/pe/data_directory.rs
vendored
Normal file
@@ -0,0 +1,213 @@
|
||||
use core::slice;
|
||||
|
||||
use crate::read::{Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE};
|
||||
|
||||
use super::{
|
||||
DelayLoadImportTable, ExportTable, ImportTable, RelocationBlockIterator, ResourceDirectory,
|
||||
SectionTable,
|
||||
};
|
||||
|
||||
/// The table of data directories in a PE file.
|
||||
///
|
||||
/// Returned by [`ImageNtHeaders::parse`](super::ImageNtHeaders::parse).
|
||||
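///
/// A hedged `no_run` sketch that lists each non-empty directory entry; the
/// file path and the choice of PE32+ are assumptions for illustration.
/// ```no_run
/// use object::pe;
/// use object::read::pe::ImageNtHeaders;
/// use object::LittleEndian as LE;
/// # #[cfg(feature = "std")] {
/// // Placeholder path; substitute a real PE32+ file.
/// let data = std::fs::read("example.dll").unwrap();
/// let dos_header = pe::ImageDosHeader::parse(&*data).unwrap();
/// let mut offset = dos_header.nt_headers_offset().into();
/// let (_nt_headers, data_directories) = pe::ImageNtHeaders64::parse(&*data, &mut offset).unwrap();
/// for (index, dir) in data_directories.enumerate() {
///     if dir.virtual_address.get(LE) != 0 {
///         let (va, size) = dir.address_range();
///         println!("directory {}: {:#x} (size {:#x})", index, va, size);
///     }
/// }
/// # }
/// ```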
#[derive(Debug, Clone, Copy)]
|
||||
pub struct DataDirectories<'data> {
|
||||
entries: &'data [pe::ImageDataDirectory],
|
||||
}
|
||||
|
||||
impl<'data> DataDirectories<'data> {
|
||||
/// Parse the data directory table.
|
||||
///
|
||||
/// `data` must be the remaining optional data following the
|
||||
/// [optional header](pe::ImageOptionalHeader64). `number` must be from the
|
||||
/// [`number_of_rva_and_sizes`](pe::ImageOptionalHeader64::number_of_rva_and_sizes)
|
||||
/// field of the optional header.
|
||||
pub fn parse(data: &'data [u8], number: u32) -> Result<Self> {
|
||||
let entries = data
|
||||
.read_slice_at(0, number as usize)
|
||||
.read_error("Invalid PE number of RVA and sizes")?;
|
||||
Ok(DataDirectories { entries })
|
||||
}
|
||||
|
||||
/// The number of data directories.
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.entries.len()
|
||||
}
|
||||
|
||||
/// Iterator over the data directories.
|
||||
pub fn iter(&self) -> slice::Iter<'data, pe::ImageDataDirectory> {
|
||||
self.entries.iter()
|
||||
}
|
||||
|
||||
/// Iterator which gives the directories as well as their index (one of the `IMAGE_DIRECTORY_ENTRY_*` constants).
|
||||
pub fn enumerate(&self) -> core::iter::Enumerate<slice::Iter<'data, pe::ImageDataDirectory>> {
|
||||
self.entries.iter().enumerate()
|
||||
}
|
||||
|
||||
/// Returns the data directory at the given index.
|
||||
///
|
||||
/// Index should be one of the `IMAGE_DIRECTORY_ENTRY_*` constants.
|
||||
///
|
||||
/// Returns `None` if the index is larger than the table size,
|
||||
/// or if the entry at the index has a zero virtual address.
|
||||
pub fn get(&self, index: usize) -> Option<&'data pe::ImageDataDirectory> {
|
||||
self.entries
|
||||
.get(index)
|
||||
.filter(|d| d.virtual_address.get(LE) != 0)
|
||||
}
|
||||
|
||||
/// Returns the unparsed export directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn export_directory<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<&'data pe::ImageExportDirectory>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_EXPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let export_data = data_dir.data(data, sections)?;
|
||||
ExportTable::parse_directory(export_data).map(Some)
|
||||
}
|
||||
|
||||
/// Returns the partially parsed export directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn export_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ExportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_EXPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let export_va = data_dir.virtual_address.get(LE);
|
||||
let export_data = data_dir.data(data, sections)?;
|
||||
ExportTable::parse(export_data, export_va).map(Some)
|
||||
}
|
||||
|
||||
/// Returns the partially parsed import directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn import_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ImportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_IMPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let import_va = data_dir.virtual_address.get(LE);
|
||||
let (section_data, section_va) = sections
|
||||
.pe_data_containing(data, import_va)
|
||||
.read_error("Invalid import data dir virtual address")?;
|
||||
Ok(Some(ImportTable::new(section_data, section_va, import_va)))
|
||||
}
|
||||
|
||||
/// Returns the partially parsed delay-load import directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn delay_load_import_table<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<DelayLoadImportTable<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let import_va = data_dir.virtual_address.get(LE);
|
||||
let (section_data, section_va) = sections
|
||||
.pe_data_containing(data, import_va)
|
||||
.read_error("Invalid import data dir virtual address")?;
|
||||
Ok(Some(DelayLoadImportTable::new(
|
||||
section_data,
|
||||
section_va,
|
||||
import_va,
|
||||
)))
|
||||
}
|
||||
|
||||
/// Returns the blocks in the base relocation directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn relocation_blocks<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<RelocationBlockIterator<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_BASERELOC) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let reloc_data = data_dir.data(data, sections)?;
|
||||
Ok(Some(RelocationBlockIterator::new(reloc_data)))
|
||||
}
|
||||
|
||||
/// Returns the resource directory.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
pub fn resource_directory<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<Option<ResourceDirectory<'data>>> {
|
||||
let data_dir = match self.get(pe::IMAGE_DIRECTORY_ENTRY_RESOURCE) {
|
||||
Some(data_dir) => data_dir,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let rsrc_data = data_dir.data(data, sections)?;
|
||||
Ok(Some(ResourceDirectory::new(rsrc_data)))
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageDataDirectory {
|
||||
/// Return the virtual address range of this directory entry.
|
||||
pub fn address_range(&self) -> (u32, u32) {
|
||||
(self.virtual_address.get(LE), self.size.get(LE))
|
||||
}
|
||||
|
||||
/// Return the file offset and size of this directory entry.
|
||||
///
|
||||
/// This function has some limitations:
|
||||
/// - It requires that the data is contained in a single section.
|
||||
/// - It uses the size field of the directory entry, which is
|
||||
/// not desirable for all data directories.
|
||||
/// - It uses the `virtual_address` of the directory entry as an address,
|
||||
/// which is not valid for `IMAGE_DIRECTORY_ENTRY_SECURITY`.
|
||||
pub fn file_range(&self, sections: &SectionTable<'_>) -> Result<(u32, u32)> {
|
||||
let (offset, section_size) = sections
|
||||
.pe_file_range_at(self.virtual_address.get(LE))
|
||||
.read_error("Invalid data dir virtual address")?;
|
||||
let size = self.size.get(LE);
|
||||
if size > section_size {
|
||||
return Err(Error("Invalid data dir size"));
|
||||
}
|
||||
Ok((offset, size))
|
||||
}
|
||||
|
||||
/// Get the data referenced by this directory entry.
|
||||
///
|
||||
/// This function has some limitations:
|
||||
/// - It requires that the data is contained in a single section.
|
||||
/// - It uses the size field of the directory entry, which is
|
||||
/// not desirable for all data directories.
|
||||
/// - It uses the `virtual_address` of the directory entry as an address,
|
||||
/// which is not valid for `IMAGE_DIRECTORY_ENTRY_SECURITY`.
|
||||
pub fn data<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
sections: &SectionTable<'data>,
|
||||
) -> Result<&'data [u8]> {
|
||||
sections
|
||||
.pe_data_at(data, self.virtual_address.get(LE))
|
||||
.read_error("Invalid data dir virtual address")?
|
||||
.get(..self.size.get(LE) as usize)
|
||||
.read_error("Invalid data dir size")
|
||||
}
|
||||
}
|
||||
333
vendor/object/src/read/pe/export.rs
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::read::{ByteString, Bytes, Error, ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE, U16Bytes, U32Bytes};
|
||||
|
||||
/// Where an export is pointing to.
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum ExportTarget<'data> {
|
||||
/// The address of the export, relative to the image base.
|
||||
Address(u32),
|
||||
/// Forwarded to an export ordinal in another DLL.
|
||||
///
|
||||
/// This gives the name of the DLL, and the ordinal.
|
||||
ForwardByOrdinal(&'data [u8], u32),
|
||||
/// Forwarded to an export name in another DLL.
|
||||
///
|
||||
/// This gives the name of the DLL, and the export name.
|
||||
ForwardByName(&'data [u8], &'data [u8]),
|
||||
}
|
||||
|
||||
impl<'data> ExportTarget<'data> {
|
||||
/// Returns true if the target is an address.
|
||||
pub fn is_address(&self) -> bool {
|
||||
match self {
|
||||
ExportTarget::Address(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the export is forwarded to another DLL.
|
||||
pub fn is_forward(&self) -> bool {
|
||||
!self.is_address()
|
||||
}
|
||||
}
|
||||
|
||||
/// An export from a PE file.
|
||||
///
|
||||
/// There are multiple kinds of PE exports (with or without a name, and local or forwarded).
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct Export<'data> {
|
||||
/// The ordinal of the export.
|
||||
///
|
||||
/// These are sequential, starting at a base specified in the DLL.
|
||||
pub ordinal: u32,
|
||||
/// The name of the export, if known.
|
||||
pub name: Option<&'data [u8]>,
|
||||
/// The target of this export.
|
||||
pub target: ExportTarget<'data>,
|
||||
}
|
||||
|
||||
impl<'a> Debug for Export<'a> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::result::Result<(), core::fmt::Error> {
|
||||
f.debug_struct("Export")
|
||||
.field("ordinal", &self.ordinal)
|
||||
.field("name", &self.name.map(ByteString))
|
||||
.field("target", &self.target)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for ExportTarget<'a> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::result::Result<(), core::fmt::Error> {
|
||||
match self {
|
||||
ExportTarget::Address(address) => write!(f, "Address({:#x})", address),
|
||||
ExportTarget::ForwardByOrdinal(library, ordinal) => write!(
|
||||
f,
|
||||
"ForwardByOrdinal({:?}.#{})",
|
||||
ByteString(library),
|
||||
ordinal
|
||||
),
|
||||
ExportTarget::ForwardByName(library, name) => write!(
|
||||
f,
|
||||
"ForwardByName({:?}.{:?})",
|
||||
ByteString(library),
|
||||
ByteString(name)
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A partially parsed PE export table.
|
||||
///
|
||||
/// Returned by [`DataDirectories::export_table`](super::DataDirectories::export_table).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExportTable<'data> {
|
||||
data: Bytes<'data>,
|
||||
virtual_address: u32,
|
||||
directory: &'data pe::ImageExportDirectory,
|
||||
addresses: &'data [U32Bytes<LE>],
|
||||
names: &'data [U32Bytes<LE>],
|
||||
name_ordinals: &'data [U16Bytes<LE>],
|
||||
}
|
||||
|
||||
impl<'data> ExportTable<'data> {
|
||||
/// Parse the export table given its section data and address.
|
||||
pub fn parse(data: &'data [u8], virtual_address: u32) -> Result<Self> {
|
||||
let directory = Self::parse_directory(data)?;
|
||||
let data = Bytes(data);
|
||||
|
||||
let mut addresses = &[][..];
|
||||
let address_of_functions = directory.address_of_functions.get(LE);
|
||||
if address_of_functions != 0 {
|
||||
addresses = data
|
||||
.read_slice_at::<U32Bytes<_>>(
|
||||
address_of_functions.wrapping_sub(virtual_address) as usize,
|
||||
directory.number_of_functions.get(LE) as usize,
|
||||
)
|
||||
.read_error("Invalid PE export address table")?;
|
||||
}
|
||||
|
||||
let mut names = &[][..];
|
||||
let mut name_ordinals = &[][..];
|
||||
let address_of_names = directory.address_of_names.get(LE);
|
||||
let address_of_name_ordinals = directory.address_of_name_ordinals.get(LE);
|
||||
if address_of_names != 0 {
|
||||
if address_of_name_ordinals == 0 {
|
||||
return Err(Error("Missing PE export ordinal table"));
|
||||
}
|
||||
|
||||
let number = directory.number_of_names.get(LE) as usize;
|
||||
names = data
|
||||
.read_slice_at::<U32Bytes<_>>(
|
||||
address_of_names.wrapping_sub(virtual_address) as usize,
|
||||
number,
|
||||
)
|
||||
.read_error("Invalid PE export name pointer table")?;
|
||||
name_ordinals = data
|
||||
.read_slice_at::<U16Bytes<_>>(
|
||||
address_of_name_ordinals.wrapping_sub(virtual_address) as usize,
|
||||
number,
|
||||
)
|
||||
.read_error("Invalid PE export ordinal table")?;
|
||||
}
|
||||
|
||||
Ok(ExportTable {
|
||||
data,
|
||||
virtual_address,
|
||||
directory,
|
||||
addresses,
|
||||
names,
|
||||
name_ordinals,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse the export directory given its section data.
|
||||
pub fn parse_directory(data: &'data [u8]) -> Result<&'data pe::ImageExportDirectory> {
|
||||
data.read_at::<pe::ImageExportDirectory>(0)
|
||||
.read_error("Invalid PE export dir size")
|
||||
}
|
||||
|
||||
/// Returns the header of the export table.
|
||||
pub fn directory(&self) -> &'data pe::ImageExportDirectory {
|
||||
self.directory
|
||||
}
|
||||
|
||||
/// Returns the base value of ordinals.
|
||||
///
|
||||
/// Adding this to an address index will give an ordinal.
|
||||
pub fn ordinal_base(&self) -> u32 {
|
||||
self.directory.base.get(LE)
|
||||
}
|
||||
|
||||
/// Returns the unparsed address table.
|
||||
///
|
||||
/// An address table entry may be a local address, or the address of a forwarded export entry.
|
||||
/// See [`Self::is_forward`] and [`Self::target_from_address`].
|
||||
pub fn addresses(&self) -> &'data [U32Bytes<LE>] {
|
||||
self.addresses
|
||||
}
|
||||
|
||||
/// Returns the unparsed name pointer table.
|
||||
///
|
||||
/// A name pointer table entry can be used with [`Self::name_from_pointer`].
|
||||
pub fn name_pointers(&self) -> &'data [U32Bytes<LE>] {
|
||||
self.names
|
||||
}
|
||||
|
||||
/// Returns the unparsed ordinal table.
|
||||
///
|
||||
/// An ordinal table entry is a 0-based index into the address table.
|
||||
/// See [`Self::address_by_index`] and [`Self::target_by_index`].
|
||||
pub fn name_ordinals(&self) -> &'data [U16Bytes<LE>] {
|
||||
self.name_ordinals
|
||||
}
|
||||
|
||||
/// Returns an iterator for the entries in the name pointer table and ordinal table.
|
||||
///
|
||||
/// A name pointer table entry can be used with [`Self::name_from_pointer`].
|
||||
///
|
||||
/// An ordinal table entry is a 0-based index into the address table.
|
||||
/// See [`Self::address_by_index`] and [`Self::target_by_index`].
|
||||
pub fn name_iter(&self) -> impl Iterator<Item = (u32, u16)> + 'data {
|
||||
self.names
|
||||
.iter()
|
||||
.map(|x| x.get(LE))
|
||||
.zip(self.name_ordinals.iter().map(|x| x.get(LE)))
|
||||
}
|
||||
|
||||
/// Returns the export address table entry at the given address index.
|
||||
///
|
||||
/// This may be a local address, or the address of a forwarded export entry.
|
||||
/// See [`Self::is_forward`] and [`Self::target_from_address`].
|
||||
///
|
||||
/// `index` is a 0-based index into the export address table.
|
||||
pub fn address_by_index(&self, index: u32) -> Result<u32> {
|
||||
Ok(self
|
||||
.addresses
|
||||
.get(index as usize)
|
||||
.read_error("Invalid PE export address index")?
|
||||
.get(LE))
|
||||
}
|
||||
|
||||
/// Returns the export address table entry at the given ordinal.
|
||||
///
|
||||
/// This may be a local address, or the address of a forwarded export entry.
|
||||
/// See [`Self::is_forward`] and [`Self::target_from_address`].
|
||||
pub fn address_by_ordinal(&self, ordinal: u32) -> Result<u32> {
|
||||
self.address_by_index(ordinal.wrapping_sub(self.ordinal_base()))
|
||||
}
|
||||
|
||||
/// Returns the target of the export at the given address index.
|
||||
///
|
||||
/// `index` is a 0-based index into the export address table.
|
||||
pub fn target_by_index(&self, index: u32) -> Result<ExportTarget<'data>> {
|
||||
self.target_from_address(self.address_by_index(index)?)
|
||||
}
|
||||
|
||||
/// Returns the target of the export at the given ordinal.
|
||||
pub fn target_by_ordinal(&self, ordinal: u32) -> Result<ExportTarget<'data>> {
|
||||
self.target_from_address(self.address_by_ordinal(ordinal)?)
|
||||
}
|
||||
|
||||
/// Convert an export address table entry into a target.
|
||||
pub fn target_from_address(&self, address: u32) -> Result<ExportTarget<'data>> {
|
||||
Ok(if let Some(forward) = self.forward_string(address)? {
|
||||
let i = forward
|
||||
.iter()
|
||||
.position(|x| *x == b'.')
|
||||
.read_error("Missing PE forwarded export separator")?;
|
||||
let library = &forward[..i];
|
||||
match &forward[i + 1..] {
|
||||
[b'#', digits @ ..] => {
|
||||
let ordinal =
|
||||
parse_ordinal(digits).read_error("Invalid PE forwarded export ordinal")?;
|
||||
ExportTarget::ForwardByOrdinal(library, ordinal)
|
||||
}
|
||||
[] => {
|
||||
return Err(Error("Missing PE forwarded export name"));
|
||||
}
|
||||
name => ExportTarget::ForwardByName(library, name),
|
||||
}
|
||||
} else {
|
||||
ExportTarget::Address(address)
|
||||
})
|
||||
}
|
||||
|
||||
fn forward_offset(&self, address: u32) -> Option<usize> {
|
||||
let offset = address.wrapping_sub(self.virtual_address) as usize;
|
||||
if offset < self.data.len() {
|
||||
Some(offset)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if the export address table entry is a forward.
|
||||
pub fn is_forward(&self, address: u32) -> bool {
|
||||
self.forward_offset(address).is_some()
|
||||
}
|
||||
|
||||
/// Return the forward string if the export address table entry is a forward.
|
||||
pub fn forward_string(&self, address: u32) -> Result<Option<&'data [u8]>> {
|
||||
if let Some(offset) = self.forward_offset(address) {
|
||||
self.data
|
||||
.read_string_at(offset)
|
||||
.read_error("Invalid PE forwarded export address")
|
||||
.map(Some)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert an export name pointer table entry into a name.
|
||||
pub fn name_from_pointer(&self, name_pointer: u32) -> Result<&'data [u8]> {
|
||||
let offset = name_pointer.wrapping_sub(self.virtual_address);
|
||||
self.data
|
||||
.read_string_at(offset as usize)
|
||||
.read_error("Invalid PE export name pointer")
|
||||
}
|
||||
|
||||
/// Returns the parsed exports in this table.
|
||||
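///
/// A hedged `no_run` sketch using the low level PE API; the file path and
/// the choice of PE32+ are assumptions for illustration.
/// ```no_run
/// use object::pe;
/// use object::read::pe::ImageNtHeaders;
/// # #[cfg(feature = "std")] {
/// // Placeholder path; substitute a real PE32+ file.
/// let data = std::fs::read("example.dll").unwrap();
/// let dos_header = pe::ImageDosHeader::parse(&*data).unwrap();
/// let mut offset = dos_header.nt_headers_offset().into();
/// let (nt_headers, data_directories) = pe::ImageNtHeaders64::parse(&*data, &mut offset).unwrap();
/// let sections = nt_headers.sections(&*data, offset).unwrap();
/// if let Some(export_table) = data_directories.export_table(&*data, &sections).unwrap() {
///     for export in export_table.exports().unwrap() {
///         println!("{:?}", export);
///     }
/// }
/// # }
/// ```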
pub fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
// First, let's list all exports.
|
||||
let mut exports = Vec::new();
|
||||
let ordinal_base = self.ordinal_base();
|
||||
for (i, address) in self.addresses.iter().enumerate() {
|
||||
// Convert from an array index to an ordinal.
|
||||
let ordinal = ordinal_base.wrapping_add(i as u32);
|
||||
let target = self.target_from_address(address.get(LE))?;
|
||||
exports.push(Export {
|
||||
ordinal,
|
||||
target,
|
||||
// Might be populated later.
|
||||
name: None,
|
||||
});
|
||||
}
|
||||
|
||||
// Now, check whether some (or all) of them have an associated name.
|
||||
// `ordinal_index` is a 0-based index into `addresses`.
|
||||
for (name_pointer, ordinal_index) in self.name_iter() {
|
||||
let name = self.name_from_pointer(name_pointer)?;
|
||||
exports
|
||||
.get_mut(ordinal_index as usize)
|
||||
.read_error("Invalid PE export ordinal")?
|
||||
.name = Some(name);
|
||||
}
|
||||
|
||||
Ok(exports)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_ordinal(digits: &[u8]) -> Option<u32> {
|
||||
if digits.is_empty() {
|
||||
return None;
|
||||
}
|
||||
let mut result: u32 = 0;
|
||||
for &c in digits {
|
||||
let x = (c as char).to_digit(10)?;
|
||||
result = result.checked_mul(10)?.checked_add(x)?;
|
||||
}
|
||||
Some(result)
|
||||
}
|
||||
1050
vendor/object/src/read/pe/file.rs
vendored
Normal file
File diff suppressed because it is too large
337
vendor/object/src/read/pe/import.rs
vendored
Normal file
@@ -0,0 +1,337 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use crate::read::{Bytes, ReadError, Result};
|
||||
use crate::{pe, LittleEndian as LE, Pod, U16Bytes};
|
||||
|
||||
use super::ImageNtHeaders;
|
||||
|
||||
/// Information for parsing a PE import table.
|
||||
///
|
||||
/// Returned by [`DataDirectories::import_table`](super::DataDirectories::import_table).
|
||||
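///
/// A hedged `no_run` sketch that walks the import descriptors and prints
/// imports by name; the file path and the choice of PE32+ are assumptions
/// for illustration.
/// ```no_run
/// use object::pe;
/// use object::read::pe::{ImageNtHeaders, Import};
/// use object::LittleEndian as LE;
/// # #[cfg(feature = "std")] {
/// // Placeholder path; substitute a real PE32+ file.
/// let data = std::fs::read("example.dll").unwrap();
/// let dos_header = pe::ImageDosHeader::parse(&*data).unwrap();
/// let mut offset = dos_header.nt_headers_offset().into();
/// let (nt_headers, data_directories) = pe::ImageNtHeaders64::parse(&*data, &mut offset).unwrap();
/// let sections = nt_headers.sections(&*data, offset).unwrap();
/// if let Some(import_table) = data_directories.import_table(&*data, &sections).unwrap() {
///     let mut descriptors = import_table.descriptors().unwrap();
///     while let Some(descriptor) = descriptors.next().unwrap() {
///         let library = import_table.name(descriptor.name.get(LE)).unwrap();
///         let mut thunks = import_table.thunks(descriptor.original_first_thunk.get(LE)).unwrap();
///         while let Some(thunk) = thunks.next::<pe::ImageNtHeaders64>().unwrap() {
///             if let Import::Name(_hint, name) = import_table.import::<pe::ImageNtHeaders64>(thunk).unwrap() {
///                 println!("{}!{}", String::from_utf8_lossy(library), String::from_utf8_lossy(name));
///             }
///         }
///     }
/// }
/// # }
/// ```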
#[derive(Debug, Clone)]
|
||||
pub struct ImportTable<'data> {
|
||||
section_data: Bytes<'data>,
|
||||
section_address: u32,
|
||||
import_address: u32,
|
||||
}
|
||||
|
||||
impl<'data> ImportTable<'data> {
|
||||
/// Create a new import table parser.
|
||||
///
|
||||
/// The import descriptors start at `import_address`.
|
||||
/// The size declared in the `IMAGE_DIRECTORY_ENTRY_IMPORT` data directory is
|
||||
/// ignored by the Windows loader, and so descriptors will be parsed until a null entry.
|
||||
///
|
||||
/// `section_data` should be from the section containing `import_address`, and
|
||||
/// `section_address` should be the address of that section. Pointers within the
|
||||
/// descriptors and thunks may point to anywhere within the section data.
|
||||
pub fn new(section_data: &'data [u8], section_address: u32, import_address: u32) -> Self {
|
||||
ImportTable {
|
||||
section_data: Bytes(section_data),
|
||||
section_address,
|
||||
import_address,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator for the import descriptors.
|
||||
pub fn descriptors(&self) -> Result<ImportDescriptorIterator<'data>> {
|
||||
let offset = self.import_address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import descriptor address")?;
|
||||
Ok(ImportDescriptorIterator { data })
|
||||
}
|
||||
|
||||
/// Return a library name given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageImportDescriptor::name`].
|
||||
pub fn name(&self, address: u32) -> Result<&'data [u8]> {
|
||||
self.section_data
|
||||
.read_string_at(address.wrapping_sub(self.section_address) as usize)
|
||||
.read_error("Invalid PE import descriptor name")
|
||||
}
|
||||
|
||||
/// Return a list of thunks given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageImportDescriptor::original_first_thunk`]
|
||||
/// or [`pe::ImageImportDescriptor::first_thunk`].
|
||||
pub fn thunks(&self, address: u32) -> Result<ImportThunkList<'data>> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import thunk table address")?;
|
||||
Ok(ImportThunkList { data })
|
||||
}
|
||||
|
||||
/// Parse a thunk.
|
||||
pub fn import<Pe: ImageNtHeaders>(&self, thunk: Pe::ImageThunkData) -> Result<Import<'data>> {
|
||||
if thunk.is_ordinal() {
|
||||
Ok(Import::Ordinal(thunk.ordinal()))
|
||||
} else {
|
||||
let (hint, name) = self.hint_name(thunk.address())?;
|
||||
Ok(Import::Name(hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the hint and name at the given address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageThunkData32`] or [`pe::ImageThunkData64`].
|
||||
///
|
||||
/// The hint is an index into the export name pointer table in the target library.
|
||||
pub fn hint_name(&self, address: u32) -> Result<(u16, &'data [u8])> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE import thunk address")?;
|
||||
let hint = data
|
||||
.read::<U16Bytes<LE>>()
|
||||
.read_error("Missing PE import thunk hint")?
|
||||
.get(LE);
|
||||
let name = data
|
||||
.read_string()
|
||||
.read_error("Missing PE import thunk name")?;
|
||||
Ok((hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// A fallible iterator for the descriptors in the import data directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportDescriptorIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> ImportDescriptorIterator<'data> {
|
||||
/// Return the next descriptor.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null descriptor is found.
|
||||
pub fn next(&mut self) -> Result<Option<&'data pe::ImageImportDescriptor>> {
|
||||
let import_desc = self
|
||||
.data
|
||||
.read::<pe::ImageImportDescriptor>()
|
||||
.read_error("Missing PE null import descriptor")?;
|
||||
if import_desc.is_null() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(import_desc))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A list of import thunks.
|
||||
///
|
||||
/// These may be in the import lookup table, or the import address table.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImportThunkList<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> ImportThunkList<'data> {
|
||||
/// Get the thunk at the given index.
|
||||
pub fn get<Pe: ImageNtHeaders>(&self, index: usize) -> Result<Pe::ImageThunkData> {
|
||||
let thunk = self
|
||||
.data
|
||||
.read_at(index * mem::size_of::<Pe::ImageThunkData>())
|
||||
.read_error("Invalid PE import thunk index")?;
|
||||
Ok(*thunk)
|
||||
}
|
||||
|
||||
/// Return the first thunk in the list, and update `self` to point after it.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null thunk is found.
|
||||
pub fn next<Pe: ImageNtHeaders>(&mut self) -> Result<Option<Pe::ImageThunkData>> {
|
||||
let thunk = self
|
||||
.data
|
||||
.read::<Pe::ImageThunkData>()
|
||||
.read_error("Missing PE null import thunk")?;
|
||||
if thunk.address() == 0 {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(*thunk))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed import thunk.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum Import<'data> {
|
||||
/// Import by ordinal.
|
||||
Ordinal(u16),
|
||||
/// Import by name.
|
||||
///
|
||||
/// Includes a hint for the index into the export name pointer table in the target library.
|
||||
Name(u16, &'data [u8]),
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`pe::ImageThunkData32`] and [`pe::ImageThunkData64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait ImageThunkData: Debug + Pod {
|
||||
/// Return the raw thunk value.
|
||||
fn raw(self) -> u64;
|
||||
|
||||
/// Returns true if the ordinal flag is set.
|
||||
fn is_ordinal(self) -> bool;
|
||||
|
||||
/// Return the ordinal portion of the thunk.
|
||||
///
|
||||
/// Does not check the ordinal flag.
|
||||
fn ordinal(self) -> u16;
|
||||
|
||||
/// Return the RVA portion of the thunk.
|
||||
///
|
||||
/// Does not check the ordinal flag.
|
||||
fn address(self) -> u32;
|
||||
}
|
||||
|
||||
impl ImageThunkData for pe::ImageThunkData64 {
|
||||
fn raw(self) -> u64 {
|
||||
self.0.get(LE)
|
||||
}
|
||||
|
||||
fn is_ordinal(self) -> bool {
|
||||
self.0.get(LE) & pe::IMAGE_ORDINAL_FLAG64 != 0
|
||||
}
|
||||
|
||||
fn ordinal(self) -> u16 {
|
||||
self.0.get(LE) as u16
|
||||
}
|
||||
|
||||
fn address(self) -> u32 {
|
||||
self.0.get(LE) as u32 & 0x7fff_ffff
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageThunkData for pe::ImageThunkData32 {
|
||||
fn raw(self) -> u64 {
|
||||
self.0.get(LE).into()
|
||||
}
|
||||
|
||||
fn is_ordinal(self) -> bool {
|
||||
self.0.get(LE) & pe::IMAGE_ORDINAL_FLAG32 != 0
|
||||
}
|
||||
|
||||
fn ordinal(self) -> u16 {
|
||||
self.0.get(LE) as u16
|
||||
}
|
||||
|
||||
fn address(self) -> u32 {
|
||||
self.0.get(LE) & 0x7fff_ffff
|
||||
}
|
||||
}
|
||||
|
||||
/// Information for parsing a PE delay-load import table.
|
||||
///
|
||||
/// Returned by
|
||||
/// [`DataDirectories::delay_load_import_table`](super::DataDirectories::delay_load_import_table).
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DelayLoadImportTable<'data> {
|
||||
section_data: Bytes<'data>,
|
||||
section_address: u32,
|
||||
import_address: u32,
|
||||
}
|
||||
|
||||
impl<'data> DelayLoadImportTable<'data> {
|
||||
/// Create a new delay load import table parser.
|
||||
///
|
||||
/// The import descriptors start at `import_address`.
|
||||
/// This table works in the same way as the import table: descriptors will be
|
||||
/// parsed until a null entry.
|
||||
///
|
||||
/// `section_data` should be from the section containing `import_address`, and
|
||||
/// `section_address` should be the address of that section. Pointers within the
|
||||
/// descriptors and thunks may point to anywhere within the section data.
|
||||
pub fn new(section_data: &'data [u8], section_address: u32, import_address: u32) -> Self {
|
||||
DelayLoadImportTable {
|
||||
section_data: Bytes(section_data),
|
||||
section_address,
|
||||
import_address,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator for the import descriptors.
|
||||
pub fn descriptors(&self) -> Result<DelayLoadDescriptorIterator<'data>> {
|
||||
let offset = self.import_address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay-load import descriptor address")?;
|
||||
Ok(DelayLoadDescriptorIterator { data })
|
||||
}
|
||||
|
||||
/// Return a library name given its address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageDelayloadDescriptor::dll_name_rva`].
|
||||
pub fn name(&self, address: u32) -> Result<&'data [u8]> {
|
||||
self.section_data
|
||||
.read_string_at(address.wrapping_sub(self.section_address) as usize)
|
||||
.read_error("Invalid PE import descriptor name")
|
||||
}
|
||||
|
||||
/// Return a list of thunks given its address.
|
||||
///
|
||||
/// This address may be from the INT, i.e. from
|
||||
/// [`pe::ImageDelayloadDescriptor::import_name_table_rva`].
|
||||
///
|
||||
/// Please note that other RVA values from [`pe::ImageDelayloadDescriptor`] are used
|
||||
/// by the delay loader at runtime to store values, and thus do not point inside the same
|
||||
/// section as the INT. Calling this function on those addresses will fail.
|
||||
pub fn thunks(&self, address: u32) -> Result<ImportThunkList<'data>> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay load import thunk table address")?;
|
||||
Ok(ImportThunkList { data })
|
||||
}
|
||||
|
||||
/// Parse a thunk.
|
||||
pub fn import<Pe: ImageNtHeaders>(&self, thunk: Pe::ImageThunkData) -> Result<Import<'data>> {
|
||||
if thunk.is_ordinal() {
|
||||
Ok(Import::Ordinal(thunk.ordinal()))
|
||||
} else {
|
||||
let (hint, name) = self.hint_name(thunk.address())?;
|
||||
Ok(Import::Name(hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the hint and name at the given address.
|
||||
///
|
||||
/// This address may be from [`pe::ImageThunkData32`] or [`pe::ImageThunkData64`].
|
||||
///
|
||||
/// The hint is an index into the export name pointer table in the target library.
|
||||
pub fn hint_name(&self, address: u32) -> Result<(u16, &'data [u8])> {
|
||||
let offset = address.wrapping_sub(self.section_address);
|
||||
let mut data = self.section_data;
|
||||
data.skip(offset as usize)
|
||||
.read_error("Invalid PE delay load import thunk address")?;
|
||||
let hint = data
|
||||
.read::<U16Bytes<LE>>()
|
||||
.read_error("Missing PE delay load import thunk hint")?
|
||||
.get(LE);
|
||||
let name = data
|
||||
.read_string()
|
||||
.read_error("Missing PE delay load import thunk name")?;
|
||||
Ok((hint, name))
|
||||
}
|
||||
}
|
||||
|
||||
/// A fallible iterator for the descriptors in the delay-load data directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DelayLoadDescriptorIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> DelayLoadDescriptorIterator<'data> {
|
||||
/// Return the next descriptor.
|
||||
///
|
||||
/// Returns `Ok(None)` when a null descriptor is found.
|
||||
pub fn next(&mut self) -> Result<Option<&'data pe::ImageDelayloadDescriptor>> {
|
||||
let import_desc = self
|
||||
.data
|
||||
.read::<pe::ImageDelayloadDescriptor>()
|
||||
.read_error("Missing PE null delay-load import descriptor")?;
|
||||
if import_desc.is_null() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(import_desc))
|
||||
}
|
||||
}
|
||||
}
|
||||
68
vendor/object/src/read/pe/mod.rs
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
//! Support for reading PE files.
|
||||
//!
|
||||
//! Traits are used to abstract over the difference between PE32 and PE32+.
|
||||
//! The primary trait for this is [`ImageNtHeaders`].
|
||||
//!
|
||||
//! ## High level API
|
||||
//!
|
||||
//! [`PeFile`] implements the [`Object`](crate::read::Object) trait for
|
||||
//! PE files. [`PeFile`] is parameterised by [`ImageNtHeaders`] to allow
|
||||
//! reading both PE32 and PE32+. There are type aliases for these parameters
|
||||
//! ([`PeFile32`] and [`PeFile64`]).
|
||||
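//!
//! ### Example for high level API
//! A minimal sketch along the same lines as the unified read API example;
//! the path and the choice of PE32+ are assumptions for illustration.
//! ```no_run
//! use object::{Object, ObjectSection};
//! use object::read::pe::PeFile64;
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a PE32+ file and displays the name of each section.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?;
//!     let file = PeFile64::parse(&*data)?;
//!     for section in file.sections() {
//!         println!("{}", section.name()?);
//!     }
//! # }
//!     Ok(())
//! }
//! ```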
//!
|
||||
//! ## Low level API
|
||||
//!
|
||||
//! The [`ImageNtHeaders`] trait can be directly used to parse both
|
||||
//! [`pe::ImageNtHeaders32`] and [`pe::ImageNtHeaders64`].
|
||||
//!
|
||||
//! ### Example for low level API
|
||||
//! ```no_run
|
||||
//! use object::pe;
|
||||
//! use object::read::pe::ImageNtHeaders;
|
||||
//! use std::error::Error;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! /// Reads a file and displays the name of each section.
|
||||
//! fn main() -> Result<(), Box<dyn Error>> {
|
||||
//! # #[cfg(feature = "std")] {
|
||||
//! let data = fs::read("path/to/binary")?;
|
||||
//! let dos_header = pe::ImageDosHeader::parse(&*data)?;
|
||||
//! let mut offset = dos_header.nt_headers_offset().into();
|
||||
//! let (nt_headers, data_directories) = pe::ImageNtHeaders64::parse(&*data, &mut offset)?;
|
||||
//! let sections = nt_headers.sections(&*data, offset)?;
|
||||
//! let symbols = nt_headers.symbols(&*data)?;
|
||||
//! for section in sections.iter() {
|
||||
//! println!("{}", String::from_utf8_lossy(section.name(symbols.strings())?));
|
||||
//! }
|
||||
//! # }
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
#[cfg(doc)]
|
||||
use crate::pe;
|
||||
|
||||
mod file;
|
||||
pub use file::*;
|
||||
|
||||
mod section;
|
||||
pub use section::*;
|
||||
|
||||
mod data_directory;
|
||||
pub use data_directory::*;
|
||||
|
||||
mod export;
|
||||
pub use export::*;
|
||||
|
||||
mod import;
|
||||
pub use import::*;
|
||||
|
||||
mod relocation;
|
||||
pub use relocation::*;
|
||||
|
||||
mod resource;
|
||||
pub use resource::*;
|
||||
|
||||
mod rich;
|
||||
pub use rich::*;
|
||||
|
||||
pub use super::coff::{SectionTable, SymbolTable};
|
||||
92
vendor/object/src/read/pe/relocation.rs
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
use core::slice;
|
||||
|
||||
use crate::endian::{LittleEndian as LE, U16};
|
||||
use crate::pe;
|
||||
use crate::read::{Bytes, Error, ReadError, Result};
|
||||
|
||||
/// An iterator over the relocation blocks in the `.reloc` section of a PE file.
|
||||
///
|
||||
/// Returned by [`DataDirectories::relocation_blocks`](super::DataDirectories::relocation_blocks).
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct RelocationBlockIterator<'data> {
|
||||
data: Bytes<'data>,
|
||||
}
|
||||
|
||||
impl<'data> RelocationBlockIterator<'data> {
|
||||
/// Construct a new iterator from the data of the `.reloc` section.
|
||||
pub fn new(data: &'data [u8]) -> Self {
|
||||
RelocationBlockIterator { data: Bytes(data) }
|
||||
}
|
||||
|
||||
/// Read the next relocation page.
|
||||
pub fn next(&mut self) -> Result<Option<RelocationIterator<'data>>> {
|
||||
if self.data.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let header = self
|
||||
.data
|
||||
.read::<pe::ImageBaseRelocation>()
|
||||
.read_error("Invalid PE reloc section size")?;
|
||||
let virtual_address = header.virtual_address.get(LE);
|
||||
let size = header.size_of_block.get(LE);
|
||||
if size <= 8 || size & 3 != 0 {
|
||||
return Err(Error("Invalid PE reloc block size"));
|
||||
}
|
||||
let count = (size - 8) / 2;
|
||||
let relocs = self
|
||||
.data
|
||||
.read_slice::<U16<LE>>(count as usize)
|
||||
.read_error("Invalid PE reloc block size")?
|
||||
.iter();
|
||||
Ok(Some(RelocationIterator {
|
||||
virtual_address,
|
||||
size,
|
||||
relocs,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator of the relocations in a block in the `.reloc` section of a PE file.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RelocationIterator<'data> {
|
||||
virtual_address: u32,
|
||||
size: u32,
|
||||
relocs: slice::Iter<'data, U16<LE>>,
|
||||
}
|
||||
|
||||
impl<'data> RelocationIterator<'data> {
|
||||
/// Return the virtual address of the page that this block of relocations applies to.
|
||||
pub fn virtual_address(&self) -> u32 {
|
||||
self.virtual_address
|
||||
}
|
||||
|
||||
/// Return the size in bytes of this block of relocations.
|
||||
pub fn size(&self) -> u32 {
|
||||
self.size
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> Iterator for RelocationIterator<'data> {
|
||||
type Item = Relocation;
|
||||
|
||||
fn next(&mut self) -> Option<Relocation> {
|
||||
loop {
|
||||
let reloc = self.relocs.next()?.get(LE);
|
||||
if reloc != 0 {
|
||||
return Some(Relocation {
|
||||
virtual_address: self.virtual_address.wrapping_add((reloc & 0xfff) as u32),
|
||||
typ: reloc >> 12,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A relocation in the `.reloc` section of a PE file.
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct Relocation {
|
||||
/// The virtual address of the relocation.
|
||||
pub virtual_address: u32,
|
||||
/// One of the `pe::IMAGE_REL_BASED_*` constants.
|
||||
pub typ: u16,
|
||||
}
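// A minimal usage sketch for the iterators above, assuming `reloc_data` holds the
// raw contents of a `.reloc` section: it walks every block and every relocation.
// The function and parameter names are illustrative only.
#[allow(dead_code)]
fn dump_base_relocations(reloc_data: &[u8]) -> Result<()> {
    let mut blocks = RelocationBlockIterator::new(reloc_data);
    while let Some(block) = blocks.next()? {
        // `virtual_address` is the RVA of the page that this block applies to.
        let page_rva = block.virtual_address();
        for reloc in block {
            // `typ` is one of the `pe::IMAGE_REL_BASED_*` constants.
            let _ = (page_rva, reloc.virtual_address, reloc.typ);
        }
    }
    Ok(())
}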
|
||||
209
vendor/object/src/read/pe/resource.rs
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
use alloc::string::String;
|
||||
use core::char;
|
||||
|
||||
use crate::read::{ReadError, ReadRef, Result};
|
||||
use crate::{pe, LittleEndian as LE, U16Bytes};
|
||||
|
||||
/// The `.rsrc` section of a PE file.
|
||||
///
|
||||
/// Returned by [`DataDirectories::resource_directory`](super::DataDirectories::resource_directory).
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ResourceDirectory<'data> {
|
||||
data: &'data [u8],
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectory<'data> {
|
||||
/// Construct from the data of the `.rsrc` section.
|
||||
pub fn new(data: &'data [u8]) -> Self {
|
||||
ResourceDirectory { data }
|
||||
}
|
||||
|
||||
/// Parses the root resource directory.
|
||||
pub fn root(&self) -> Result<ResourceDirectoryTable<'data>> {
|
||||
ResourceDirectoryTable::parse(self.data, 0)
|
||||
}
|
||||
}
|
||||
|
||||
/// A table of resource entries.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ResourceDirectoryTable<'data> {
|
||||
/// The table header.
|
||||
pub header: &'data pe::ImageResourceDirectory,
|
||||
/// The table entries.
|
||||
pub entries: &'data [pe::ImageResourceDirectoryEntry],
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectoryTable<'data> {
|
||||
fn parse(data: &'data [u8], offset: u32) -> Result<Self> {
|
||||
let mut offset = u64::from(offset);
|
||||
let header = data
|
||||
.read::<pe::ImageResourceDirectory>(&mut offset)
|
||||
.read_error("Invalid resource table header")?;
|
||||
let entries_count = header.number_of_id_entries.get(LE) as usize
|
||||
+ header.number_of_named_entries.get(LE) as usize;
|
||||
let entries = data
|
||||
.read_slice::<pe::ImageResourceDirectoryEntry>(&mut offset, entries_count)
|
||||
.read_error("Invalid resource table entries")?;
|
||||
Ok(Self { header, entries })
|
||||
}
|
||||
}
|
||||
|
||||
impl pe::ImageResourceDirectoryEntry {
|
||||
/// Returns true if the entry has a name, rather than an ID.
|
||||
pub fn has_name(&self) -> bool {
|
||||
self.name_or_id.get(LE) & pe::IMAGE_RESOURCE_NAME_IS_STRING != 0
|
||||
}
|
||||
|
||||
/// Returns the section offset of the name.
|
||||
///
|
||||
/// Valid if `has_name()` returns true.
|
||||
fn name(&self) -> ResourceName {
|
||||
let offset = self.name_or_id.get(LE) & !pe::IMAGE_RESOURCE_NAME_IS_STRING;
|
||||
ResourceName { offset }
|
||||
}
|
||||
|
||||
/// Returns the ID.
|
||||
///
|
||||
/// Valid if `has_name()` returns false.
|
||||
fn id(&self) -> u16 {
|
||||
(self.name_or_id.get(LE) & 0x0000_FFFF) as u16
|
||||
}
|
||||
|
||||
/// Returns the entry name or ID.
|
||||
pub fn name_or_id(&self) -> ResourceNameOrId {
|
||||
if self.has_name() {
|
||||
ResourceNameOrId::Name(self.name())
|
||||
} else {
|
||||
ResourceNameOrId::Id(self.id())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the entry is a subtable.
|
||||
pub fn is_table(&self) -> bool {
|
||||
self.offset_to_data_or_directory.get(LE) & pe::IMAGE_RESOURCE_DATA_IS_DIRECTORY != 0
|
||||
}
|
||||
|
||||
/// Returns the section offset of the associated table or data.
|
||||
pub fn data_offset(&self) -> u32 {
|
||||
self.offset_to_data_or_directory.get(LE) & !pe::IMAGE_RESOURCE_DATA_IS_DIRECTORY
|
||||
}
|
||||
|
||||
/// Returns the data associated with this directory entry.
|
||||
pub fn data<'data>(
|
||||
&self,
|
||||
section: ResourceDirectory<'data>,
|
||||
) -> Result<ResourceDirectoryEntryData<'data>> {
|
||||
if self.is_table() {
|
||||
ResourceDirectoryTable::parse(section.data, self.data_offset())
|
||||
.map(ResourceDirectoryEntryData::Table)
|
||||
} else {
|
||||
section
|
||||
.data
|
||||
.read_at::<pe::ImageResourceDataEntry>(self.data_offset().into())
|
||||
.read_error("Invalid resource entry")
|
||||
.map(ResourceDirectoryEntryData::Data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Data associated with a resource directory entry.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ResourceDirectoryEntryData<'data> {
|
||||
/// A subtable entry.
|
||||
Table(ResourceDirectoryTable<'data>),
|
||||
/// A resource data entry.
|
||||
Data(&'data pe::ImageResourceDataEntry),
|
||||
}
|
||||
|
||||
impl<'data> ResourceDirectoryEntryData<'data> {
|
||||
/// Converts to an option of table.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn table(self) -> Option<ResourceDirectoryTable<'data>> {
|
||||
match self {
|
||||
Self::Table(dir) => Some(dir),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts to an option of data entry.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn data(self) -> Option<&'data pe::ImageResourceDataEntry> {
|
||||
match self {
|
||||
Self::Data(rsc) => Some(rsc),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource name.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ResourceName {
|
||||
offset: u32,
|
||||
}
|
||||
|
||||
impl ResourceName {
|
||||
/// Converts to a `String`.
|
||||
pub fn to_string_lossy(&self, directory: ResourceDirectory<'_>) -> Result<String> {
|
||||
let d = self.data(directory)?.iter().map(|c| c.get(LE));
|
||||
|
||||
Ok(char::decode_utf16(d)
|
||||
.map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER))
|
||||
.collect::<String>())
|
||||
}
|
||||
|
||||
/// Returns the name as a buffer of UTF-16 code units.
|
||||
pub fn data<'data>(
|
||||
&self,
|
||||
directory: ResourceDirectory<'data>,
|
||||
) -> Result<&'data [U16Bytes<LE>]> {
|
||||
let mut offset = u64::from(self.offset);
|
||||
let len = directory
|
||||
.data
|
||||
.read::<U16Bytes<LE>>(&mut offset)
|
||||
.read_error("Invalid resource name offset")?;
|
||||
directory
|
||||
.data
|
||||
.read_slice::<U16Bytes<LE>>(&mut offset, len.get(LE).into())
|
||||
.read_error("Invalid resource name length")
|
||||
}
|
||||
|
||||
/// Returns the string buffer as raw bytes.
|
||||
pub fn raw_data<'data>(&self, directory: ResourceDirectory<'data>) -> Result<&'data [u8]> {
|
||||
self.data(directory).map(crate::pod::bytes_of_slice)
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource name or ID.
|
||||
///
|
||||
/// Can be either a string or a numeric ID.
|
||||
#[derive(Debug)]
|
||||
pub enum ResourceNameOrId {
|
||||
/// A resource name.
|
||||
Name(ResourceName),
|
||||
/// A resource ID.
|
||||
Id(u16),
|
||||
}
|
||||
|
||||
impl ResourceNameOrId {
|
||||
/// Converts to an option of name.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn name(self) -> Option<ResourceName> {
|
||||
match self {
|
||||
Self::Name(name) => Some(name),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts to an option of ID.
|
||||
///
|
||||
/// Helper for iterator filtering.
|
||||
pub fn id(self) -> Option<u16> {
|
||||
match self {
|
||||
Self::Id(id) => Some(id),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
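// A minimal usage sketch, assuming `rsrc_data` holds the raw contents of the
// `.rsrc` section: it parses the root table and inspects each top-level entry.
// The function and parameter names are illustrative only.
#[allow(dead_code)]
fn visit_root_resources(rsrc_data: &[u8]) -> Result<()> {
    let directory = ResourceDirectory::new(rsrc_data);
    let root = directory.root()?;
    for entry in root.entries {
        match entry.name_or_id() {
            // Resource names are stored as UTF-16; decode them lossily.
            ResourceNameOrId::Name(name) => {
                let _ = name.to_string_lossy(directory)?;
            }
            ResourceNameOrId::Id(id) => {
                let _ = id;
            }
        }
        // Each entry resolves to either a subdirectory table or a data entry.
        let _ = entry.data(directory)?;
    }
    Ok(())
}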
|
||||
91
vendor/object/src/read/pe/rich.rs
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
//! PE rich header handling
|
||||
|
||||
use core::mem;
|
||||
|
||||
use crate::pod::bytes_of_slice;
|
||||
use crate::read::Bytes;
|
||||
use crate::{pe, LittleEndian as LE, ReadRef, U32};
|
||||
|
||||
/// Parsed information about a Rich Header.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct RichHeaderInfo<'data> {
|
||||
/// The offset at which the rich header starts.
|
||||
pub offset: usize,
|
||||
/// The length (in bytes) of the rich header.
|
||||
///
|
||||
/// This includes the payload, but also the 16-byte start sequence and the
|
||||
/// 8-byte final "Rich" and XOR key.
|
||||
pub length: usize,
|
||||
/// The XOR key used to mask the rich header.
|
||||
///
|
||||
/// Unless the file has been tampered with, it should be equal to a checksum
|
||||
/// of the file header.
|
||||
pub xor_key: u32,
|
||||
masked_entries: &'data [pe::MaskedRichHeaderEntry],
|
||||
}
|
||||
|
||||
/// A PE rich header entry after it has been unmasked.
|
||||
///
|
||||
/// See [`pe::MaskedRichHeaderEntry`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct RichHeaderEntry {
|
||||
/// ID of the component.
|
||||
pub comp_id: u32,
|
||||
/// Number of times this component has been used when building this PE.
|
||||
pub count: u32,
|
||||
}
|
||||
|
||||
impl<'data> RichHeaderInfo<'data> {
|
||||
/// Try to locate a rich header and its entries in the current PE file.
|
||||
pub fn parse<R: ReadRef<'data>>(data: R, nt_header_offset: u64) -> Option<Self> {
|
||||
// Locate the rich header, if any.
|
||||
// It ends with the "Rich" string and an XOR key, before the NT header.
|
||||
let data = data.read_bytes_at(0, nt_header_offset).map(Bytes).ok()?;
|
||||
let end_marker_offset = memmem(data.0, b"Rich", 4)?;
|
||||
let xor_key = *data.read_at::<U32<LE>>(end_marker_offset + 4).ok()?;
|
||||
|
||||
// It starts at the masked "DanS" string and 3 masked zeroes.
|
||||
let masked_start_marker = U32::new(LE, 0x536e_6144 ^ xor_key.get(LE));
|
||||
let start_header = [masked_start_marker, xor_key, xor_key, xor_key];
|
||||
let start_sequence = bytes_of_slice(&start_header);
|
||||
let start_marker_offset = memmem(&data.0[..end_marker_offset], start_sequence, 4)?;
|
||||
|
||||
// Extract the items between the markers.
|
||||
let items_offset = start_marker_offset + start_sequence.len();
|
||||
let items_len = end_marker_offset - items_offset;
|
||||
let item_count = items_len / mem::size_of::<pe::MaskedRichHeaderEntry>();
|
||||
let items = data.read_slice_at(items_offset, item_count).ok()?;
|
||||
Some(RichHeaderInfo {
|
||||
offset: start_marker_offset,
|
||||
// Includes "Rich" marker and the XOR key.
|
||||
length: end_marker_offset - start_marker_offset + 8,
|
||||
xor_key: xor_key.get(LE),
|
||||
masked_entries: items,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns an iterator over the unmasked entries.
|
||||
pub fn unmasked_entries(&self) -> impl Iterator<Item = RichHeaderEntry> + 'data {
|
||||
let xor_key = self.xor_key;
|
||||
self.masked_entries
|
||||
.iter()
|
||||
.map(move |entry| RichHeaderEntry {
|
||||
comp_id: entry.masked_comp_id.get(LE) ^ xor_key,
|
||||
count: entry.masked_count.get(LE) ^ xor_key,
|
||||
})
|
||||
}
|
||||
}
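// A minimal usage sketch, assuming `data` holds the whole PE image and
// `nt_header_offset` comes from the DOS header's `nt_headers_offset`: it unmasks
// and inspects every rich header entry. The function name is illustrative only.
#[allow(dead_code)]
fn visit_rich_entries(data: &[u8], nt_header_offset: u64) {
    if let Some(info) = RichHeaderInfo::parse(data, nt_header_offset) {
        for entry in info.unmasked_entries() {
            // `comp_id` identifies the tool component; `count` is the number of
            // times it was used when building this PE.
            let _ = (entry.comp_id, entry.count);
        }
    }
}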
|
||||
|
||||
/// Find the offset of the first occurrence of needle in the data.
|
||||
///
|
||||
/// The offset must have the given alignment.
|
||||
fn memmem(data: &[u8], needle: &[u8], align: usize) -> Option<usize> {
|
||||
let mut offset = 0;
|
||||
loop {
|
||||
if data.get(offset..)?.get(..needle.len())? == needle {
|
||||
return Some(offset);
|
||||
}
|
||||
offset += align;
|
||||
}
|
||||
}
|
||||
440
vendor/object/src/read/pe/section.rs
vendored
Normal file
@@ -0,0 +1,440 @@
|
||||
use core::marker::PhantomData;
|
||||
use core::{cmp, iter, slice, str};
|
||||
|
||||
use crate::endian::LittleEndian as LE;
|
||||
use crate::pe;
|
||||
use crate::pe::ImageSectionHeader;
|
||||
use crate::read::{
|
||||
self, CompressedData, CompressedFileRange, ObjectSection, ObjectSegment, ReadError, ReadRef,
|
||||
Relocation, Result, SectionFlags, SectionIndex, SectionKind, SegmentFlags,
|
||||
};
|
||||
|
||||
use super::{ImageNtHeaders, PeFile, SectionTable};
|
||||
|
||||
/// An iterator for the loadable sections in a [`PeFile32`](super::PeFile32).
|
||||
pub type PeSegmentIterator32<'data, 'file, R = &'data [u8]> =
|
||||
PeSegmentIterator<'data, 'file, pe::ImageNtHeaders32, R>;
|
||||
/// An iterator for the loadable sections in a [`PeFile64`](super::PeFile64).
|
||||
pub type PeSegmentIterator64<'data, 'file, R = &'data [u8]> =
|
||||
PeSegmentIterator<'data, 'file, pe::ImageNtHeaders64, R>;
|
||||
|
||||
/// An iterator for the loadable sections in a [`PeFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct PeSegmentIterator<'data, 'file, Pe, R = &'data [u8]>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file PeFile<'data, Pe, R>,
|
||||
pub(super) iter: slice::Iter<'data, pe::ImageSectionHeader>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> Iterator for PeSegmentIterator<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = PeSegment<'data, 'file, Pe, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|section| PeSegment {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A loadable section in a [`PeFile32`](super::PeFile32).
|
||||
pub type PeSegment32<'data, 'file, R = &'data [u8]> =
|
||||
PeSegment<'data, 'file, pe::ImageNtHeaders32, R>;
|
||||
/// A loadable section in a [`PeFile64`](super::PeFile64).
|
||||
pub type PeSegment64<'data, 'file, R = &'data [u8]> =
|
||||
PeSegment<'data, 'file, pe::ImageNtHeaders64, R>;
|
||||
|
||||
/// A loadable section in a [`PeFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSegment`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct PeSegment<'data, 'file, Pe, R = &'data [u8]>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
file: &'file PeFile<'data, Pe, R>,
|
||||
section: &'data pe::ImageSectionHeader,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> read::private::Sealed for PeSegment<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> ObjectSegment<'data> for PeSegment<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
u64::from(self.section.virtual_address.get(LE)).wrapping_add(self.file.common.image_base)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
u64::from(self.section.virtual_size.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
self.file.section_alignment()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
let (offset, size) = self.section.pe_file_range();
|
||||
(u64::from(offset), u64::from(size))
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.section.pe_data(self.file.data)
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.data()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
self.section
|
||||
.name(self.file.common.symbols.strings())
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
let name = self.section.name(self.file.common.symbols.strings())?;
|
||||
Ok(Some(
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 PE section name")?,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
let characteristics = self.section.characteristics.get(LE);
|
||||
SegmentFlags::Coff { characteristics }
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a [`PeFile32`](super::PeFile32).
|
||||
pub type PeSectionIterator32<'data, 'file, R = &'data [u8]> =
|
||||
PeSectionIterator<'data, 'file, pe::ImageNtHeaders32, R>;
|
||||
/// An iterator for the sections in a [`PeFile64`](super::PeFile64).
|
||||
pub type PeSectionIterator64<'data, 'file, R = &'data [u8]> =
|
||||
PeSectionIterator<'data, 'file, pe::ImageNtHeaders64, R>;
|
||||
|
||||
/// An iterator for the sections in a [`PeFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct PeSectionIterator<'data, 'file, Pe, R = &'data [u8]>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file PeFile<'data, Pe, R>,
|
||||
pub(super) iter: iter::Enumerate<slice::Iter<'data, pe::ImageSectionHeader>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> Iterator for PeSectionIterator<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = PeSection<'data, 'file, Pe, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|(index, section)| PeSection {
|
||||
file: self.file,
|
||||
index: SectionIndex(index + 1),
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`PeFile32`](super::PeFile32).
|
||||
pub type PeSection32<'data, 'file, R = &'data [u8]> =
|
||||
PeSection<'data, 'file, pe::ImageNtHeaders32, R>;
|
||||
/// A section in a [`PeFile64`](super::PeFile64).
|
||||
pub type PeSection64<'data, 'file, R = &'data [u8]> =
|
||||
PeSection<'data, 'file, pe::ImageNtHeaders64, R>;
|
||||
|
||||
/// A section in a [`PeFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct PeSection<'data, 'file, Pe, R = &'data [u8]>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file PeFile<'data, Pe, R>,
|
||||
pub(super) index: SectionIndex,
|
||||
pub(super) section: &'data pe::ImageSectionHeader,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> read::private::Sealed for PeSection<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Pe, R> ObjectSection<'data> for PeSection<'data, 'file, Pe, R>
|
||||
where
|
||||
Pe: ImageNtHeaders,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type RelocationIterator = PeRelocationIterator<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn index(&self) -> SectionIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
u64::from(self.section.virtual_address.get(LE)).wrapping_add(self.file.common.image_base)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
u64::from(self.section.virtual_size.get(LE))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
self.file.section_alignment()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
let (offset, size) = self.section.pe_file_range();
|
||||
if size == 0 {
|
||||
None
|
||||
} else {
|
||||
Some((u64::from(offset), u64::from(size)))
|
||||
}
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.section.pe_data(self.file.data)
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.data()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
self.section.name(self.file.common.symbols.strings())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 PE section name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SectionKind {
|
||||
self.section.kind()
|
||||
}
|
||||
|
||||
fn relocations(&self) -> PeRelocationIterator<'data, 'file, R> {
|
||||
PeRelocationIterator(PhantomData)
|
||||
}
|
||||
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::Coff {
|
||||
characteristics: self.section.characteristics.get(LE),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> SectionTable<'data> {
|
||||
/// Return the file offset of the given virtual address, and the size up
|
||||
/// to the end of the section containing it.
|
||||
///
|
||||
/// Returns `None` if no section contains the address.
|
||||
pub fn pe_file_range_at(&self, va: u32) -> Option<(u32, u32)> {
|
||||
self.iter().find_map(|section| section.pe_file_range_at(va))
|
||||
}
|
||||
|
||||
/// Return the data starting at the given virtual address, up to the end of the
|
||||
/// section containing it.
|
||||
///
|
||||
/// Ignores sections with invalid data.
|
||||
///
|
||||
/// Returns `None` if no section contains the address.
|
||||
pub fn pe_data_at<R: ReadRef<'data>>(&self, data: R, va: u32) -> Option<&'data [u8]> {
|
||||
self.iter().find_map(|section| section.pe_data_at(data, va))
|
||||
}
|
||||
|
||||
/// Return the data of the section that contains the given virtual address in a PE file.
|
||||
///
|
||||
/// Also returns the virtual address of that section.
|
||||
///
|
||||
/// Ignores sections with invalid data.
|
||||
pub fn pe_data_containing<R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
va: u32,
|
||||
) -> Option<(&'data [u8], u32)> {
|
||||
self.iter()
|
||||
.find_map(|section| section.pe_data_containing(data, va))
|
||||
}
|
||||
|
||||
/// Return the section that contains a given virtual address.
|
||||
pub fn section_containing(&self, va: u32) -> Option<&'data ImageSectionHeader> {
|
||||
self.iter().find(|section| section.contains_rva(va))
|
||||
}
|
||||
}
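// A minimal usage sketch, assuming `sections` is a parsed `SectionTable` and
// `data` is the full PE image: it resolves a relative virtual address to the
// remaining bytes of the section that contains it. The names are illustrative only.
#[allow(dead_code)]
fn bytes_at_rva<'data, R: ReadRef<'data>>(
    sections: &SectionTable<'data>,
    data: R,
    rva: u32,
) -> Option<&'data [u8]> {
    // Returns `None` if no section contains the address or its data is invalid.
    sections.pe_data_at(data, rva)
}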
|
||||
|
||||
impl pe::ImageSectionHeader {
|
||||
/// Return the offset and size of the section in a PE file.
|
||||
///
|
||||
/// The size of the range will be the minimum of the file size and virtual size.
|
||||
pub fn pe_file_range(&self) -> (u32, u32) {
|
||||
// Pointer and size will be zero for uninitialized data; we don't need to validate this.
|
||||
let offset = self.pointer_to_raw_data.get(LE);
|
||||
let size = cmp::min(self.virtual_size.get(LE), self.size_of_raw_data.get(LE));
|
||||
(offset, size)
|
||||
}
|
||||
|
||||
/// Return the file offset of the given virtual address, and the remaining size up
|
||||
/// to the end of the section.
|
||||
///
|
||||
/// Returns `None` if the section does not contain the address.
|
||||
pub fn pe_file_range_at(&self, va: u32) -> Option<(u32, u32)> {
|
||||
let section_va = self.virtual_address.get(LE);
|
||||
let offset = va.checked_sub(section_va)?;
|
||||
let (section_offset, section_size) = self.pe_file_range();
|
||||
// Address must be within section (and not at its end).
|
||||
if offset < section_size {
|
||||
Some((section_offset.checked_add(offset)?, section_size - offset))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the virtual address and size of the section.
|
||||
pub fn pe_address_range(&self) -> (u32, u32) {
|
||||
(self.virtual_address.get(LE), self.virtual_size.get(LE))
|
||||
}
|
||||
|
||||
/// Return the section data in a PE file.
|
||||
///
|
||||
/// The length of the data will be the minimum of the file size and virtual size.
|
||||
pub fn pe_data<'data, R: ReadRef<'data>>(&self, data: R) -> Result<&'data [u8]> {
|
||||
let (offset, size) = self.pe_file_range();
|
||||
data.read_bytes_at(offset.into(), size.into())
|
||||
.read_error("Invalid PE section offset or size")
|
||||
}
|
||||
|
||||
/// Return the data starting at the given virtual address, up to the end of the
|
||||
/// section.
|
||||
///
|
||||
/// Ignores sections with invalid data.
|
||||
///
|
||||
/// Returns `None` if the section does not contain the address.
|
||||
pub fn pe_data_at<'data, R: ReadRef<'data>>(&self, data: R, va: u32) -> Option<&'data [u8]> {
|
||||
let (offset, size) = self.pe_file_range_at(va)?;
|
||||
data.read_bytes_at(offset.into(), size.into()).ok()
|
||||
}
|
||||
|
||||
/// Tests whether a given RVA is part of this section.
|
||||
pub fn contains_rva(&self, va: u32) -> bool {
|
||||
let section_va = self.virtual_address.get(LE);
|
||||
match va.checked_sub(section_va) {
|
||||
None => false,
|
||||
Some(offset) => {
|
||||
// Address must be within section (and not at its end).
|
||||
offset < self.virtual_size.get(LE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the section data if it contains the given virtual address.
|
||||
///
|
||||
/// Also returns the virtual address of that section.
|
||||
///
|
||||
/// Ignores sections with invalid data.
|
||||
pub fn pe_data_containing<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
va: u32,
|
||||
) -> Option<(&'data [u8], u32)> {
|
||||
let section_va = self.virtual_address.get(LE);
|
||||
let offset = va.checked_sub(section_va)?;
|
||||
let (section_offset, section_size) = self.pe_file_range();
|
||||
// Address must be within section (and not at its end).
|
||||
if offset < section_size {
|
||||
let section_data = data
|
||||
.read_bytes_at(section_offset.into(), section_size.into())
|
||||
.ok()?;
|
||||
Some((section_data, section_va))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the relocations in a [`PeSection`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct PeRelocationIterator<'data, 'file, R = &'data [u8]>(
|
||||
PhantomData<(&'data (), &'file (), R)>,
|
||||
);
|
||||
|
||||
impl<'data, 'file, R> Iterator for PeRelocationIterator<'data, 'file, R> {
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
178
vendor/object/src/read/read_cache.rs
vendored
Normal file
@@ -0,0 +1,178 @@
|
||||
use core::ops::Range;
|
||||
use std::boxed::Box;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryInto;
|
||||
use std::io::{Read, Seek, SeekFrom};
|
||||
use std::mem;
|
||||
use std::vec::Vec;
|
||||
|
||||
use crate::read::ReadRef;
|
||||
|
||||
/// An implementation of [`ReadRef`] for data in a stream that implements
|
||||
/// `Read + Seek`.
|
||||
///
|
||||
/// Contains a cache of read-only blocks of data, allowing references to
|
||||
/// them to be returned. Entries in the cache are never removed.
|
||||
/// Entries are keyed on the offset and size of the read.
|
||||
/// Currently overlapping reads are considered separate reads.
|
||||
#[derive(Debug)]
|
||||
pub struct ReadCache<R: Read + Seek> {
|
||||
cache: RefCell<ReadCacheInternal<R>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ReadCacheInternal<R: Read + Seek> {
|
||||
read: R,
|
||||
bufs: HashMap<(u64, u64), Box<[u8]>>,
|
||||
strings: HashMap<(u64, u8), Box<[u8]>>,
|
||||
}
|
||||
|
||||
impl<R: Read + Seek> ReadCache<R> {
|
||||
/// Create an empty `ReadCache` for the given stream.
|
||||
pub fn new(read: R) -> Self {
|
||||
ReadCache {
|
||||
cache: RefCell::new(ReadCacheInternal {
|
||||
read,
|
||||
bufs: HashMap::new(),
|
||||
strings: HashMap::new(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an implementation of `ReadRef` that restricts reads
|
||||
/// to the given range of the stream.
|
||||
pub fn range(&self, offset: u64, size: u64) -> ReadCacheRange<'_, R> {
|
||||
ReadCacheRange {
|
||||
r: self,
|
||||
offset,
|
||||
size,
|
||||
}
|
||||
}
|
||||
|
||||
/// Free buffers used by the cache.
|
||||
pub fn clear(&mut self) {
|
||||
self.cache.borrow_mut().bufs.clear();
|
||||
}
|
||||
|
||||
/// Unwrap this `ReadCache<R>`, returning the underlying reader.
|
||||
pub fn into_inner(self) -> R {
|
||||
self.cache.into_inner().read
|
||||
}
|
||||
}
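// A minimal usage sketch: wrap any `Read + Seek` stream (an in-memory cursor is
// assumed here) in a `ReadCache`, then read through the `ReadRef` implementation
// on `&ReadCache`. The function name is illustrative only.
#[allow(dead_code)]
fn peek_magic(data: Vec<u8>) -> Result<(), ()> {
    let cache = ReadCache::new(std::io::Cursor::new(data));
    // Reads are cached by (offset, size), so repeating this read is cheap.
    let magic = (&cache).read_bytes_at(0, 4)?;
    let _ = magic;
    Ok(())
}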
|
||||
|
||||
impl<'a, R: Read + Seek> ReadRef<'a> for &'a ReadCache<R> {
|
||||
fn len(self) -> Result<u64, ()> {
|
||||
let cache = &mut *self.cache.borrow_mut();
|
||||
cache.read.seek(SeekFrom::End(0)).map_err(|_| ())
|
||||
}
|
||||
|
||||
fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8], ()> {
|
||||
if size == 0 {
|
||||
return Ok(&[]);
|
||||
}
|
||||
let cache = &mut *self.cache.borrow_mut();
|
||||
let buf = match cache.bufs.entry((offset, size)) {
|
||||
Entry::Occupied(entry) => entry.into_mut(),
|
||||
Entry::Vacant(entry) => {
|
||||
let size = size.try_into().map_err(|_| ())?;
|
||||
cache.read.seek(SeekFrom::Start(offset)).map_err(|_| ())?;
|
||||
let mut bytes = vec![0; size].into_boxed_slice();
|
||||
cache.read.read_exact(&mut bytes).map_err(|_| ())?;
|
||||
entry.insert(bytes)
|
||||
}
|
||||
};
|
||||
// Extend the lifetime to that of self.
|
||||
// This is OK because we never mutate or remove entries.
|
||||
Ok(unsafe { mem::transmute::<&[u8], &[u8]>(buf) })
|
||||
}
|
||||
|
||||
fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8], ()> {
|
||||
let cache = &mut *self.cache.borrow_mut();
|
||||
let buf = match cache.strings.entry((range.start, delimiter)) {
|
||||
Entry::Occupied(entry) => entry.into_mut(),
|
||||
Entry::Vacant(entry) => {
|
||||
cache
|
||||
.read
|
||||
.seek(SeekFrom::Start(range.start))
|
||||
.map_err(|_| ())?;
|
||||
|
||||
let max_check: usize = (range.end - range.start).try_into().map_err(|_| ())?;
|
||||
// Strings should be relatively small.
|
||||
// TODO: make this configurable?
|
||||
let max_check = max_check.min(4096);
|
||||
|
||||
let mut bytes = Vec::new();
|
||||
let mut checked = 0;
|
||||
loop {
|
||||
bytes.resize((checked + 256).min(max_check), 0);
|
||||
let read = cache.read.read(&mut bytes[checked..]).map_err(|_| ())?;
|
||||
if read == 0 {
|
||||
return Err(());
|
||||
}
|
||||
if let Some(len) = memchr::memchr(delimiter, &bytes[checked..][..read]) {
|
||||
bytes.truncate(checked + len);
|
||||
break entry.insert(bytes.into_boxed_slice());
|
||||
}
|
||||
checked += read;
|
||||
if checked >= max_check {
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
// Extend the lifetime to that of self.
|
||||
// This is OK because we never mutate or remove entries.
|
||||
Ok(unsafe { mem::transmute::<&[u8], &[u8]>(buf) })
|
||||
}
|
||||
}
|
||||
|
||||
/// An implementation of [`ReadRef`] for a range of data in a stream that
|
||||
/// implements `Read + Seek`.
|
||||
///
|
||||
/// Shares an underlying `ReadCache` with a lifetime of `'a`.
|
||||
#[derive(Debug)]
|
||||
pub struct ReadCacheRange<'a, R: Read + Seek> {
|
||||
r: &'a ReadCache<R>,
|
||||
offset: u64,
|
||||
size: u64,
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> Clone for ReadCacheRange<'a, R> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> Copy for ReadCacheRange<'a, R> {}
|
||||
|
||||
impl<'a, R: Read + Seek> ReadRef<'a> for ReadCacheRange<'a, R> {
|
||||
fn len(self) -> Result<u64, ()> {
|
||||
Ok(self.size)
|
||||
}
|
||||
|
||||
fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8], ()> {
|
||||
if size == 0 {
|
||||
return Ok(&[]);
|
||||
}
|
||||
let end = offset.checked_add(size).ok_or(())?;
|
||||
if end > self.size {
|
||||
return Err(());
|
||||
}
|
||||
let r_offset = self.offset.checked_add(offset).ok_or(())?;
|
||||
self.r.read_bytes_at(r_offset, size)
|
||||
}
|
||||
|
||||
fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8], ()> {
|
||||
let r_start = self.offset.checked_add(range.start).ok_or(())?;
|
||||
let r_end = self.offset.checked_add(range.end).ok_or(())?;
|
||||
let bytes = self.r.read_bytes_at_until(r_start..r_end, delimiter)?;
|
||||
let size = bytes.len().try_into().map_err(|_| ())?;
|
||||
let end = range.start.checked_add(size).ok_or(())?;
|
||||
if end > self.size {
|
||||
return Err(());
|
||||
}
|
||||
Ok(bytes)
|
||||
}
|
||||
}
|
||||
137
vendor/object/src/read/read_ref.rs
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
#![allow(clippy::len_without_is_empty)]
|
||||
|
||||
use core::convert::TryInto;
|
||||
use core::ops::Range;
|
||||
use core::{mem, result};
|
||||
|
||||
use crate::pod::{from_bytes, slice_from_bytes, Pod};
|
||||
|
||||
type Result<T> = result::Result<T, ()>;
|
||||
|
||||
/// A trait for reading references to [`Pod`] types from a block of data.
|
||||
///
|
||||
/// This allows parsers to handle both of these cases:
|
||||
/// - the block of data exists in memory, and it is desirable
|
||||
/// to use references to this block instead of copying it,
|
||||
/// - the block of data exists in storage, and it is desirable
|
||||
/// to read on demand to minimize I/O and memory usage.
|
||||
///
|
||||
/// The methods accept `self` by value because `Self` is expected to behave
|
||||
/// similar to a reference: it may be a reference with a lifetime of `'a`,
|
||||
/// or it may be a wrapper of a reference.
|
||||
///
|
||||
/// The `Clone` and `Copy` bounds are for convenience, and since `Self` is
|
||||
/// expected to be similar to a reference, these are easily satisfied.
|
||||
///
|
||||
/// Object file parsers typically use offsets to locate the structures
|
||||
/// in the block, and will most commonly use the `*_at` methods to
|
||||
/// read a structure at a known offset.
|
||||
///
|
||||
/// Occasionally file parsers will need to treat the block as a stream,
|
||||
/// and so convenience methods are provided that update an offset with
|
||||
/// the size that was read.
|
||||
//
|
||||
// An alternative would be for methods to accept `&mut self` and use a
|
||||
// `seek` method instead of the `offset` parameters, but this is less
|
||||
// convenient for implementers.
|
||||
pub trait ReadRef<'a>: Clone + Copy {
|
||||
/// The total size of the block of data.
|
||||
fn len(self) -> Result<u64>;
|
||||
|
||||
/// Get a reference to a `u8` slice at the given offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8]>;
|
||||
|
||||
/// Get a reference to a delimited `u8` slice which starts at `range.start`.
|
||||
///
|
||||
/// Does not include the delimiter.
|
||||
///
|
||||
/// Returns an error if the range is out of bounds or the delimiter is
|
||||
/// not found in the range.
|
||||
fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8]>;
|
||||
|
||||
/// Get a reference to a `u8` slice at the given offset, and update the offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
fn read_bytes(self, offset: &mut u64, size: u64) -> Result<&'a [u8]> {
|
||||
let bytes = self.read_bytes_at(*offset, size)?;
|
||||
*offset = offset.wrapping_add(size);
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
/// Get a reference to a `Pod` type at the given offset, and update the offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
///
|
||||
/// The default implementation uses `read_bytes`, and returns an error if
|
||||
/// `read_bytes` does not return bytes with the correct alignment for `T`.
|
||||
/// Implementors may want to provide their own implementation that ensures
|
||||
/// the alignment can be satisfied. Alternatively, only use this method with
|
||||
/// types that do not need alignment (see the `unaligned` feature of this crate).
|
||||
fn read<T: Pod>(self, offset: &mut u64) -> Result<&'a T> {
|
||||
let size = mem::size_of::<T>().try_into().map_err(|_| ())?;
|
||||
let bytes = self.read_bytes(offset, size)?;
|
||||
let (t, _) = from_bytes(bytes)?;
|
||||
Ok(t)
|
||||
}
|
||||
|
||||
/// Get a reference to a `Pod` type at the given offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
///
|
||||
/// Also see the `read` method for information regarding alignment of `T`.
|
||||
fn read_at<T: Pod>(self, mut offset: u64) -> Result<&'a T> {
|
||||
self.read(&mut offset)
|
||||
}
|
||||
|
||||
/// Get a reference to a slice of a `Pod` type at the given offset, and update the offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
///
|
||||
/// Also see the `read` method for information regarding alignment of `T`.
|
||||
fn read_slice<T: Pod>(self, offset: &mut u64, count: usize) -> Result<&'a [T]> {
|
||||
let size = count
|
||||
.checked_mul(mem::size_of::<T>())
|
||||
.ok_or(())?
|
||||
.try_into()
|
||||
.map_err(|_| ())?;
|
||||
let bytes = self.read_bytes(offset, size)?;
|
||||
let (t, _) = slice_from_bytes(bytes, count)?;
|
||||
Ok(t)
|
||||
}
|
||||
|
||||
/// Get a reference to a slice of a `Pod` type at the given offset.
|
||||
///
|
||||
/// Returns an error if offset or size are out of bounds.
|
||||
///
|
||||
/// Also see the `read` method for information regarding alignment of `T`.
|
||||
fn read_slice_at<T: Pod>(self, mut offset: u64, count: usize) -> Result<&'a [T]> {
|
||||
self.read_slice(&mut offset, count)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ReadRef<'a> for &'a [u8] {
|
||||
fn len(self) -> Result<u64> {
|
||||
self.len().try_into().map_err(|_| ())
|
||||
}
|
||||
|
||||
fn read_bytes_at(self, offset: u64, size: u64) -> Result<&'a [u8]> {
|
||||
let offset: usize = offset.try_into().map_err(|_| ())?;
|
||||
let size: usize = size.try_into().map_err(|_| ())?;
|
||||
self.get(offset..).ok_or(())?.get(..size).ok_or(())
|
||||
}
|
||||
|
||||
fn read_bytes_at_until(self, range: Range<u64>, delimiter: u8) -> Result<&'a [u8]> {
|
||||
let start: usize = range.start.try_into().map_err(|_| ())?;
|
||||
let end: usize = range.end.try_into().map_err(|_| ())?;
|
||||
let bytes = self.get(start..end).ok_or(())?;
|
||||
match memchr::memchr(delimiter, bytes) {
|
||||
Some(len) => {
|
||||
// This will never fail.
|
||||
bytes.get(..len).ok_or(())
|
||||
}
|
||||
None => Err(()),
|
||||
}
|
||||
}
|
||||
}
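// A minimal usage sketch of the `&[u8]` implementation above: read an unaligned
// little-endian `u32` (advancing the offset) and then a NUL-terminated name.
// The function name is illustrative only.
#[allow(dead_code)]
fn read_example(data: &[u8]) -> Result<&[u8]> {
    use crate::endian::{LittleEndian as LE, U32Bytes};

    let mut offset = 0;
    // `U32Bytes` has no alignment requirement, so the default `read` is safe here.
    let value = data.read::<U32Bytes<LE>>(&mut offset)?;
    let _ = value.get(LE);

    // Returns the bytes up to, but not including, the NUL delimiter.
    data.read_bytes_at_until(offset..data.len() as u64, 0)
}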
|
||||
551
vendor/object/src/read/traits.rs
vendored
Normal file
@@ -0,0 +1,551 @@
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, CodeView, ComdatKind, CompressedData, CompressedFileRange, Export,
|
||||
FileFlags, Import, ObjectKind, ObjectMap, Relocation, Result, SectionFlags, SectionIndex,
|
||||
SectionKind, SegmentFlags, SubArchitecture, SymbolFlags, SymbolIndex, SymbolKind, SymbolMap,
|
||||
SymbolMapName, SymbolScope, SymbolSection,
|
||||
};
|
||||
use crate::Endianness;
|
||||
|
||||
/// An object file.
|
||||
///
|
||||
/// This is the primary trait for the unified read API.
|
||||
pub trait Object<'data: 'file, 'file>: read::private::Sealed {
|
||||
/// A loadable segment in the object file.
|
||||
type Segment: ObjectSegment<'data>;
|
||||
|
||||
/// An iterator for the loadable segments in the object file.
|
||||
type SegmentIterator: Iterator<Item = Self::Segment>;
|
||||
|
||||
/// A section in the object file.
|
||||
type Section: ObjectSection<'data>;
|
||||
|
||||
/// An iterator for the sections in the object file.
|
||||
type SectionIterator: Iterator<Item = Self::Section>;
|
||||
|
||||
/// A COMDAT section group in the object file.
|
||||
type Comdat: ObjectComdat<'data>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in the object file.
|
||||
type ComdatIterator: Iterator<Item = Self::Comdat>;
|
||||
|
||||
/// A symbol in the object file.
|
||||
type Symbol: ObjectSymbol<'data>;
|
||||
|
||||
/// An iterator for symbols in the object file.
|
||||
type SymbolIterator: Iterator<Item = Self::Symbol>;
|
||||
|
||||
/// A symbol table in the object file.
|
||||
type SymbolTable: ObjectSymbolTable<
|
||||
'data,
|
||||
Symbol = Self::Symbol,
|
||||
SymbolIterator = Self::SymbolIterator,
|
||||
>;
|
||||
|
||||
/// An iterator for the dynamic relocations in the file.
|
||||
///
|
||||
/// The first field in the item tuple is the address
|
||||
/// that the relocation applies to.
|
||||
type DynamicRelocationIterator: Iterator<Item = (u64, Relocation)>;
|
||||
|
||||
/// Get the architecture type of the file.
|
||||
fn architecture(&self) -> Architecture;
|
||||
|
||||
/// Get the sub-architecture type of the file if known.
|
||||
///
|
||||
/// A value of `None` has a range of meanings: the file supports all
|
||||
/// sub-architectures, the file does not explicitly specify a
|
||||
/// sub-architecture, or the sub-architecture is currently unrecognized.
|
||||
fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Get the endianness of the file.
|
||||
#[inline]
|
||||
fn endianness(&self) -> Endianness {
|
||||
if self.is_little_endian() {
|
||||
Endianness::Little
|
||||
} else {
|
||||
Endianness::Big
|
||||
}
|
||||
}
|
||||
|
||||
/// Return true if the file is little endian, false if it is big endian.
|
||||
fn is_little_endian(&self) -> bool;
|
||||
|
||||
/// Return true if the file can contain 64-bit addresses.
|
||||
fn is_64(&self) -> bool;
|
||||
|
||||
/// Return the kind of this object.
|
||||
fn kind(&self) -> ObjectKind;
|
||||
|
||||
/// Get an iterator for the loadable segments in the file.
|
||||
///
|
||||
/// For ELF, this is program headers with type [`PT_LOAD`](crate::elf::PT_LOAD).
|
||||
/// For Mach-O, this is load commands with type [`LC_SEGMENT`](crate::macho::LC_SEGMENT)
|
||||
/// or [`LC_SEGMENT_64`](crate::macho::LC_SEGMENT_64).
|
||||
/// For PE, this is all sections.
|
||||
fn segments(&'file self) -> Self::SegmentIterator;
|
||||
|
||||
/// Get the section named `section_name`, if such a section exists.
|
||||
///
|
||||
/// If `section_name` starts with a '.' then it is treated as a system section name,
|
||||
/// and is compared using the conventions specific to the object file format. This
|
||||
/// includes:
|
||||
/// - if ".debug_str_offsets" is requested for a Mach-O object file, then the actual
|
||||
/// section name that is searched for is "__debug_str_offs".
|
||||
/// - if ".debug_info" is requested for an ELF object file, then
|
||||
/// ".zdebug_info" may be returned (and similarly for other debug sections).
|
||||
///
|
||||
/// For some object files, multiple segments may contain sections with the same
|
||||
/// name. In this case, the first matching section will be used.
|
||||
///
|
||||
/// This method skips over sections with invalid names.
|
||||
fn section_by_name(&'file self, section_name: &str) -> Option<Self::Section> {
|
||||
self.section_by_name_bytes(section_name.as_bytes())
|
||||
}
|
||||
|
||||
/// Like [`Self::section_by_name`], but allows names that are not UTF-8.
|
||||
fn section_by_name_bytes(&'file self, section_name: &[u8]) -> Option<Self::Section>;
|
||||
|
||||
/// Get the section at the given index.
|
||||
///
|
||||
/// The meaning of the index depends on the object file.
|
||||
///
|
||||
/// For some object files, this requires iterating through all sections.
|
||||
///
|
||||
/// Returns an error if the index is invalid.
|
||||
fn section_by_index(&'file self, index: SectionIndex) -> Result<Self::Section>;
|
||||
|
||||
/// Get an iterator for the sections in the file.
|
||||
fn sections(&'file self) -> Self::SectionIterator;
|
||||
|
||||
/// Get an iterator for the COMDAT section groups in the file.
|
||||
fn comdats(&'file self) -> Self::ComdatIterator;
|
||||
|
||||
/// Get the debugging symbol table, if any.
|
||||
fn symbol_table(&'file self) -> Option<Self::SymbolTable>;
|
||||
|
||||
/// Get the debugging symbol at the given index.
|
||||
///
|
||||
/// The meaning of the index depends on the object file.
|
||||
///
|
||||
/// Returns an error if the index is invalid.
|
||||
fn symbol_by_index(&'file self, index: SymbolIndex) -> Result<Self::Symbol>;
|
||||
|
||||
/// Get an iterator for the debugging symbols in the file.
|
||||
///
|
||||
/// This may skip over symbols that are malformed or unsupported.
|
||||
///
|
||||
/// For Mach-O files, this does not include STAB entries.
|
||||
fn symbols(&'file self) -> Self::SymbolIterator;
|
||||
|
||||
/// Get the symbol named `symbol_name`, if the symbol exists.
|
||||
fn symbol_by_name(&'file self, symbol_name: &str) -> Option<Self::Symbol> {
|
||||
self.symbol_by_name_bytes(symbol_name.as_bytes())
|
||||
}
|
||||
|
||||
/// Like [`Self::symbol_by_name`], but allows names that are not UTF-8.
|
||||
fn symbol_by_name_bytes(&'file self, symbol_name: &[u8]) -> Option<Self::Symbol> {
|
||||
self.symbols()
|
||||
.find(|sym| sym.name_bytes() == Ok(symbol_name))
|
||||
}
|
||||
|
||||
/// Get the dynamic linking symbol table, if any.
|
||||
///
|
||||
/// Only ELF has a separate dynamic linking symbol table.
|
||||
/// Consider using [`Self::exports`] or [`Self::imports`] instead.
|
||||
fn dynamic_symbol_table(&'file self) -> Option<Self::SymbolTable>;
|
||||
|
||||
/// Get an iterator for the dynamic linking symbols in the file.
|
||||
///
|
||||
/// This may skip over symbols that are malformed or unsupported.
|
||||
///
|
||||
/// Only ELF has dynamic linking symbols.
|
||||
/// Other file formats will return an empty iterator.
|
||||
/// Consider using [`Self::exports`] or [`Self::imports`] instead.
|
||||
fn dynamic_symbols(&'file self) -> Self::SymbolIterator;
|
||||
|
||||
/// Get the dynamic relocations for this file.
|
||||
///
|
||||
/// Symbol indices in these relocations refer to the dynamic symbol table.
|
||||
///
|
||||
/// Only ELF has dynamic relocations.
|
||||
fn dynamic_relocations(&'file self) -> Option<Self::DynamicRelocationIterator>;
|
||||
|
||||
/// Construct a map from addresses to symbol names.
|
||||
///
|
||||
/// The map will only contain defined text and data symbols.
|
||||
/// The dynamic symbol table will only be used if there are no debugging symbols.
|
||||
fn symbol_map(&'file self) -> SymbolMap<SymbolMapName<'data>> {
|
||||
let mut symbols = Vec::new();
|
||||
if let Some(table) = self.symbol_table().or_else(|| self.dynamic_symbol_table()) {
|
||||
// Sometimes symbols share addresses. Collect them all then choose the "best".
|
||||
let mut all_symbols = Vec::new();
|
||||
for symbol in table.symbols() {
|
||||
// Must have an address.
|
||||
if !symbol.is_definition() {
|
||||
continue;
|
||||
}
|
||||
// Must have a name.
|
||||
let name = match symbol.name() {
|
||||
Ok(name) => name,
|
||||
_ => continue,
|
||||
};
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Lower is better.
|
||||
let mut priority = 0u32;
|
||||
|
||||
// Prefer known kind.
|
||||
match symbol.kind() {
|
||||
SymbolKind::Text | SymbolKind::Data => {}
|
||||
SymbolKind::Unknown => priority += 1,
|
||||
_ => continue,
|
||||
}
|
||||
priority *= 2;
|
||||
|
||||
// Prefer global visibility.
|
||||
priority += match symbol.scope() {
|
||||
SymbolScope::Unknown => 3,
|
||||
SymbolScope::Compilation => 2,
|
||||
SymbolScope::Linkage => 1,
|
||||
SymbolScope::Dynamic => 0,
|
||||
};
|
||||
priority *= 4;
|
||||
|
||||
// Prefer later entries (earlier symbol is likely to be less specific).
|
||||
let index = !0 - symbol.index().0;
|
||||
|
||||
// Tuple is ordered for sort.
|
||||
all_symbols.push((symbol.address(), priority, index, name));
|
||||
}
|
||||
// Unstable sort is okay because tuple includes index.
|
||||
all_symbols.sort_unstable();
|
||||
|
||||
let mut previous_address = !0;
|
||||
for (address, _priority, _index, name) in all_symbols {
|
||||
if address != previous_address {
|
||||
symbols.push(SymbolMapName::new(address, name));
|
||||
previous_address = address;
|
||||
}
|
||||
}
|
||||
}
|
||||
SymbolMap::new(symbols)
|
||||
}
|
||||
|
||||
/// Construct a map from addresses to symbol names and object file names.
|
||||
///
|
||||
/// This is derived from Mach-O STAB entries.
|
||||
fn object_map(&'file self) -> ObjectMap<'data> {
|
||||
ObjectMap::default()
|
||||
}
|
||||
|
||||
/// Get the imported symbols.
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>>;
|
||||
|
||||
/// Get the exported symbols that expose both a name and an address.
|
||||
///
|
||||
/// Some file formats may provide other kinds of symbols that can be retrieved using
|
||||
/// the low level API.
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>>;
|
||||
|
||||
/// Return true if the file contains DWARF debug information sections, false if not.
|
||||
fn has_debug_symbols(&self) -> bool;
|
||||
|
||||
/// The UUID from a Mach-O [`LC_UUID`](crate::macho::LC_UUID) load command.
|
||||
#[inline]
|
||||
fn mach_uuid(&self) -> Result<Option<[u8; 16]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// The build ID from an ELF [`NT_GNU_BUILD_ID`](crate::elf::NT_GNU_BUILD_ID) note.
|
||||
#[inline]
|
||||
fn build_id(&self) -> Result<Option<&'data [u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// The filename and CRC from a `.gnu_debuglink` section.
|
||||
#[inline]
|
||||
fn gnu_debuglink(&self) -> Result<Option<(&'data [u8], u32)>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// The filename and build ID from a `.gnu_debugaltlink` section.
|
||||
#[inline]
|
||||
fn gnu_debugaltlink(&self) -> Result<Option<(&'data [u8], &'data [u8])>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// The filename and GUID from the PE CodeView section.
|
||||
#[inline]
|
||||
fn pdb_info(&self) -> Result<Option<CodeView<'_>>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Get the base address used for relative virtual addresses.
|
||||
///
|
||||
/// Currently this is only non-zero for PE.
|
||||
fn relative_address_base(&'file self) -> u64;
|
||||
|
||||
/// Get the virtual address of the entry point of the binary.
|
||||
fn entry(&'file self) -> u64;
|
||||
|
||||
/// File flags that are specific to each file format.
|
||||
fn flags(&self) -> FileFlags;
|
||||
}
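// A minimal usage sketch: a helper that is generic over any `Object`
// implementation and walks its sections and symbols through the unified API.
// The function name is illustrative only.
#[allow(dead_code)]
fn visit<'data: 'file, 'file, O: Object<'data, 'file>>(file: &'file O) {
    let _ = (file.architecture(), file.is_64(), file.kind());
    for section in file.sections() {
        // Section names may not be UTF-8, so use the raw bytes here.
        let _ = (section.index(), section.name_bytes());
    }
    for symbol in file.symbols() {
        let _ = (symbol.address(), symbol.name_bytes());
    }
}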
|
||||
|
||||
/// A loadable segment in an [`Object`].
|
||||
///
|
||||
/// This trait is part of the unified read API.
|
||||
pub trait ObjectSegment<'data>: read::private::Sealed {
|
||||
/// Returns the virtual address of the segment.
|
||||
fn address(&self) -> u64;
|
||||
|
||||
/// Returns the size of the segment in memory.
|
||||
fn size(&self) -> u64;
|
||||
|
||||
/// Returns the alignment of the segment in memory.
|
||||
fn align(&self) -> u64;
|
||||
|
||||
/// Returns the offset and size of the segment in the file.
|
||||
fn file_range(&self) -> (u64, u64);
|
||||
|
||||
/// Returns a reference to the file contents of the segment.
|
||||
///
|
||||
/// The length of this data may be different from the size of the
|
||||
/// segment in memory.
|
||||
fn data(&self) -> Result<&'data [u8]>;
|
||||
|
||||
/// Return the segment data in the given range.
|
||||
///
|
||||
/// Returns `Ok(None)` if the segment does not contain the given range.
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>>;
|
||||
|
||||
/// Returns the name of the segment.
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>>;
|
||||
|
||||
/// Returns the name of the segment.
|
||||
///
|
||||
/// Returns an error if the name is not UTF-8.
|
||||
fn name(&self) -> Result<Option<&str>>;
|
||||
|
||||
/// Return the flags of the segment.
|
||||
fn flags(&self) -> SegmentFlags;
|
||||
}
|
||||
|
||||
/// A section in an [`Object`].
|
||||
///
|
||||
/// This trait is part of the unified read API.
|
||||
pub trait ObjectSection<'data>: read::private::Sealed {
|
||||
/// An iterator for the relocations for a section.
|
||||
///
|
||||
/// The first field in the item tuple is the section offset
|
||||
/// that the relocation applies to.
|
||||
type RelocationIterator: Iterator<Item = (u64, Relocation)>;
|
||||
|
||||
/// Returns the section index.
|
||||
fn index(&self) -> SectionIndex;
|
||||
|
||||
/// Returns the address of the section.
|
||||
fn address(&self) -> u64;
|
||||
|
||||
/// Returns the size of the section in memory.
|
||||
fn size(&self) -> u64;
|
||||
|
||||
/// Returns the alignment of the section in memory.
|
||||
fn align(&self) -> u64;
|
||||
|
||||
/// Returns the offset and size of the section in the file, if any.
|
||||
fn file_range(&self) -> Option<(u64, u64)>;
|
||||
|
||||
/// Returns the raw contents of the section.
|
||||
///
|
||||
/// The length of this data may be different from the size of the
|
||||
/// section in memory.
|
||||
///
|
||||
/// This does not do any decompression.
|
||||
fn data(&self) -> Result<&'data [u8]>;
|
||||
|
||||
/// Return the raw contents of the section data in the given range.
|
||||
///
|
||||
/// This does not do any decompression.
|
||||
///
|
||||
/// Returns `Ok(None)` if the section does not contain the given range.
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>>;
|
||||
|
||||
/// Returns the potentially compressed file range of the section,
|
||||
/// along with information about the compression.
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange>;
|
||||
|
||||
/// Returns the potentially compressed contents of the section,
|
||||
/// along with information about the compression.
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>>;
|
||||
|
||||
/// Returns the uncompressed contents of the section.
|
||||
///
|
||||
/// The length of this data may be different from the size of the
|
||||
/// section in memory.
|
||||
///
|
||||
/// If no compression is detected, then returns the data unchanged.
|
||||
/// Returns `Err` if decompression fails.
|
||||
fn uncompressed_data(&self) -> Result<Cow<'data, [u8]>> {
|
||||
self.compressed_data()?.decompress()
|
||||
}
|
||||
|
||||
/// Returns the name of the section.
|
||||
fn name_bytes(&self) -> Result<&[u8]>;
|
||||
|
||||
/// Returns the name of the section.
|
||||
///
|
||||
/// Returns an error if the name is not UTF-8.
|
||||
fn name(&self) -> Result<&str>;
|
||||
|
||||
/// Returns the name of the segment for this section.
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>>;
|
||||
|
||||
/// Returns the name of the segment for this section.
|
||||
///
|
||||
/// Returns an error if the name is not UTF-8.
|
||||
fn segment_name(&self) -> Result<Option<&str>>;
|
||||
|
||||
/// Return the kind of this section.
|
||||
fn kind(&self) -> SectionKind;
|
||||
|
||||
/// Get the relocations for this section.
|
||||
fn relocations(&self) -> Self::RelocationIterator;
|
||||
|
||||
/// Section flags that are specific to each file format.
|
||||
fn flags(&self) -> SectionFlags;
|
||||
}
|
||||
|
||||
/// A COMDAT section group in an [`Object`].
|
||||
///
|
||||
/// This trait is part of the unified read API.
|
||||
pub trait ObjectComdat<'data>: read::private::Sealed {
|
||||
/// An iterator for the sections in the section group.
|
||||
type SectionIterator: Iterator<Item = SectionIndex>;
|
||||
|
||||
/// Returns the COMDAT selection kind.
|
||||
fn kind(&self) -> ComdatKind;
|
||||
|
||||
/// Returns the index of the symbol used for the name of the COMDAT section group.
|
||||
fn symbol(&self) -> SymbolIndex;
|
||||
|
||||
/// Returns the name of the COMDAT section group.
|
||||
fn name_bytes(&self) -> Result<&[u8]>;
|
||||
|
||||
/// Returns the name of the COMDAT section group.
|
||||
///
|
||||
/// Returns an error if the name is not UTF-8.
|
||||
fn name(&self) -> Result<&str>;
|
||||
|
||||
/// Get the sections in this section group.
|
||||
fn sections(&self) -> Self::SectionIterator;
|
||||
}
|
||||
|
||||
/// A symbol table in an [`Object`].
|
||||
///
|
||||
/// This trait is part of the unified read API.
|
||||
pub trait ObjectSymbolTable<'data>: read::private::Sealed {
|
||||
/// A symbol table entry.
|
||||
type Symbol: ObjectSymbol<'data>;
|
||||
|
||||
/// An iterator for the symbols in a symbol table.
|
||||
type SymbolIterator: Iterator<Item = Self::Symbol>;
|
||||
|
||||
/// Get an iterator for the symbols in the table.
|
||||
///
|
||||
/// This may skip over symbols that are malformed or unsupported.
|
||||
fn symbols(&self) -> Self::SymbolIterator;
|
||||
|
||||
/// Get the symbol at the given index.
|
||||
///
|
||||
/// The meaning of the index depends on the object file.
|
||||
///
|
||||
/// Returns an error if the index is invalid.
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol>;
|
||||
}
|
||||
|
||||
/// A symbol table entry in an [`Object`].
|
||||
///
|
||||
/// This trait is part of the unified read API.
|
||||
pub trait ObjectSymbol<'data>: read::private::Sealed {
|
||||
/// The index of the symbol.
|
||||
fn index(&self) -> SymbolIndex;
|
||||
|
||||
/// The name of the symbol.
|
||||
fn name_bytes(&self) -> Result<&'data [u8]>;
|
||||
|
||||
/// The name of the symbol.
|
||||
///
|
||||
/// Returns an error if the name is not UTF-8.
|
||||
fn name(&self) -> Result<&'data str>;
|
||||
|
||||
/// The address of the symbol. May be zero if the address is unknown.
|
||||
fn address(&self) -> u64;
|
||||
|
||||
/// The size of the symbol. May be zero if the size is unknown.
|
||||
fn size(&self) -> u64;
|
||||
|
||||
/// Return the kind of this symbol.
|
||||
fn kind(&self) -> SymbolKind;
|
||||
|
||||
/// Returns the section where the symbol is defined.
|
||||
fn section(&self) -> SymbolSection;
|
||||
|
||||
/// Returns the section index for the section containing this symbol.
|
||||
///
|
||||
/// May return `None` if the symbol is not defined in a section.
|
||||
fn section_index(&self) -> Option<SectionIndex> {
|
||||
self.section().index()
|
||||
}
|
||||
|
||||
/// Return true if the symbol is undefined.
|
||||
fn is_undefined(&self) -> bool;
|
||||
|
||||
/// Return true if the symbol is a definition of a function or data object
|
||||
/// that has a known address.
|
||||
///
|
||||
/// This is primarily used to implement [`Object::symbol_map`].
|
||||
fn is_definition(&self) -> bool;
|
||||
|
||||
/// Return true if the symbol is common data.
|
||||
///
|
||||
/// Note: does not check for [`SymbolSection::Section`] with [`SectionKind::Common`].
|
||||
fn is_common(&self) -> bool;
|
||||
|
||||
/// Return true if the symbol is weak.
|
||||
fn is_weak(&self) -> bool;
|
||||
|
||||
/// Returns the symbol scope.
|
||||
fn scope(&self) -> SymbolScope;
|
||||
|
||||
/// Return true if the symbol is visible outside of the compilation unit.
|
||||
///
|
||||
/// This treats [`SymbolScope::Unknown`] as global.
|
||||
fn is_global(&self) -> bool;
|
||||
|
||||
/// Return true if the symbol is only visible within the compilation unit.
|
||||
fn is_local(&self) -> bool;
|
||||
|
||||
/// Symbol flags that are specific to each file format.
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex>;
|
||||
}
|
||||
|
||||
/// An iterator for files that don't have dynamic relocations.
|
||||
#[derive(Debug)]
|
||||
pub struct NoDynamicRelocationIterator;
|
||||
|
||||
impl Iterator for NoDynamicRelocationIterator {
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
425
vendor/object/src/read/util.rs
vendored
Normal file
@@ -0,0 +1,425 @@
|
||||
use alloc::string::String;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt;
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use crate::pod::{from_bytes, slice_from_bytes, Pod};
|
||||
use crate::ReadRef;
|
||||
|
||||
/// A newtype for byte slices.
|
||||
///
|
||||
/// It has these important features:
|
||||
/// - no methods that can panic, such as `Index`
|
||||
/// - convenience methods for `Pod` types
|
||||
/// - a useful `Debug` implementation
|
||||
#[derive(Default, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Bytes<'data>(pub &'data [u8]);
|
||||
|
||||
impl<'data> fmt::Debug for Bytes<'data> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
debug_list_bytes(self.0, fmt)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data> Bytes<'data> {
|
||||
/// Return the length of the byte slice.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
/// Return true if the byte slice is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Skip over the given number of bytes at the start of the byte slice.
|
||||
///
|
||||
/// Modifies the byte slice to start after the bytes.
|
||||
///
|
||||
/// Returns an error if there are too few bytes.
|
||||
#[inline]
|
||||
pub fn skip(&mut self, offset: usize) -> Result<(), ()> {
|
||||
match self.0.get(offset..) {
|
||||
Some(tail) => {
|
||||
self.0 = tail;
|
||||
Ok(())
|
||||
}
|
||||
None => {
|
||||
self.0 = &[];
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a reference to the given number of bytes at the start of the byte slice.
|
||||
///
|
||||
/// Modifies the byte slice to start after the bytes.
|
||||
///
|
||||
/// Returns an error if there are too few bytes.
|
||||
#[inline]
|
||||
pub fn read_bytes(&mut self, count: usize) -> Result<Bytes<'data>, ()> {
|
||||
match (self.0.get(..count), self.0.get(count..)) {
|
||||
(Some(head), Some(tail)) => {
|
||||
self.0 = tail;
|
||||
Ok(Bytes(head))
|
||||
}
|
||||
_ => {
|
||||
self.0 = &[];
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a reference to the given number of bytes at the given offset of the byte slice.
|
||||
///
|
||||
/// Returns an error if the offset is invalid or there are too few bytes.
|
||||
#[inline]
|
||||
pub fn read_bytes_at(mut self, offset: usize, count: usize) -> Result<Bytes<'data>, ()> {
|
||||
self.skip(offset)?;
|
||||
self.read_bytes(count)
|
||||
}
|
||||
|
||||
/// Return a reference to a `Pod` struct at the start of the byte slice.
|
||||
///
|
||||
/// Modifies the byte slice to start after the bytes.
|
||||
///
|
||||
/// Returns an error if there are too few bytes or the slice is incorrectly aligned.
|
||||
#[inline]
|
||||
pub fn read<T: Pod>(&mut self) -> Result<&'data T, ()> {
|
||||
match from_bytes(self.0) {
|
||||
Ok((value, tail)) => {
|
||||
self.0 = tail;
|
||||
Ok(value)
|
||||
}
|
||||
Err(()) => {
|
||||
self.0 = &[];
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a reference to a `Pod` struct at the given offset of the byte slice.
|
||||
///
|
||||
/// Returns an error if there are too few bytes or the offset is incorrectly aligned.
|
||||
#[inline]
|
||||
pub fn read_at<T: Pod>(mut self, offset: usize) -> Result<&'data T, ()> {
|
||||
self.skip(offset)?;
|
||||
self.read()
|
||||
}
|
||||
|
||||
/// Return a reference to a slice of `Pod` structs at the start of the byte slice.
|
||||
///
|
||||
/// Modifies the byte slice to start after the bytes.
|
||||
///
|
||||
/// Returns an error if there are too few bytes or the offset is incorrectly aligned.
|
||||
#[inline]
|
||||
pub fn read_slice<T: Pod>(&mut self, count: usize) -> Result<&'data [T], ()> {
|
||||
match slice_from_bytes(self.0, count) {
|
||||
Ok((value, tail)) => {
|
||||
self.0 = tail;
|
||||
Ok(value)
|
||||
}
|
||||
Err(()) => {
|
||||
self.0 = &[];
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a reference to a slice of `Pod` structs at the given offset of the byte slice.
|
||||
///
|
||||
/// Returns an error if there are too few bytes or the offset is incorrectly aligned.
|
||||
#[inline]
|
||||
pub fn read_slice_at<T: Pod>(mut self, offset: usize, count: usize) -> Result<&'data [T], ()> {
|
||||
self.skip(offset)?;
|
||||
self.read_slice(count)
|
||||
}
|
||||
|
||||
/// Read a null terminated string.
|
||||
///
|
||||
/// Does not assume any encoding.
|
||||
/// Reads past the null byte, but doesn't return it.
|
||||
#[inline]
|
||||
pub fn read_string(&mut self) -> Result<&'data [u8], ()> {
|
||||
match memchr::memchr(b'\0', self.0) {
|
||||
Some(null) => {
|
||||
// These will never fail.
|
||||
let bytes = self.read_bytes(null)?;
|
||||
self.skip(1)?;
|
||||
Ok(bytes.0)
|
||||
}
|
||||
None => {
|
||||
self.0 = &[];
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Read a null terminated string at an offset.
|
||||
///
|
||||
/// Does not assume any encoding. Does not return the null byte.
|
||||
#[inline]
|
||||
pub fn read_string_at(mut self, offset: usize) -> Result<&'data [u8], ()> {
|
||||
self.skip(offset)?;
|
||||
self.read_string()
|
||||
}
|
||||
|
||||
/// Read an unsigned LEB128 number.
|
||||
pub fn read_uleb128(&mut self) -> Result<u64, ()> {
|
||||
let mut result = 0;
|
||||
let mut shift = 0;
|
||||
|
||||
loop {
|
||||
let byte = *self.read::<u8>()?;
|
||||
if shift == 63 && byte != 0x00 && byte != 0x01 {
|
||||
return Err(());
|
||||
}
|
||||
result |= u64::from(byte & 0x7f) << shift;
|
||||
shift += 7;
|
||||
|
||||
if byte & 0x80 == 0 {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Read a signed LEB128 number.
|
||||
pub fn read_sleb128(&mut self) -> Result<i64, ()> {
|
||||
let mut result = 0;
|
||||
let mut shift = 0;
|
||||
|
||||
loop {
|
||||
let byte = *self.read::<u8>()?;
|
||||
if shift == 63 && byte != 0x00 && byte != 0x7f {
|
||||
return Err(());
|
||||
}
|
||||
result |= i64::from(byte & 0x7f) << shift;
|
||||
shift += 7;
|
||||
|
||||
if byte & 0x80 == 0 {
|
||||
if shift < 64 && (byte & 0x40) != 0 {
|
||||
// Sign extend the result.
|
||||
result |= !0 << shift;
|
||||
}
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only for Debug impl of `Bytes`.
|
||||
fn debug_list_bytes(bytes: &[u8], fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let mut list = fmt.debug_list();
|
||||
list.entries(bytes.iter().take(8).copied().map(DebugByte));
|
||||
if bytes.len() > 8 {
|
||||
list.entry(&DebugLen(bytes.len()));
|
||||
}
|
||||
list.finish()
|
||||
}
|
||||
|
||||
struct DebugByte(u8);
|
||||
|
||||
impl fmt::Debug for DebugByte {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(fmt, "0x{:02x}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
struct DebugLen(usize);
|
||||
|
||||
impl fmt::Debug for DebugLen {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(fmt, "...; {}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// A newtype for byte strings.
|
||||
///
|
||||
/// For byte slices that are strings of an unknown encoding.
|
||||
///
|
||||
/// Provides a `Debug` implementation that interprets the bytes as UTF-8.
|
||||
#[derive(Default, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) struct ByteString<'data>(pub &'data [u8]);
|
||||
|
||||
impl<'data> fmt::Debug for ByteString<'data> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(fmt, "\"{}\"", String::from_utf8_lossy(self.0))
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[inline]
|
||||
pub(crate) fn align(offset: usize, size: usize) -> usize {
|
||||
(offset + (size - 1)) & !(size - 1)
|
||||
}
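// Editor's note (illustrative): the mask trick above assumes `size` is a power of
// two; e.g. `align(0, 4) == 0`, `align(1, 4) == 4`, and `align(9, 8) == 16`.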
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn data_range(
|
||||
data: &[u8],
|
||||
data_address: u64,
|
||||
range_address: u64,
|
||||
size: u64,
|
||||
) -> Option<&[u8]> {
|
||||
let offset = range_address.checked_sub(data_address)?;
|
||||
data.get(offset.try_into().ok()?..)?
|
||||
.get(..size.try_into().ok()?)
|
||||
}
|
||||
|
||||
/// A table of zero-terminated strings.
|
||||
///
|
||||
/// This is used by most file formats for strings such as section names and symbol names.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct StringTable<'data, R = &'data [u8]>
|
||||
where
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
data: Option<R>,
|
||||
start: u64,
|
||||
end: u64,
|
||||
marker: PhantomData<&'data ()>,
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> StringTable<'data, R> {
|
||||
/// Interpret the given data as a string table.
|
||||
pub fn new(data: R, start: u64, end: u64) -> Self {
|
||||
StringTable {
|
||||
data: Some(data),
|
||||
start,
|
||||
end,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the string at the given offset.
|
||||
pub fn get(&self, offset: u32) -> Result<&'data [u8], ()> {
|
||||
match self.data {
|
||||
Some(data) => {
|
||||
let r_start = self.start.checked_add(offset.into()).ok_or(())?;
|
||||
data.read_bytes_at_until(r_start..self.end, 0)
|
||||
}
|
||||
None => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> Default for StringTable<'data, R> {
|
||||
fn default() -> Self {
|
||||
StringTable {
|
||||
data: None,
|
||||
start: 0,
|
||||
end: 0,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::pod::bytes_of;
|
||||
|
||||
#[test]
|
||||
fn bytes() {
|
||||
let x = u32::to_be(0x0123_4567);
|
||||
let data = Bytes(bytes_of(&x));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.skip(0), Ok(()));
|
||||
assert_eq!(bytes, data);
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.skip(4), Ok(()));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.skip(5), Err(()));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_bytes(0), Ok(Bytes(&[])));
|
||||
assert_eq!(bytes, data);
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_bytes(4), Ok(data));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_bytes(5), Err(()));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
assert_eq!(data.read_bytes_at(0, 0), Ok(Bytes(&[])));
|
||||
assert_eq!(data.read_bytes_at(4, 0), Ok(Bytes(&[])));
|
||||
assert_eq!(data.read_bytes_at(0, 4), Ok(data));
|
||||
assert_eq!(data.read_bytes_at(1, 4), Err(()));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read::<u16>(), Ok(&u16::to_be(0x0123)));
|
||||
assert_eq!(bytes, Bytes(&[0x45, 0x67]));
|
||||
assert_eq!(data.read_at::<u16>(2), Ok(&u16::to_be(0x4567)));
|
||||
assert_eq!(data.read_at::<u16>(3), Err(()));
|
||||
assert_eq!(data.read_at::<u16>(4), Err(()));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read::<u32>(), Ok(&x));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read::<u64>(), Err(()));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_slice::<u8>(0), Ok(&[][..]));
|
||||
assert_eq!(bytes, data);
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_slice::<u8>(4), Ok(data.0));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_slice::<u8>(5), Err(()));
|
||||
assert_eq!(bytes, Bytes(&[]));
|
||||
|
||||
assert_eq!(data.read_slice_at::<u8>(0, 0), Ok(&[][..]));
|
||||
assert_eq!(data.read_slice_at::<u8>(4, 0), Ok(&[][..]));
|
||||
assert_eq!(data.read_slice_at::<u8>(0, 4), Ok(data.0));
|
||||
assert_eq!(data.read_slice_at::<u8>(1, 4), Err(()));
|
||||
|
||||
let data = Bytes(&[0x01, 0x02, 0x00, 0x04]);
|
||||
|
||||
let mut bytes = data;
|
||||
assert_eq!(bytes.read_string(), Ok(&data.0[..2]));
|
||||
assert_eq!(bytes.0, &data.0[3..]);
|
||||
|
||||
let mut bytes = data;
|
||||
bytes.skip(3).unwrap();
|
||||
assert_eq!(bytes.read_string(), Err(()));
|
||||
assert_eq!(bytes.0, &[]);
|
||||
|
||||
assert_eq!(data.read_string_at(0), Ok(&data.0[..2]));
|
||||
assert_eq!(data.read_string_at(1), Ok(&data.0[1..2]));
|
||||
assert_eq!(data.read_string_at(2), Ok(&[][..]));
|
||||
assert_eq!(data.read_string_at(3), Err(()));
|
||||
}
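
    // Editor's sketch, not part of the upstream tests: exercises the LEB128 helpers
    // above using the standard DWARF examples (624485 encodes as E5 8E 26, and
    // -123456 encodes as C0 BB 78).
    #[test]
    fn leb128_sketch() {
        let mut bytes = Bytes(&[0xE5, 0x8E, 0x26]);
        assert_eq!(bytes.read_uleb128(), Ok(624485));
        assert!(bytes.is_empty());

        let mut bytes = Bytes(&[0xC0, 0xBB, 0x78]);
        assert_eq!(bytes.read_sleb128(), Ok(-123456));
        assert!(bytes.is_empty());
    }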
|
||||
|
||||
#[test]
|
||||
fn bytes_debug() {
|
||||
assert_eq!(format!("{:?}", Bytes(&[])), "[]");
|
||||
assert_eq!(format!("{:?}", Bytes(&[0x01])), "[0x01]");
|
||||
assert_eq!(
|
||||
format!(
|
||||
"{:?}",
|
||||
Bytes(&[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08])
|
||||
),
|
||||
"[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]"
|
||||
);
|
||||
assert_eq!(
|
||||
format!(
|
||||
"{:?}",
|
||||
Bytes(&[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09])
|
||||
),
|
||||
"[0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, ...; 9]"
|
||||
);
|
||||
}
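
    // Editor's sketch, not part of the upstream tests: a `StringTable` over a small
    // null-terminated blob; offsets passed to `get` are relative to `start`.
    #[test]
    fn string_table_sketch() {
        let data = b"\0foo\0bar\0";
        let table = StringTable::new(&data[..], 0, data.len() as u64);
        assert_eq!(table.get(1), Ok(&b"foo"[..]));
        assert_eq!(table.get(5), Ok(&b"bar"[..]));
        assert_eq!(table.get(9), Err(()));
    }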
|
||||
}
|
||||
966
vendor/object/src/read/wasm.rs
vendored
Normal file
@@ -0,0 +1,966 @@
|
||||
//! Support for reading Wasm files.
|
||||
//!
|
||||
//! [`WasmFile`] implements the [`Object`] trait for Wasm files.
|
||||
use alloc::boxed::Box;
|
||||
use alloc::vec::Vec;
|
||||
use core::marker::PhantomData;
|
||||
use core::ops::Range;
|
||||
use core::{slice, str};
|
||||
use wasmparser as wp;
|
||||
|
||||
use crate::read::{
|
||||
self, Architecture, ComdatKind, CompressedData, CompressedFileRange, Error, Export, FileFlags,
|
||||
Import, NoDynamicRelocationIterator, Object, ObjectComdat, ObjectKind, ObjectSection,
|
||||
ObjectSegment, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Relocation, Result,
|
||||
SectionFlags, SectionIndex, SectionKind, SegmentFlags, SymbolFlags, SymbolIndex, SymbolKind,
|
||||
SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[repr(usize)]
|
||||
enum SectionId {
|
||||
Custom = 0,
|
||||
Type = 1,
|
||||
Import = 2,
|
||||
Function = 3,
|
||||
Table = 4,
|
||||
Memory = 5,
|
||||
Global = 6,
|
||||
Export = 7,
|
||||
Start = 8,
|
||||
Element = 9,
|
||||
Code = 10,
|
||||
Data = 11,
|
||||
DataCount = 12,
|
||||
}
|
||||
// Update this constant when adding a new section id:
|
||||
const MAX_SECTION_ID: usize = SectionId::DataCount as usize;
|
||||
|
||||
/// A WebAssembly object file.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmFile<'data, R = &'data [u8]> {
|
||||
data: &'data [u8],
|
||||
has_memory64: bool,
|
||||
// All sections, including custom sections.
|
||||
sections: Vec<SectionHeader<'data>>,
|
||||
// Indices into `sections` of sections with a non-zero id.
|
||||
id_sections: Box<[Option<usize>; MAX_SECTION_ID + 1]>,
|
||||
// Whether the file has DWARF information.
|
||||
has_debug_symbols: bool,
|
||||
// Symbols collected from imports, exports, code and name sections.
|
||||
symbols: Vec<WasmSymbolInternal<'data>>,
|
||||
// Address of the function body for the entry point.
|
||||
entry: u64,
|
||||
marker: PhantomData<R>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SectionHeader<'data> {
|
||||
id: SectionId,
|
||||
range: Range<usize>,
|
||||
name: &'data str,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum LocalFunctionKind {
|
||||
Unknown,
|
||||
Exported { symbol_ids: Vec<u32> },
|
||||
Local { symbol_id: u32 },
|
||||
}
|
||||
|
||||
impl<T> ReadError<T> for wasmparser::Result<T> {
|
||||
fn read_error(self, error: &'static str) -> Result<T> {
|
||||
self.map_err(|_| Error(error))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, R: ReadRef<'data>> WasmFile<'data, R> {
|
||||
/// Parse the raw wasm data.
|
||||
pub fn parse(data: R) -> Result<Self> {
|
||||
let len = data.len().read_error("Unknown Wasm file size")?;
|
||||
let data = data.read_bytes_at(0, len).read_error("Wasm read failed")?;
|
||||
let parser = wp::Parser::new(0).parse_all(data);
|
||||
|
||||
let mut file = WasmFile {
|
||||
data,
|
||||
has_memory64: false,
|
||||
sections: Vec::new(),
|
||||
id_sections: Default::default(),
|
||||
has_debug_symbols: false,
|
||||
symbols: Vec::new(),
|
||||
entry: 0,
|
||||
marker: PhantomData,
|
||||
};
|
||||
|
||||
let mut main_file_symbol = Some(WasmSymbolInternal {
|
||||
name: "",
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::File,
|
||||
section: SymbolSection::None,
|
||||
scope: SymbolScope::Compilation,
|
||||
});
|
||||
|
||||
let mut imported_funcs_count = 0;
|
||||
let mut local_func_kinds = Vec::new();
|
||||
let mut entry_func_id = None;
|
||||
let mut code_range_start = 0;
|
||||
let mut code_func_index = 0;
|
||||
// One-to-one mapping of globals to their value (if the global is a constant integer).
|
||||
let mut global_values = Vec::new();
|
||||
|
||||
for payload in parser {
|
||||
let payload = payload.read_error("Invalid Wasm section header")?;
|
||||
|
||||
match payload {
|
||||
wp::Payload::TypeSection(section) => {
|
||||
file.add_section(SectionId::Type, section.range(), "");
|
||||
}
|
||||
wp::Payload::ImportSection(section) => {
|
||||
file.add_section(SectionId::Import, section.range(), "");
|
||||
let mut last_module_name = None;
|
||||
|
||||
for import in section {
|
||||
let import = import.read_error("Couldn't read an import item")?;
|
||||
let module_name = import.module;
|
||||
|
||||
if last_module_name != Some(module_name) {
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: module_name,
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::File,
|
||||
section: SymbolSection::None,
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
last_module_name = Some(module_name);
|
||||
}
|
||||
|
||||
let kind = match import.ty {
|
||||
wp::TypeRef::Func(_) => {
|
||||
imported_funcs_count += 1;
|
||||
SymbolKind::Text
|
||||
}
|
||||
wp::TypeRef::Memory(memory) => {
|
||||
file.has_memory64 |= memory.memory64;
|
||||
SymbolKind::Data
|
||||
}
|
||||
wp::TypeRef::Table(_) | wp::TypeRef::Global(_) => SymbolKind::Data,
|
||||
wp::TypeRef::Tag(_) => SymbolKind::Unknown,
|
||||
};
|
||||
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: import.name,
|
||||
address: 0,
|
||||
size: 0,
|
||||
kind,
|
||||
section: SymbolSection::Undefined,
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
}
|
||||
}
|
||||
wp::Payload::FunctionSection(section) => {
|
||||
file.add_section(SectionId::Function, section.range(), "");
|
||||
local_func_kinds =
|
||||
vec![LocalFunctionKind::Unknown; section.into_iter().count()];
|
||||
}
|
||||
wp::Payload::TableSection(section) => {
|
||||
file.add_section(SectionId::Table, section.range(), "");
|
||||
}
|
||||
wp::Payload::MemorySection(section) => {
|
||||
file.add_section(SectionId::Memory, section.range(), "");
|
||||
for memory in section {
|
||||
let memory = memory.read_error("Couldn't read a memory item")?;
|
||||
file.has_memory64 |= memory.memory64;
|
||||
}
|
||||
}
|
||||
wp::Payload::GlobalSection(section) => {
|
||||
file.add_section(SectionId::Global, section.range(), "");
|
||||
for global in section {
|
||||
let global = global.read_error("Couldn't read a global item")?;
|
||||
let mut address = None;
|
||||
if !global.ty.mutable {
|
||||
// There should be exactly one instruction.
|
||||
let init = global.init_expr.get_operators_reader().read();
|
||||
address = match init.read_error("Couldn't read a global init expr")? {
|
||||
wp::Operator::I32Const { value } => Some(value as u64),
|
||||
wp::Operator::I64Const { value } => Some(value as u64),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
global_values.push(address);
|
||||
}
|
||||
}
|
||||
wp::Payload::ExportSection(section) => {
|
||||
file.add_section(SectionId::Export, section.range(), "");
|
||||
if let Some(main_file_symbol) = main_file_symbol.take() {
|
||||
file.symbols.push(main_file_symbol);
|
||||
}
|
||||
|
||||
for export in section {
|
||||
let export = export.read_error("Couldn't read an export item")?;
|
||||
|
||||
let (kind, section_idx) = match export.kind {
|
||||
wp::ExternalKind::Func => {
|
||||
if let Some(local_func_id) =
|
||||
export.index.checked_sub(imported_funcs_count)
|
||||
{
|
||||
let local_func_kind =
|
||||
&mut local_func_kinds[local_func_id as usize];
|
||||
if let LocalFunctionKind::Unknown = local_func_kind {
|
||||
*local_func_kind = LocalFunctionKind::Exported {
|
||||
symbol_ids: Vec::new(),
|
||||
};
|
||||
}
|
||||
let symbol_ids = match local_func_kind {
|
||||
LocalFunctionKind::Exported { symbol_ids } => symbol_ids,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
symbol_ids.push(file.symbols.len() as u32);
|
||||
}
|
||||
(SymbolKind::Text, SectionId::Code)
|
||||
}
|
||||
wp::ExternalKind::Table
|
||||
| wp::ExternalKind::Memory
|
||||
| wp::ExternalKind::Global => (SymbolKind::Data, SectionId::Data),
|
||||
// TODO
|
||||
wp::ExternalKind::Tag => continue,
|
||||
};
|
||||
|
||||
// Try to guess the symbol address. Rust and C export a global containing
|
||||
// the address in linear memory of the symbol.
|
||||
let mut address = 0;
|
||||
if export.kind == wp::ExternalKind::Global {
|
||||
if let Some(&Some(x)) = global_values.get(export.index as usize) {
|
||||
address = x;
|
||||
}
|
||||
}
|
||||
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: export.name,
|
||||
address,
|
||||
size: 0,
|
||||
kind,
|
||||
section: SymbolSection::Section(SectionIndex(section_idx as usize)),
|
||||
scope: SymbolScope::Dynamic,
|
||||
});
|
||||
}
|
||||
}
|
||||
wp::Payload::StartSection { func, range, .. } => {
|
||||
file.add_section(SectionId::Start, range, "");
|
||||
entry_func_id = Some(func);
|
||||
}
|
||||
wp::Payload::ElementSection(section) => {
|
||||
file.add_section(SectionId::Element, section.range(), "");
|
||||
}
|
||||
wp::Payload::CodeSectionStart { range, .. } => {
|
||||
code_range_start = range.start;
|
||||
file.add_section(SectionId::Code, range, "");
|
||||
if let Some(main_file_symbol) = main_file_symbol.take() {
|
||||
file.symbols.push(main_file_symbol);
|
||||
}
|
||||
}
|
||||
wp::Payload::CodeSectionEntry(body) => {
|
||||
let i = code_func_index;
|
||||
code_func_index += 1;
|
||||
|
||||
let range = body.range();
|
||||
|
||||
let address = range.start as u64 - code_range_start as u64;
|
||||
let size = (range.end - range.start) as u64;
|
||||
|
||||
if entry_func_id == Some(i as u32) {
|
||||
file.entry = address;
|
||||
}
|
||||
|
||||
let local_func_kind = &mut local_func_kinds[i];
|
||||
match local_func_kind {
|
||||
LocalFunctionKind::Unknown => {
|
||||
*local_func_kind = LocalFunctionKind::Local {
|
||||
symbol_id: file.symbols.len() as u32,
|
||||
};
|
||||
file.symbols.push(WasmSymbolInternal {
|
||||
name: "",
|
||||
address,
|
||||
size,
|
||||
kind: SymbolKind::Text,
|
||||
section: SymbolSection::Section(SectionIndex(
|
||||
SectionId::Code as usize,
|
||||
)),
|
||||
scope: SymbolScope::Compilation,
|
||||
});
|
||||
}
|
||||
LocalFunctionKind::Exported { symbol_ids } => {
|
||||
for symbol_id in core::mem::take(symbol_ids) {
|
||||
let export_symbol = &mut file.symbols[symbol_id as usize];
|
||||
export_symbol.address = address;
|
||||
export_symbol.size = size;
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
wp::Payload::DataSection(section) => {
|
||||
file.add_section(SectionId::Data, section.range(), "");
|
||||
}
|
||||
wp::Payload::DataCountSection { range, .. } => {
|
||||
file.add_section(SectionId::DataCount, range, "");
|
||||
}
|
||||
wp::Payload::CustomSection(section) => {
|
||||
let name = section.name();
|
||||
let size = section.data().len();
|
||||
let mut range = section.range();
|
||||
range.start = range.end - size;
|
||||
file.add_section(SectionId::Custom, range, name);
|
||||
if name == "name" {
|
||||
for name in
|
||||
wp::NameSectionReader::new(section.data(), section.data_offset())
|
||||
{
|
||||
// TODO: Right now, ill-formed name subsections
|
||||
// are silently ignored in order to maintain
|
||||
// compatibility with extended name sections, which
|
||||
// are not yet supported by the version of
|
||||
// `wasmparser` currently used.
|
||||
// A better fix would be to update `wasmparser` to
|
||||
// the newest version, but this requires
|
||||
// a major rewrite of this file.
|
||||
if let Ok(wp::Name::Function(name_map)) = name {
|
||||
for naming in name_map {
|
||||
let naming =
|
||||
naming.read_error("Couldn't read a function name")?;
|
||||
if let Some(local_index) =
|
||||
naming.index.checked_sub(imported_funcs_count)
|
||||
{
|
||||
if let LocalFunctionKind::Local { symbol_id } =
|
||||
local_func_kinds[local_index as usize]
|
||||
{
|
||||
file.symbols[symbol_id as usize].name = naming.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if name.starts_with(".debug_") {
|
||||
file.has_debug_symbols = true;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(file)
|
||||
}
|
||||
|
||||
fn add_section(&mut self, id: SectionId, range: Range<usize>, name: &'data str) {
|
||||
let section = SectionHeader { id, range, name };
|
||||
self.id_sections[id as usize] = Some(self.sections.len());
|
||||
self.sections.push(section);
|
||||
}
|
||||
}
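// Editor's sketch, not part of the upstream source (assumes a `wasm_bytes: &[u8]`
// buffer holding a valid Wasm module and the `wasm` feature enabled):
//
//     let file = WasmFile::<&[u8]>::parse(wasm_bytes)?;
//     for section in file.sections() {
//         let _ = (section.name()?, section.size());
//     }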
|
||||
|
||||
impl<'data, R> read::private::Sealed for WasmFile<'data, R> {}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>> Object<'data, 'file> for WasmFile<'data, R>
|
||||
where
|
||||
'data: 'file,
|
||||
R: 'file,
|
||||
{
|
||||
type Segment = WasmSegment<'data, 'file, R>;
|
||||
type SegmentIterator = WasmSegmentIterator<'data, 'file, R>;
|
||||
type Section = WasmSection<'data, 'file, R>;
|
||||
type SectionIterator = WasmSectionIterator<'data, 'file, R>;
|
||||
type Comdat = WasmComdat<'data, 'file, R>;
|
||||
type ComdatIterator = WasmComdatIterator<'data, 'file, R>;
|
||||
type Symbol = WasmSymbol<'data, 'file>;
|
||||
type SymbolIterator = WasmSymbolIterator<'data, 'file>;
|
||||
type SymbolTable = WasmSymbolTable<'data, 'file>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
#[inline]
|
||||
fn architecture(&self) -> Architecture {
|
||||
if self.has_memory64 {
|
||||
Architecture::Wasm64
|
||||
} else {
|
||||
Architecture::Wasm32
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_little_endian(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_64(&self) -> bool {
|
||||
self.has_memory64
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
// TODO: check for `linking` custom section
|
||||
ObjectKind::Unknown
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> Self::SegmentIterator {
|
||||
WasmSegmentIterator { file: self }
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<WasmSection<'data, 'file, R>> {
|
||||
self.sections()
|
||||
.find(|section| section.name_bytes() == Ok(section_name))
|
||||
}
|
||||
|
||||
fn section_by_index(&'file self, index: SectionIndex) -> Result<WasmSection<'data, 'file, R>> {
|
||||
// TODO: Missing sections should return an empty section.
|
||||
let id_section = self
|
||||
.id_sections
|
||||
.get(index.0)
|
||||
.and_then(|x| *x)
|
||||
.read_error("Invalid Wasm section index")?;
|
||||
let section = self.sections.get(id_section).unwrap();
|
||||
Ok(WasmSection {
|
||||
file: self,
|
||||
section,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> Self::SectionIterator {
|
||||
WasmSectionIterator {
|
||||
file: self,
|
||||
sections: self.sections.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> Self::ComdatIterator {
|
||||
WasmComdatIterator { file: self }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol_by_index(&'file self, index: SymbolIndex) -> Result<WasmSymbol<'data, 'file>> {
|
||||
let symbol = self
|
||||
.symbols
|
||||
.get(index.0)
|
||||
.read_error("Invalid Wasm symbol index")?;
|
||||
Ok(WasmSymbol { index, symbol })
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: self.symbols.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_table(&'file self) -> Option<WasmSymbolTable<'data, 'file>> {
|
||||
Some(WasmSymbolTable {
|
||||
symbols: &self.symbols,
|
||||
})
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: [].iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_symbol_table(&'file self) -> Option<WasmSymbolTable<'data, 'file>> {
|
||||
None
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn dynamic_relocations(&self) -> Option<NoDynamicRelocationIterator> {
|
||||
None
|
||||
}
|
||||
|
||||
fn imports(&self) -> Result<Vec<Import<'data>>> {
|
||||
// TODO: return entries in the import section
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn exports(&self) -> Result<Vec<Export<'data>>> {
|
||||
// TODO: return entries in the export section
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.has_debug_symbols
|
||||
}
|
||||
|
||||
fn relative_address_base(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn entry(&'file self) -> u64 {
|
||||
self.entry
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the segments in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSegmentIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmSegmentIterator<'data, 'file, R> {
|
||||
type Item = WasmSegment<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSegment<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmSegment<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R> ObjectSegment<'data> for WasmSegment<'data, 'file, R> {
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SegmentFlags {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSectionIterator<'data, 'file, R = &'data [u8]> {
|
||||
file: &'file WasmFile<'data, R>,
|
||||
sections: slice::Iter<'file, SectionHeader<'data>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmSectionIterator<'data, 'file, R> {
|
||||
type Item = WasmSection<'data, 'file, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let section = self.sections.next()?;
|
||||
Some(WasmSection {
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in a [`WasmFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSection<'data, 'file, R = &'data [u8]> {
|
||||
file: &'file WasmFile<'data, R>,
|
||||
section: &'file SectionHeader<'data>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmSection<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R: ReadRef<'data>> ObjectSection<'data> for WasmSection<'data, 'file, R> {
|
||||
type RelocationIterator = WasmRelocationIterator<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn index(&self) -> SectionIndex {
|
||||
// Note that we treat all custom sections as index 0.
|
||||
// This is ok because they are never looked up by index.
|
||||
SectionIndex(self.section.id as usize)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
let range = &self.section.range;
|
||||
(range.end - range.start) as u64
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self) -> u64 {
|
||||
1
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
let range = &self.section.range;
|
||||
Some((range.start as _, range.end as _))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
let range = &self.section.range;
|
||||
self.file
|
||||
.data
|
||||
.read_bytes_at(range.start as u64, range.end as u64 - range.start as u64)
|
||||
.read_error("Invalid Wasm section size or offset")
|
||||
}
|
||||
|
||||
fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
self.name().map(str::as_bytes)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
Ok(match self.section.id {
|
||||
SectionId::Custom => self.section.name,
|
||||
SectionId::Type => "<type>",
|
||||
SectionId::Import => "<import>",
|
||||
SectionId::Function => "<function>",
|
||||
SectionId::Table => "<table>",
|
||||
SectionId::Memory => "<memory>",
|
||||
SectionId::Global => "<global>",
|
||||
SectionId::Export => "<export>",
|
||||
SectionId::Start => "<start>",
|
||||
SectionId::Element => "<element>",
|
||||
SectionId::Code => "<code>",
|
||||
SectionId::Data => "<data>",
|
||||
SectionId::DataCount => "<data_count>",
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SectionKind {
|
||||
match self.section.id {
|
||||
SectionId::Custom => match self.section.name {
|
||||
"reloc." | "linking" => SectionKind::Linker,
|
||||
_ => SectionKind::Other,
|
||||
},
|
||||
SectionId::Type => SectionKind::Metadata,
|
||||
SectionId::Import => SectionKind::Linker,
|
||||
SectionId::Function => SectionKind::Metadata,
|
||||
SectionId::Table => SectionKind::UninitializedData,
|
||||
SectionId::Memory => SectionKind::UninitializedData,
|
||||
SectionId::Global => SectionKind::Data,
|
||||
SectionId::Export => SectionKind::Linker,
|
||||
SectionId::Start => SectionKind::Linker,
|
||||
SectionId::Element => SectionKind::Data,
|
||||
SectionId::Code => SectionKind::Text,
|
||||
SectionId::Data => SectionKind::Data,
|
||||
SectionId::DataCount => SectionKind::UninitializedData,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn relocations(&self) -> WasmRelocationIterator<'data, 'file, R> {
|
||||
WasmRelocationIterator(PhantomData)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdatIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmComdatIterator<'data, 'file, R> {
|
||||
type Item = WasmComdat<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdat<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> read::private::Sealed for WasmComdat<'data, 'file, R> {}
|
||||
|
||||
impl<'data, 'file, R> ObjectComdat<'data> for WasmComdat<'data, 'file, R> {
|
||||
type SectionIterator = WasmComdatSectionIterator<'data, 'file, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`WasmFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmComdatSectionIterator<'data, 'file, R = &'data [u8]> {
|
||||
#[allow(unused)]
|
||||
file: &'file WasmFile<'data, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmComdatSectionIterator<'data, 'file, R> {
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol table in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSymbolTable<'data, 'file> {
|
||||
symbols: &'file [WasmSymbolInternal<'data>],
|
||||
}
|
||||
|
||||
impl<'data, 'file> read::private::Sealed for WasmSymbolTable<'data, 'file> {}
|
||||
|
||||
impl<'data, 'file> ObjectSymbolTable<'data> for WasmSymbolTable<'data, 'file> {
|
||||
type Symbol = WasmSymbol<'data, 'file>;
|
||||
type SymbolIterator = WasmSymbolIterator<'data, 'file>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
WasmSymbolIterator {
|
||||
symbols: self.symbols.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol> {
|
||||
let symbol = self
|
||||
.symbols
|
||||
.get(index.0)
|
||||
.read_error("Invalid Wasm symbol index")?;
|
||||
Ok(WasmSymbol { index, symbol })
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in a [`WasmFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct WasmSymbolIterator<'data, 'file> {
|
||||
symbols: core::iter::Enumerate<slice::Iter<'file, WasmSymbolInternal<'data>>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file> Iterator for WasmSymbolIterator<'data, 'file> {
|
||||
type Item = WasmSymbol<'data, 'file>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let (index, symbol) = self.symbols.next()?;
|
||||
Some(WasmSymbol {
|
||||
index: SymbolIndex(index),
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in a [`WasmFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct WasmSymbol<'data, 'file> {
|
||||
index: SymbolIndex,
|
||||
symbol: &'file WasmSymbolInternal<'data>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct WasmSymbolInternal<'data> {
|
||||
name: &'data str,
|
||||
address: u64,
|
||||
size: u64,
|
||||
kind: SymbolKind,
|
||||
section: SymbolSection,
|
||||
scope: SymbolScope,
|
||||
}
|
||||
|
||||
impl<'data, 'file> read::private::Sealed for WasmSymbol<'data, 'file> {}
|
||||
|
||||
impl<'data, 'file> ObjectSymbol<'data> for WasmSymbol<'data, 'file> {
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> read::Result<&'data [u8]> {
|
||||
Ok(self.symbol.name.as_bytes())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> read::Result<&'data str> {
|
||||
Ok(self.symbol.name)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
self.symbol.address
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
self.symbol.size
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> SymbolKind {
|
||||
self.symbol.kind
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn section(&self) -> SymbolSection {
|
||||
self.symbol.section
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.section == SymbolSection::Undefined
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
(self.symbol.kind == SymbolKind::Text || self.symbol.kind == SymbolKind::Data)
|
||||
&& self.symbol.section != SymbolSection::Undefined
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.section == SymbolSection::Common
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn scope(&self) -> SymbolScope {
|
||||
self.symbol.scope
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
self.symbol.scope != SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
self.symbol.scope == SymbolScope::Compilation
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
SymbolFlags::None
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the relocations for a [`WasmSection`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct WasmRelocationIterator<'data, 'file, R = &'data [u8]>(
|
||||
PhantomData<(&'data (), &'file (), R)>,
|
||||
);
|
||||
|
||||
impl<'data, 'file, R> Iterator for WasmRelocationIterator<'data, 'file, R> {
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
135
vendor/object/src/read/xcoff/comdat.rs
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
//! XCOFF doesn't support COMDAT sections.
|
||||
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::xcoff;
|
||||
|
||||
use crate::read::{self, ComdatKind, ObjectComdat, ReadRef, Result, SectionIndex, SymbolIndex};
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffComdatIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdatIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the COMDAT section groups in a [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffComdatIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdatIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the COMDAT section groups in a [`XcoffFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffComdatIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
pub(crate) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffComdatIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = XcoffComdat<'data, 'file, Xcoff, R>;
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A COMDAT section group in a [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffComdat32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdat<'data, 'file, xcoff::FileHeader32, R>;
|
||||
|
||||
/// A COMDAT section group in a [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffComdat64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdat<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A COMDAT section group in a [`XcoffFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffComdat<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffComdat<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> ObjectComdat<'data> for XcoffComdat<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type SectionIterator = XcoffComdatSectionIterator<'data, 'file, Xcoff, R>;
|
||||
|
||||
#[inline]
|
||||
fn kind(&self) -> ComdatKind {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn symbol(&self) -> SymbolIndex {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name_bytes(&self) -> Result<&[u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<&str> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sections(&self) -> Self::SectionIterator {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffComdatSectionIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdatSectionIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the sections in a COMDAT section group in a [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffComdatSectionIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffComdatSectionIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the sections in a COMDAT section group in a [`XcoffFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffComdatSectionIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffComdatSectionIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = SectionIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
696
vendor/object/src/read/xcoff/file.rs
vendored
Normal file
@@ -0,0 +1,696 @@
|
||||
use core::fmt::Debug;
|
||||
use core::mem;
|
||||
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use crate::read::{self, Error, NoDynamicRelocationIterator, Object, ReadError, ReadRef, Result};
|
||||
|
||||
use crate::{
|
||||
xcoff, Architecture, BigEndian as BE, FileFlags, ObjectKind, ObjectSection, Pod, SectionIndex,
|
||||
SymbolIndex,
|
||||
};
|
||||
|
||||
use super::{
|
||||
CsectAux, FileAux, SectionHeader, SectionTable, Symbol, SymbolTable, XcoffComdat,
|
||||
XcoffComdatIterator, XcoffSection, XcoffSectionIterator, XcoffSegment, XcoffSegmentIterator,
|
||||
XcoffSymbol, XcoffSymbolIterator, XcoffSymbolTable,
|
||||
};
|
||||
|
||||
/// A 32-bit XCOFF object file.
|
||||
///
|
||||
/// This is a file that starts with [`xcoff::FileHeader32`], and corresponds
|
||||
/// to [`crate::FileKind::Xcoff32`].
|
||||
pub type XcoffFile32<'data, R = &'data [u8]> = XcoffFile<'data, xcoff::FileHeader32, R>;
|
||||
/// A 64-bit XCOFF object file.
|
||||
///
|
||||
/// This is a file that starts with [`xcoff::FileHeader64`], and corresponds
|
||||
/// to [`crate::FileKind::Xcoff64`].
|
||||
pub type XcoffFile64<'data, R = &'data [u8]> = XcoffFile<'data, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A partially parsed XCOFF file.
|
||||
///
|
||||
/// Most functionality is provided by the [`Object`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffFile<'data, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) data: R,
|
||||
pub(super) header: &'data Xcoff,
|
||||
pub(super) aux_header: Option<&'data Xcoff::AuxHeader>,
|
||||
pub(super) sections: SectionTable<'data, Xcoff>,
|
||||
pub(super) symbols: SymbolTable<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, Xcoff, R> XcoffFile<'data, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the raw XCOFF file data.
|
||||
pub fn parse(data: R) -> Result<Self> {
|
||||
let mut offset = 0;
|
||||
let header = Xcoff::parse(data, &mut offset)?;
|
||||
let aux_header = header.aux_header(data, &mut offset)?;
|
||||
let sections = header.sections(data, &mut offset)?;
|
||||
let symbols = header.symbols(data)?;
|
||||
|
||||
Ok(XcoffFile {
|
||||
data,
|
||||
header,
|
||||
aux_header,
|
||||
sections,
|
||||
symbols,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the raw data.
|
||||
pub fn data(&self) -> R {
|
||||
self.data
|
||||
}
|
||||
|
||||
/// Returns the raw XCOFF file header.
|
||||
pub fn raw_header(&self) -> &'data Xcoff {
|
||||
self.header
|
||||
}
|
||||
}
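// Editor's sketch, not part of the upstream source (assumes `data: &[u8]` holds a
// 64-bit XCOFF object file):
//
//     let file = XcoffFile64::parse(data)?;
//     assert_eq!(file.architecture(), Architecture::PowerPc64);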
|
||||
|
||||
impl<'data, Xcoff, R> read::private::Sealed for XcoffFile<'data, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Object<'data, 'file> for XcoffFile<'data, Xcoff, R>
|
||||
where
|
||||
'data: 'file,
|
||||
Xcoff: FileHeader,
|
||||
R: 'file + ReadRef<'data>,
|
||||
{
|
||||
type Segment = XcoffSegment<'data, 'file, Xcoff, R>;
|
||||
type SegmentIterator = XcoffSegmentIterator<'data, 'file, Xcoff, R>;
|
||||
type Section = XcoffSection<'data, 'file, Xcoff, R>;
|
||||
type SectionIterator = XcoffSectionIterator<'data, 'file, Xcoff, R>;
|
||||
type Comdat = XcoffComdat<'data, 'file, Xcoff, R>;
|
||||
type ComdatIterator = XcoffComdatIterator<'data, 'file, Xcoff, R>;
|
||||
type Symbol = XcoffSymbol<'data, 'file, Xcoff, R>;
|
||||
type SymbolIterator = XcoffSymbolIterator<'data, 'file, Xcoff, R>;
|
||||
type SymbolTable = XcoffSymbolTable<'data, 'file, Xcoff, R>;
|
||||
type DynamicRelocationIterator = NoDynamicRelocationIterator;
|
||||
|
||||
fn architecture(&self) -> crate::Architecture {
|
||||
if self.is_64() {
|
||||
Architecture::PowerPc64
|
||||
} else {
|
||||
Architecture::PowerPc
|
||||
}
|
||||
}
|
||||
|
||||
fn is_little_endian(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn is_64(&self) -> bool {
|
||||
self.header.is_type_64()
|
||||
}
|
||||
|
||||
fn kind(&self) -> ObjectKind {
|
||||
let flags = self.header.f_flags();
|
||||
if flags & xcoff::F_EXEC != 0 {
|
||||
ObjectKind::Executable
|
||||
} else if flags & xcoff::F_SHROBJ != 0 {
|
||||
ObjectKind::Dynamic
|
||||
} else if flags & xcoff::F_RELFLG == 0 {
|
||||
ObjectKind::Relocatable
|
||||
} else {
|
||||
ObjectKind::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
fn segments(&'file self) -> XcoffSegmentIterator<'data, 'file, Xcoff, R> {
|
||||
XcoffSegmentIterator { file: self }
|
||||
}
|
||||
|
||||
fn section_by_name_bytes(
|
||||
&'file self,
|
||||
section_name: &[u8],
|
||||
) -> Option<XcoffSection<'data, 'file, Xcoff, R>> {
|
||||
self.sections()
|
||||
.find(|section| section.name_bytes() == Ok(section_name))
|
||||
}
|
||||
|
||||
fn section_by_index(
|
||||
&'file self,
|
||||
index: SectionIndex,
|
||||
) -> Result<XcoffSection<'data, 'file, Xcoff, R>> {
|
||||
let section = self.sections.section(index)?;
|
||||
Ok(XcoffSection {
|
||||
file: self,
|
||||
section,
|
||||
index,
|
||||
})
|
||||
}
|
||||
|
||||
fn sections(&'file self) -> XcoffSectionIterator<'data, 'file, Xcoff, R> {
|
||||
XcoffSectionIterator {
|
||||
file: self,
|
||||
iter: self.sections.iter().enumerate(),
|
||||
}
|
||||
}
|
||||
|
||||
fn comdats(&'file self) -> XcoffComdatIterator<'data, 'file, Xcoff, R> {
|
||||
XcoffComdatIterator { file: self }
|
||||
}
|
||||
|
||||
fn symbol_table(&'file self) -> Option<XcoffSymbolTable<'data, 'file, Xcoff, R>> {
|
||||
if self.symbols.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(XcoffSymbolTable {
|
||||
symbols: &self.symbols,
|
||||
file: self,
|
||||
})
|
||||
}
|
||||
|
||||
fn symbol_by_index(
|
||||
&'file self,
|
||||
index: SymbolIndex,
|
||||
) -> Result<XcoffSymbol<'data, 'file, Xcoff, R>> {
|
||||
let symbol = self.symbols.symbol(index.0)?;
|
||||
Ok(XcoffSymbol {
|
||||
symbols: &self.symbols,
|
||||
index,
|
||||
symbol,
|
||||
file: self,
|
||||
})
|
||||
}
|
||||
|
||||
fn symbols(&'file self) -> XcoffSymbolIterator<'data, 'file, Xcoff, R> {
|
||||
XcoffSymbolIterator {
|
||||
file: self,
|
||||
symbols: self.symbols.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn dynamic_symbol_table(&'file self) -> Option<XcoffSymbolTable<'data, 'file, Xcoff, R>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn dynamic_symbols(&'file self) -> XcoffSymbolIterator<'data, 'file, Xcoff, R> {
|
||||
// TODO: return the symbols in the STYP_LOADER section.
|
||||
XcoffSymbolIterator {
|
||||
file: self,
|
||||
symbols: self.symbols.iter_none(),
|
||||
}
|
||||
}
|
||||
|
||||
fn dynamic_relocations(&'file self) -> Option<Self::DynamicRelocationIterator> {
|
||||
// TODO: return the relocations in the STYP_LOADER section.
|
||||
None
|
||||
}
|
||||
|
||||
fn imports(&self) -> Result<alloc::vec::Vec<crate::Import<'data>>> {
|
||||
// TODO: return the imports in the STYP_LOADER section.
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn exports(&self) -> Result<alloc::vec::Vec<crate::Export<'data>>> {
|
||||
// TODO: return the exports in the STYP_LOADER section.
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
fn has_debug_symbols(&self) -> bool {
|
||||
self.section_by_name(".debug").is_some() || self.section_by_name(".dwinfo").is_some()
|
||||
}
|
||||
|
||||
fn relative_address_base(&'file self) -> u64 {
|
||||
0
|
||||
}
|
||||
|
||||
fn entry(&'file self) -> u64 {
|
||||
if let Some(aux_header) = self.aux_header {
|
||||
aux_header.o_entry().into()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
fn flags(&self) -> FileFlags {
|
||||
FileFlags::Xcoff {
|
||||
f_flags: self.header.f_flags(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::FileHeader32`] and [`xcoff::FileHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait FileHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type AuxHeader: AuxHeader<Word = Self::Word>;
|
||||
type SectionHeader: SectionHeader<Word = Self::Word>;
|
||||
type Symbol: Symbol<Word = Self::Word>;
|
||||
type FileAux: FileAux;
|
||||
type CsectAux: CsectAux;
|
||||
|
||||
/// Return true if this type is a 64-bit header.
|
||||
fn is_type_64(&self) -> bool;
|
||||
|
||||
fn f_magic(&self) -> u16;
|
||||
fn f_nscns(&self) -> u16;
|
||||
fn f_timdat(&self) -> u32;
|
||||
fn f_symptr(&self) -> Self::Word;
|
||||
fn f_nsyms(&self) -> u32;
|
||||
fn f_opthdr(&self) -> u16;
|
||||
fn f_flags(&self) -> u16;
|
||||
|
||||
// Provided methods.
|
||||
|
||||
/// Read the file header.
|
||||
///
|
||||
/// Also checks that the magic field in the file header indicates a supported format.
|
||||
fn parse<'data, R: ReadRef<'data>>(data: R, offset: &mut u64) -> Result<&'data Self> {
|
||||
let header = data
|
||||
.read::<Self>(offset)
|
||||
.read_error("Invalid XCOFF header size or alignment")?;
|
||||
if !header.is_supported() {
|
||||
return Err(Error("Unsupported XCOFF header"));
|
||||
}
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
fn is_supported(&self) -> bool {
|
||||
(self.is_type_64() && self.f_magic() == xcoff::MAGIC_64)
|
||||
|| (!self.is_type_64() && self.f_magic() == xcoff::MAGIC_32)
|
||||
}
|
||||
|
||||
/// Read the auxiliary file header.
|
||||
fn aux_header<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
offset: &mut u64,
|
||||
) -> Result<Option<&'data Self::AuxHeader>> {
|
||||
let aux_header_size = self.f_opthdr();
|
||||
if self.f_flags() & xcoff::F_EXEC == 0 {
|
||||
// No auxiliary header is required for an object file that is not an executable.
|
||||
// TODO: Some AIX programs generate auxiliary headers for 32-bit object files
|
||||
// that end after the data_start field.
|
||||
*offset += u64::from(aux_header_size);
|
||||
return Ok(None);
|
||||
}
|
||||
// Executables, however, must have auxiliary headers that include the
|
||||
// full structure definitions.
|
||||
if aux_header_size != mem::size_of::<Self::AuxHeader>() as u16 {
|
||||
*offset += u64::from(aux_header_size);
|
||||
return Ok(None);
|
||||
}
|
||||
let aux_header = data
|
||||
.read::<Self::AuxHeader>(offset)
|
||||
.read_error("Invalid XCOFF auxiliary header size")?;
|
||||
Ok(Some(aux_header))
|
||||
}
|
||||
|
||||
/// Read the section table.
|
||||
#[inline]
|
||||
fn sections<'data, R: ReadRef<'data>>(
|
||||
&self,
|
||||
data: R,
|
||||
offset: &mut u64,
|
||||
) -> Result<SectionTable<'data, Self>> {
|
||||
SectionTable::parse(self, data, offset)
|
||||
}
|
||||
|
||||
/// Return the symbol table.
|
||||
#[inline]
|
||||
fn symbols<'data, R: ReadRef<'data>>(&self, data: R) -> Result<SymbolTable<'data, Self, R>> {
|
||||
SymbolTable::parse(*self, data)
|
||||
}
|
||||
}
|
||||
|
||||
impl FileHeader for xcoff::FileHeader32 {
|
||||
type Word = u32;
|
||||
type AuxHeader = xcoff::AuxHeader32;
|
||||
type SectionHeader = xcoff::SectionHeader32;
|
||||
type Symbol = xcoff::Symbol32;
|
||||
type FileAux = xcoff::FileAux32;
|
||||
type CsectAux = xcoff::CsectAux32;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn f_magic(&self) -> u16 {
|
||||
self.f_magic.get(BE)
|
||||
}
|
||||
|
||||
fn f_nscns(&self) -> u16 {
|
||||
self.f_nscns.get(BE)
|
||||
}
|
||||
|
||||
fn f_timdat(&self) -> u32 {
|
||||
self.f_timdat.get(BE)
|
||||
}
|
||||
|
||||
fn f_symptr(&self) -> Self::Word {
|
||||
self.f_symptr.get(BE)
|
||||
}
|
||||
|
||||
fn f_nsyms(&self) -> u32 {
|
||||
self.f_nsyms.get(BE)
|
||||
}
|
||||
|
||||
fn f_opthdr(&self) -> u16 {
|
||||
self.f_opthdr.get(BE)
|
||||
}
|
||||
|
||||
fn f_flags(&self) -> u16 {
|
||||
self.f_flags.get(BE)
|
||||
}
|
||||
}
|
||||
|
||||
impl FileHeader for xcoff::FileHeader64 {
|
||||
type Word = u64;
|
||||
type AuxHeader = xcoff::AuxHeader64;
|
||||
type SectionHeader = xcoff::SectionHeader64;
|
||||
type Symbol = xcoff::Symbol64;
|
||||
type FileAux = xcoff::FileAux64;
|
||||
type CsectAux = xcoff::CsectAux64;
|
||||
|
||||
fn is_type_64(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn f_magic(&self) -> u16 {
|
||||
self.f_magic.get(BE)
|
||||
}
|
||||
|
||||
fn f_nscns(&self) -> u16 {
|
||||
self.f_nscns.get(BE)
|
||||
}
|
||||
|
||||
fn f_timdat(&self) -> u32 {
|
||||
self.f_timdat.get(BE)
|
||||
}
|
||||
|
||||
fn f_symptr(&self) -> Self::Word {
|
||||
self.f_symptr.get(BE)
|
||||
}
|
||||
|
||||
fn f_nsyms(&self) -> u32 {
|
||||
self.f_nsyms.get(BE)
|
||||
}
|
||||
|
||||
fn f_opthdr(&self) -> u16 {
|
||||
self.f_opthdr.get(BE)
|
||||
}
|
||||
|
||||
fn f_flags(&self) -> u16 {
|
||||
self.f_flags.get(BE)
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::AuxHeader32`] and [`xcoff::AuxHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait AuxHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
|
||||
fn o_mflag(&self) -> u16;
|
||||
fn o_vstamp(&self) -> u16;
|
||||
fn o_tsize(&self) -> Self::Word;
|
||||
fn o_dsize(&self) -> Self::Word;
|
||||
fn o_bsize(&self) -> Self::Word;
|
||||
fn o_entry(&self) -> Self::Word;
|
||||
fn o_text_start(&self) -> Self::Word;
|
||||
fn o_data_start(&self) -> Self::Word;
|
||||
fn o_toc(&self) -> Self::Word;
|
||||
fn o_snentry(&self) -> u16;
|
||||
fn o_sntext(&self) -> u16;
|
||||
fn o_sndata(&self) -> u16;
|
||||
fn o_sntoc(&self) -> u16;
|
||||
fn o_snloader(&self) -> u16;
|
||||
fn o_snbss(&self) -> u16;
|
||||
fn o_algntext(&self) -> u16;
|
||||
fn o_algndata(&self) -> u16;
|
||||
fn o_modtype(&self) -> u16;
|
||||
fn o_cpuflag(&self) -> u8;
|
||||
fn o_cputype(&self) -> u8;
|
||||
fn o_maxstack(&self) -> Self::Word;
|
||||
fn o_maxdata(&self) -> Self::Word;
|
||||
fn o_debugger(&self) -> u32;
|
||||
fn o_textpsize(&self) -> u8;
|
||||
fn o_datapsize(&self) -> u8;
|
||||
fn o_stackpsize(&self) -> u8;
|
||||
fn o_flags(&self) -> u8;
|
||||
fn o_sntdata(&self) -> u16;
|
||||
fn o_sntbss(&self) -> u16;
|
||||
fn o_x64flags(&self) -> Option<u16>;
|
||||
}
|
||||
|
||||
impl AuxHeader for xcoff::AuxHeader32 {
|
||||
type Word = u32;
|
||||
|
||||
fn o_mflag(&self) -> u16 {
|
||||
self.o_mflag.get(BE)
|
||||
}
|
||||
|
||||
fn o_vstamp(&self) -> u16 {
|
||||
self.o_vstamp.get(BE)
|
||||
}
|
||||
|
||||
fn o_tsize(&self) -> Self::Word {
|
||||
self.o_tsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_dsize(&self) -> Self::Word {
|
||||
self.o_dsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_bsize(&self) -> Self::Word {
|
||||
self.o_bsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_entry(&self) -> Self::Word {
|
||||
self.o_entry.get(BE)
|
||||
}
|
||||
|
||||
fn o_text_start(&self) -> Self::Word {
|
||||
self.o_text_start.get(BE)
|
||||
}
|
||||
|
||||
fn o_data_start(&self) -> Self::Word {
|
||||
self.o_data_start.get(BE)
|
||||
}
|
||||
|
||||
fn o_toc(&self) -> Self::Word {
|
||||
self.o_toc.get(BE)
|
||||
}
|
||||
|
||||
fn o_snentry(&self) -> u16 {
|
||||
self.o_snentry.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntext(&self) -> u16 {
|
||||
self.o_sntext.get(BE)
|
||||
}
|
||||
|
||||
fn o_sndata(&self) -> u16 {
|
||||
self.o_sndata.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntoc(&self) -> u16 {
|
||||
self.o_sntoc.get(BE)
|
||||
}
|
||||
|
||||
fn o_snloader(&self) -> u16 {
|
||||
self.o_snloader.get(BE)
|
||||
}
|
||||
|
||||
fn o_snbss(&self) -> u16 {
|
||||
self.o_snbss.get(BE)
|
||||
}
|
||||
|
||||
fn o_algntext(&self) -> u16 {
|
||||
self.o_algntext.get(BE)
|
||||
}
|
||||
|
||||
fn o_algndata(&self) -> u16 {
|
||||
self.o_algndata.get(BE)
|
||||
}
|
||||
|
||||
fn o_modtype(&self) -> u16 {
|
||||
self.o_modtype.get(BE)
|
||||
}
|
||||
|
||||
fn o_cpuflag(&self) -> u8 {
|
||||
self.o_cpuflag
|
||||
}
|
||||
|
||||
fn o_cputype(&self) -> u8 {
|
||||
self.o_cputype
|
||||
}
|
||||
|
||||
fn o_maxstack(&self) -> Self::Word {
|
||||
self.o_maxstack.get(BE)
|
||||
}
|
||||
|
||||
fn o_maxdata(&self) -> Self::Word {
|
||||
self.o_maxdata.get(BE)
|
||||
}
|
||||
|
||||
fn o_debugger(&self) -> u32 {
|
||||
self.o_debugger.get(BE)
|
||||
}
|
||||
|
||||
fn o_textpsize(&self) -> u8 {
|
||||
self.o_textpsize
|
||||
}
|
||||
|
||||
fn o_datapsize(&self) -> u8 {
|
||||
self.o_datapsize
|
||||
}
|
||||
|
||||
fn o_stackpsize(&self) -> u8 {
|
||||
self.o_stackpsize
|
||||
}
|
||||
|
||||
fn o_flags(&self) -> u8 {
|
||||
self.o_flags
|
||||
}
|
||||
|
||||
fn o_sntdata(&self) -> u16 {
|
||||
self.o_sntdata.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntbss(&self) -> u16 {
|
||||
self.o_sntbss.get(BE)
|
||||
}
|
||||
|
||||
fn o_x64flags(&self) -> Option<u16> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl AuxHeader for xcoff::AuxHeader64 {
|
||||
type Word = u64;
|
||||
|
||||
fn o_mflag(&self) -> u16 {
|
||||
self.o_mflag.get(BE)
|
||||
}
|
||||
|
||||
fn o_vstamp(&self) -> u16 {
|
||||
self.o_vstamp.get(BE)
|
||||
}
|
||||
|
||||
fn o_tsize(&self) -> Self::Word {
|
||||
self.o_tsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_dsize(&self) -> Self::Word {
|
||||
self.o_dsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_bsize(&self) -> Self::Word {
|
||||
self.o_bsize.get(BE)
|
||||
}
|
||||
|
||||
fn o_entry(&self) -> Self::Word {
|
||||
self.o_entry.get(BE)
|
||||
}
|
||||
|
||||
fn o_text_start(&self) -> Self::Word {
|
||||
self.o_text_start.get(BE)
|
||||
}
|
||||
|
||||
fn o_data_start(&self) -> Self::Word {
|
||||
self.o_data_start.get(BE)
|
||||
}
|
||||
|
||||
fn o_toc(&self) -> Self::Word {
|
||||
self.o_toc.get(BE)
|
||||
}
|
||||
|
||||
fn o_snentry(&self) -> u16 {
|
||||
self.o_snentry.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntext(&self) -> u16 {
|
||||
self.o_sntext.get(BE)
|
||||
}
|
||||
|
||||
fn o_sndata(&self) -> u16 {
|
||||
self.o_sndata.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntoc(&self) -> u16 {
|
||||
self.o_sntoc.get(BE)
|
||||
}
|
||||
|
||||
fn o_snloader(&self) -> u16 {
|
||||
self.o_snloader.get(BE)
|
||||
}
|
||||
|
||||
fn o_snbss(&self) -> u16 {
|
||||
self.o_snbss.get(BE)
|
||||
}
|
||||
|
||||
fn o_algntext(&self) -> u16 {
|
||||
self.o_algntext.get(BE)
|
||||
}
|
||||
|
||||
fn o_algndata(&self) -> u16 {
|
||||
self.o_algndata.get(BE)
|
||||
}
|
||||
|
||||
fn o_modtype(&self) -> u16 {
|
||||
self.o_modtype.get(BE)
|
||||
}
|
||||
|
||||
fn o_cpuflag(&self) -> u8 {
|
||||
self.o_cpuflag
|
||||
}
|
||||
|
||||
fn o_cputype(&self) -> u8 {
|
||||
self.o_cputype
|
||||
}
|
||||
|
||||
fn o_maxstack(&self) -> Self::Word {
|
||||
self.o_maxstack.get(BE)
|
||||
}
|
||||
|
||||
fn o_maxdata(&self) -> Self::Word {
|
||||
self.o_maxdata.get(BE)
|
||||
}
|
||||
|
||||
fn o_debugger(&self) -> u32 {
|
||||
self.o_debugger.get(BE)
|
||||
}
|
||||
|
||||
fn o_textpsize(&self) -> u8 {
|
||||
self.o_textpsize
|
||||
}
|
||||
|
||||
fn o_datapsize(&self) -> u8 {
|
||||
self.o_datapsize
|
||||
}
|
||||
|
||||
fn o_stackpsize(&self) -> u8 {
|
||||
self.o_stackpsize
|
||||
}
|
||||
|
||||
fn o_flags(&self) -> u8 {
|
||||
self.o_flags
|
||||
}
|
||||
|
||||
fn o_sntdata(&self) -> u16 {
|
||||
self.o_sntdata.get(BE)
|
||||
}
|
||||
|
||||
fn o_sntbss(&self) -> u16 {
|
||||
self.o_sntbss.get(BE)
|
||||
}
|
||||
|
||||
fn o_x64flags(&self) -> Option<u16> {
|
||||
Some(self.o_x64flags.get(BE))
|
||||
}
|
||||
}
|
||||
63
vendor/object/src/read/xcoff/mod.rs
vendored
Normal file
@@ -0,0 +1,63 @@
//! Support for reading AIX XCOFF files.
//!
//! Traits are used to abstract over the difference between 32-bit and 64-bit XCOFF.
//! The primary trait for this is [`FileHeader`].
//!
//! ## High level API
//!
//! [`XcoffFile`] implements the [`Object`](crate::read::Object) trait for XCOFF files.
//! [`XcoffFile`] is parameterised by [`FileHeader`] to allow reading both 32-bit and
//! 64-bit XCOFF. There are type aliases for these parameters ([`XcoffFile32`] and
//! [`XcoffFile64`]).
//!
//! ## Low level API
//!
//! The [`FileHeader`] trait can be directly used to parse both [`xcoff::FileHeader32`]
//! and [`xcoff::FileHeader64`].
//!
//! ### Example for low level API
//! ```no_run
//! use object::xcoff;
//! use object::read::xcoff::{FileHeader, SectionHeader, Symbol};
//! use std::error::Error;
//! use std::fs;
//!
//! /// Reads a file and displays the name of each section and symbol.
//! fn main() -> Result<(), Box<dyn Error>> {
//! # #[cfg(feature = "std")] {
//!     let data = fs::read("path/to/binary")?;
//!     let mut offset = 0;
//!     let header = xcoff::FileHeader64::parse(&*data, &mut offset)?;
//!     let aux_header = header.aux_header(&*data, &mut offset)?;
//!     let sections = header.sections(&*data, &mut offset)?;
//!     let symbols = header.symbols(&*data)?;
//!     for section in sections.iter() {
//!         println!("{}", String::from_utf8_lossy(section.name()));
//!     }
//!     for (_index, symbol) in symbols.iter() {
//!         println!("{}", String::from_utf8_lossy(symbol.name(symbols.strings())?));
//!     }
//! # }
//!     Ok(())
//! }
//! ```
#[cfg(doc)]
use crate::xcoff;

mod file;
pub use file::*;

mod section;
pub use section::*;

mod symbol;
pub use symbol::*;

mod relocation;
pub use relocation::*;

mod comdat;
pub use comdat::*;

mod segment;
pub use segment::*;
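// A minimal sketch of the high level API described in the module documentation
// above, not part of the upstream crate. It assumes the caller already has the
// whole file in memory, and it is only compiled for tests with the `std` feature.
#[cfg(all(test, feature = "std"))]
mod high_level_example {
    use super::XcoffFile64;
    use crate::read::{Object, ObjectSection, ObjectSymbol};

    /// Print the name of every section and symbol in `data`.
    #[allow(dead_code)]
    fn list_names(data: &[u8]) -> crate::read::Result<()> {
        // Parse a 64-bit XCOFF file and use the generic `Object` trait methods.
        let file = XcoffFile64::parse(data)?;
        for section in file.sections() {
            println!("{}", section.name()?);
        }
        for symbol in file.symbols() {
            println!("{}", symbol.name()?);
        }
        Ok(())
    }
}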
127
vendor/object/src/read/xcoff/relocation.rs
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
use alloc::fmt;
|
||||
use core::fmt::Debug;
|
||||
use core::slice;
|
||||
|
||||
use crate::pod::Pod;
|
||||
use crate::{xcoff, BigEndian as BE, Relocation};
|
||||
|
||||
use crate::read::{ReadRef, RelocationEncoding, RelocationKind, RelocationTarget, SymbolIndex};
|
||||
|
||||
use super::{FileHeader, SectionHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the relocations in an [`XcoffSection32`](super::XcoffSection32).
|
||||
pub type XcoffRelocationIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffRelocationIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the relocations in an [`XcoffSection64`](super::XcoffSection64).
|
||||
pub type XcoffRelocationIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffRelocationIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the relocations in an [`XcoffSection`](super::XcoffSection).
|
||||
pub struct XcoffRelocationIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) relocations:
|
||||
slice::Iter<'data, <<Xcoff as FileHeader>::SectionHeader as SectionHeader>::Rel>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffRelocationIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = (u64, Relocation);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.relocations.next().map(|relocation| {
|
||||
let encoding = RelocationEncoding::Generic;
|
||||
let (kind, addend) = match relocation.r_rtype() {
|
||||
xcoff::R_POS
|
||||
| xcoff::R_RL
|
||||
| xcoff::R_RLA
|
||||
| xcoff::R_BA
|
||||
| xcoff::R_RBA
|
||||
| xcoff::R_TLS => (RelocationKind::Absolute, 0),
|
||||
xcoff::R_REL | xcoff::R_BR | xcoff::R_RBR => (RelocationKind::Relative, -4),
|
||||
xcoff::R_TOC | xcoff::R_TOCL | xcoff::R_TOCU => (RelocationKind::Got, 0),
|
||||
r_type => (RelocationKind::Xcoff(r_type), 0),
|
||||
};
|
||||
let size = (relocation.r_rsize() & 0x3F) + 1;
|
||||
let target = RelocationTarget::Symbol(SymbolIndex(relocation.r_symndx() as usize));
|
||||
(
|
||||
relocation.r_vaddr().into(),
|
||||
Relocation {
|
||||
kind,
|
||||
encoding,
|
||||
size,
|
||||
target,
|
||||
addend,
|
||||
implicit_addend: true,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> fmt::Debug for XcoffRelocationIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("XcoffRelocationIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::Rel32`] and [`xcoff::Rel64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Rel: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
fn r_vaddr(&self) -> Self::Word;
|
||||
fn r_symndx(&self) -> u32;
|
||||
fn r_rsize(&self) -> u8;
|
||||
fn r_rtype(&self) -> u8;
|
||||
}
|
||||
|
||||
impl Rel for xcoff::Rel32 {
|
||||
type Word = u32;
|
||||
|
||||
fn r_vaddr(&self) -> Self::Word {
|
||||
self.r_vaddr.get(BE)
|
||||
}
|
||||
|
||||
fn r_symndx(&self) -> u32 {
|
||||
self.r_symndx.get(BE)
|
||||
}
|
||||
|
||||
fn r_rsize(&self) -> u8 {
|
||||
self.r_rsize
|
||||
}
|
||||
|
||||
fn r_rtype(&self) -> u8 {
|
||||
self.r_rtype
|
||||
}
|
||||
}
|
||||
|
||||
impl Rel for xcoff::Rel64 {
|
||||
type Word = u64;
|
||||
|
||||
fn r_vaddr(&self) -> Self::Word {
|
||||
self.r_vaddr.get(BE)
|
||||
}
|
||||
|
||||
fn r_symndx(&self) -> u32 {
|
||||
self.r_symndx.get(BE)
|
||||
}
|
||||
|
||||
fn r_rsize(&self) -> u8 {
|
||||
self.r_rsize
|
||||
}
|
||||
|
||||
fn r_rtype(&self) -> u8 {
|
||||
self.r_rtype
|
||||
}
|
||||
}
|
||||
431
vendor/object/src/read/xcoff/section.rs
vendored
Normal file
@@ -0,0 +1,431 @@
|
||||
use core::fmt::Debug;
|
||||
use core::{iter, result, slice, str};
|
||||
|
||||
use crate::{
|
||||
xcoff, BigEndian as BE, CompressedData, CompressedFileRange, Pod, SectionFlags, SectionKind,
|
||||
};
|
||||
|
||||
use crate::read::{self, Error, ObjectSection, ReadError, ReadRef, Result, SectionIndex};
|
||||
|
||||
use super::{AuxHeader, FileHeader, Rel, XcoffFile, XcoffRelocationIterator};
|
||||
|
||||
/// An iterator for the sections in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSectionIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSectionIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the sections in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSectionIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSectionIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the sections in an [`XcoffFile`].
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffSectionIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) iter: iter::Enumerate<slice::Iter<'data, Xcoff::SectionHeader>>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffSectionIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = XcoffSection<'data, 'file, Xcoff, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next().map(|(index, section)| XcoffSection {
|
||||
index: SectionIndex(index + 1),
|
||||
file: self.file,
|
||||
section,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A section in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSection32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSection<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// A section in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSection64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSection<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A section in an [`XcoffFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSection`] trait implementation.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffSection<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) section: &'data Xcoff::SectionHeader,
|
||||
pub(super) index: SectionIndex,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> XcoffSection<'data, 'file, Xcoff, R> {
|
||||
fn bytes(&self) -> Result<&'data [u8]> {
|
||||
self.section
|
||||
.data(self.file.data)
|
||||
.read_error("Invalid XCOFF section offset or size")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffSection<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> ObjectSection<'data> for XcoffSection<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type RelocationIterator = XcoffRelocationIterator<'data, 'file, Xcoff, R>;
|
||||
|
||||
fn index(&self) -> SectionIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn address(&self) -> u64 {
|
||||
self.section.s_paddr().into()
|
||||
}
|
||||
|
||||
fn size(&self) -> u64 {
|
||||
self.section.s_size().into()
|
||||
}
|
||||
|
||||
fn align(&self) -> u64 {
|
||||
// The default section alignment is 4.
|
||||
if let Some(aux_header) = self.file.aux_header {
|
||||
match self.kind() {
|
||||
SectionKind::Text => aux_header.o_algntext().into(),
|
||||
SectionKind::Data => aux_header.o_algndata().into(),
|
||||
_ => 4,
|
||||
}
|
||||
} else {
|
||||
4
|
||||
}
|
||||
}
|
||||
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
self.section.file_range()
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
self.bytes()
|
||||
}
|
||||
|
||||
fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> {
|
||||
Ok(read::util::data_range(
|
||||
self.bytes()?,
|
||||
self.address(),
|
||||
address,
|
||||
size,
|
||||
))
|
||||
}
|
||||
|
||||
fn compressed_file_range(&self) -> Result<CompressedFileRange> {
|
||||
Ok(CompressedFileRange::none(self.file_range()))
|
||||
}
|
||||
|
||||
fn compressed_data(&self) -> Result<CompressedData<'data>> {
|
||||
self.data().map(CompressedData::none)
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> read::Result<&[u8]> {
|
||||
Ok(self.section.name())
|
||||
}
|
||||
|
||||
fn name(&self) -> read::Result<&str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 XCOFF section name")
|
||||
}
|
||||
|
||||
fn segment_name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn segment_name(&self) -> Result<Option<&str>> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn kind(&self) -> SectionKind {
|
||||
let section_type = self.section.s_flags() as u16;
|
||||
if section_type & xcoff::STYP_TEXT != 0 {
|
||||
SectionKind::Text
|
||||
} else if section_type & xcoff::STYP_DATA != 0 {
|
||||
SectionKind::Data
|
||||
} else if section_type & xcoff::STYP_TDATA != 0 {
|
||||
SectionKind::Tls
|
||||
} else if section_type & xcoff::STYP_BSS != 0 {
|
||||
SectionKind::UninitializedData
|
||||
} else if section_type & xcoff::STYP_TBSS != 0 {
|
||||
SectionKind::UninitializedTls
|
||||
} else if section_type & (xcoff::STYP_DEBUG | xcoff::STYP_DWARF) != 0 {
|
||||
SectionKind::Debug
|
||||
} else if section_type & (xcoff::STYP_LOADER | xcoff::STYP_OVRFLO) != 0 {
|
||||
SectionKind::Metadata
|
||||
} else if section_type
|
||||
& (xcoff::STYP_INFO | xcoff::STYP_EXCEPT | xcoff::STYP_PAD | xcoff::STYP_TYPCHK)
|
||||
!= 0
|
||||
{
|
||||
SectionKind::Other
|
||||
} else {
|
||||
SectionKind::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
fn relocations(&self) -> Self::RelocationIterator {
|
||||
let rel = self.section.relocations(self.file.data).unwrap_or(&[]);
|
||||
XcoffRelocationIterator {
|
||||
file: self.file,
|
||||
relocations: rel.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn flags(&self) -> SectionFlags {
|
||||
SectionFlags::Xcoff {
|
||||
s_flags: self.section.s_flags(),
|
||||
}
|
||||
}
|
||||
|
||||
fn uncompressed_data(&self) -> Result<alloc::borrow::Cow<'data, [u8]>> {
|
||||
self.compressed_data()?.decompress()
|
||||
}
|
||||
}
|
||||
|
||||
/// The table of section headers in an XCOFF file.
|
||||
///
|
||||
/// Returned by [`FileHeader::sections`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct SectionTable<'data, Xcoff: FileHeader> {
|
||||
sections: &'data [Xcoff::SectionHeader],
|
||||
}
|
||||
|
||||
impl<'data, Xcoff> Default for SectionTable<'data, Xcoff>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self { sections: &[] }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Xcoff> SectionTable<'data, Xcoff>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
{
|
||||
/// Parse the section table.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
/// `offset` must be after the optional file header.
|
||||
pub fn parse<R: ReadRef<'data>>(header: &Xcoff, data: R, offset: &mut u64) -> Result<Self> {
|
||||
let section_num = header.f_nscns();
|
||||
if section_num == 0 {
|
||||
return Ok(SectionTable::default());
|
||||
}
|
||||
let sections = data
|
||||
.read_slice(offset, section_num as usize)
|
||||
.read_error("Invalid XCOFF section headers")?;
|
||||
Ok(SectionTable { sections })
|
||||
}
|
||||
|
||||
/// Iterate over the section headers.
|
||||
#[inline]
|
||||
pub fn iter(&self) -> slice::Iter<'data, Xcoff::SectionHeader> {
|
||||
self.sections.iter()
|
||||
}
|
||||
|
||||
/// Return true if the section table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.sections.is_empty()
|
||||
}
|
||||
|
||||
/// The number of section headers.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.sections.len()
|
||||
}
|
||||
|
||||
/// Return the section header at the given index.
|
||||
///
|
||||
/// The index is 1-based.
|
||||
pub fn section(&self, index: SectionIndex) -> read::Result<&'data Xcoff::SectionHeader> {
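        // Section indices are 1-based, so index 0 is invalid: `wrapping_sub(1)`
        // turns it into `usize::MAX`, the slice lookup fails, and an error is returned.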
|
||||
self.sections
|
||||
.get(index.0.wrapping_sub(1))
|
||||
.read_error("Invalid XCOFF section index")
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::SectionHeader32`] and [`xcoff::SectionHeader64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait SectionHeader: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
type HalfWord: Into<u32>;
|
||||
type Xcoff: FileHeader<SectionHeader = Self, Word = Self::Word>;
|
||||
type Rel: Rel<Word = Self::Word>;
|
||||
|
||||
fn s_name(&self) -> &[u8; 8];
|
||||
fn s_paddr(&self) -> Self::Word;
|
||||
fn s_vaddr(&self) -> Self::Word;
|
||||
fn s_size(&self) -> Self::Word;
|
||||
fn s_scnptr(&self) -> Self::Word;
|
||||
fn s_relptr(&self) -> Self::Word;
|
||||
fn s_lnnoptr(&self) -> Self::Word;
|
||||
fn s_nreloc(&self) -> Self::HalfWord;
|
||||
fn s_nlnno(&self) -> Self::HalfWord;
|
||||
fn s_flags(&self) -> u32;
|
||||
|
||||
/// Return the section name.
|
||||
fn name(&self) -> &[u8] {
|
||||
let sectname = &self.s_name()[..];
|
||||
match memchr::memchr(b'\0', sectname) {
|
||||
Some(end) => &sectname[..end],
|
||||
None => sectname,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the offset and size of the section in the file.
|
||||
fn file_range(&self) -> Option<(u64, u64)> {
|
||||
Some((self.s_scnptr().into(), self.s_size().into()))
|
||||
}
|
||||
|
||||
/// Return the section data.
|
||||
///
|
||||
/// Returns `Ok(&[])` if the section has no data.
|
||||
/// Returns `Err` for invalid values.
|
||||
fn data<'data, R: ReadRef<'data>>(&self, data: R) -> result::Result<&'data [u8], ()> {
|
||||
if let Some((offset, size)) = self.file_range() {
|
||||
data.read_bytes_at(offset, size)
|
||||
} else {
|
||||
Ok(&[])
|
||||
}
|
||||
}
|
||||
|
||||
/// Read the relocations.
|
||||
fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]>;
|
||||
}
|
||||
|
||||
impl SectionHeader for xcoff::SectionHeader32 {
|
||||
type Word = u32;
|
||||
type HalfWord = u16;
|
||||
type Xcoff = xcoff::FileHeader32;
|
||||
type Rel = xcoff::Rel32;
|
||||
|
||||
fn s_name(&self) -> &[u8; 8] {
|
||||
&self.s_name
|
||||
}
|
||||
|
||||
fn s_paddr(&self) -> Self::Word {
|
||||
self.s_paddr.get(BE)
|
||||
}
|
||||
|
||||
fn s_vaddr(&self) -> Self::Word {
|
||||
self.s_vaddr.get(BE)
|
||||
}
|
||||
|
||||
fn s_size(&self) -> Self::Word {
|
||||
self.s_size.get(BE)
|
||||
}
|
||||
|
||||
fn s_scnptr(&self) -> Self::Word {
|
||||
self.s_scnptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_relptr(&self) -> Self::Word {
|
||||
self.s_relptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_lnnoptr(&self) -> Self::Word {
|
||||
self.s_lnnoptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_nreloc(&self) -> Self::HalfWord {
|
||||
self.s_nreloc.get(BE)
|
||||
}
|
||||
|
||||
fn s_nlnno(&self) -> Self::HalfWord {
|
||||
self.s_nlnno.get(BE)
|
||||
}
|
||||
|
||||
fn s_flags(&self) -> u32 {
|
||||
self.s_flags.get(BE)
|
||||
}
|
||||
|
||||
/// Read the relocations in an XCOFF32 file.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]> {
|
||||
let reloc_num = self.s_nreloc() as usize;
|
||||
// TODO: If more than 65,534 relocation entries are required, the field value will be 65535,
|
||||
// and an STYP_OVRFLO section header will contain the actual count of relocation entries in
|
||||
// the s_paddr field.
|
||||
if reloc_num == 65535 {
|
||||
return Err(Error("Overflow section is not supported yet."));
|
||||
}
|
||||
data.read_slice_at(self.s_relptr().into(), reloc_num)
|
||||
.read_error("Invalid XCOFF relocation offset or number")
|
||||
}
|
||||
}
|
||||
|
||||
impl SectionHeader for xcoff::SectionHeader64 {
|
||||
type Word = u64;
|
||||
type HalfWord = u32;
|
||||
type Xcoff = xcoff::FileHeader64;
|
||||
type Rel = xcoff::Rel64;
|
||||
|
||||
fn s_name(&self) -> &[u8; 8] {
|
||||
&self.s_name
|
||||
}
|
||||
|
||||
fn s_paddr(&self) -> Self::Word {
|
||||
self.s_paddr.get(BE)
|
||||
}
|
||||
|
||||
fn s_vaddr(&self) -> Self::Word {
|
||||
self.s_vaddr.get(BE)
|
||||
}
|
||||
|
||||
fn s_size(&self) -> Self::Word {
|
||||
self.s_size.get(BE)
|
||||
}
|
||||
|
||||
fn s_scnptr(&self) -> Self::Word {
|
||||
self.s_scnptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_relptr(&self) -> Self::Word {
|
||||
self.s_relptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_lnnoptr(&self) -> Self::Word {
|
||||
self.s_lnnoptr.get(BE)
|
||||
}
|
||||
|
||||
fn s_nreloc(&self) -> Self::HalfWord {
|
||||
self.s_nreloc.get(BE)
|
||||
}
|
||||
|
||||
fn s_nlnno(&self) -> Self::HalfWord {
|
||||
self.s_nlnno.get(BE)
|
||||
}
|
||||
|
||||
fn s_flags(&self) -> u32 {
|
||||
self.s_flags.get(BE)
|
||||
}
|
||||
|
||||
/// Read the relocations in an XCOFF64 file.
|
||||
///
|
||||
/// `data` must be the entire file data.
|
||||
fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]> {
|
||||
data.read_slice_at(self.s_relptr(), self.s_nreloc() as usize)
|
||||
.read_error("Invalid XCOFF relocation offset or number")
|
||||
}
|
||||
}
|
||||
117
vendor/object/src/read/xcoff/segment.rs
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
//! TODO: Support segments for XCOFF once handling of the auxiliary file header and loader section is ready.
|
||||
|
||||
use core::fmt::Debug;
|
||||
use core::str;
|
||||
|
||||
use crate::read::{self, ObjectSegment, ReadRef, Result};
|
||||
use crate::xcoff;
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// An iterator for the segments in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSegmentIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSegmentIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the segments in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSegmentIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSegmentIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the segments in an [`XcoffFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffSegmentIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> Iterator for XcoffSegmentIterator<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
type Item = XcoffSegment<'data, 'file, Xcoff, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSegment32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSegment<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// A segment in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSegment64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSegment<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A loadable section in an [`XcoffFile`].
|
||||
///
|
||||
/// This is a stub that doesn't implement any functionality.
|
||||
#[derive(Debug)]
|
||||
pub struct XcoffSegment<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
#[allow(unused)]
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> XcoffSegment<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> read::private::Sealed for XcoffSegment<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff, R> ObjectSegment<'data> for XcoffSegment<'data, 'file, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn address(&self) -> u64 {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn size(&self) -> u64 {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn align(&self) -> u64 {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn file_range(&self) -> (u64, u64) {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn data(&self) -> Result<&'data [u8]> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn data_range(&self, _address: u64, _size: u64) -> Result<Option<&'data [u8]>> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> Result<Option<&[u8]>> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn name(&self) -> Result<Option<&str>> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn flags(&self) -> crate::SegmentFlags {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
786
vendor/object/src/read/xcoff/symbol.rs
vendored
Normal file
@@ -0,0 +1,786 @@
|
||||
use alloc::fmt;
|
||||
use core::convert::TryInto;
|
||||
use core::fmt::Debug;
|
||||
use core::marker::PhantomData;
|
||||
use core::str;
|
||||
|
||||
use crate::endian::{BigEndian as BE, U32Bytes};
|
||||
use crate::pod::{bytes_of, Pod};
|
||||
use crate::read::util::StringTable;
|
||||
use crate::xcoff;
|
||||
|
||||
use crate::read::{
|
||||
self, Bytes, Error, ObjectSymbol, ObjectSymbolTable, ReadError, ReadRef, Result, SectionIndex,
|
||||
SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
use super::{FileHeader, XcoffFile};
|
||||
|
||||
/// A table of symbol entries in an XCOFF file.
|
||||
///
|
||||
/// Also includes the string table used for the symbol names.
|
||||
///
|
||||
/// Returned by [`FileHeader::symbols`].
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolTable<'data, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
symbols: &'data [xcoff::SymbolBytes],
|
||||
strings: StringTable<'data, R>,
|
||||
header: PhantomData<Xcoff>,
|
||||
}
|
||||
|
||||
impl<'data, Xcoff, R> Default for SymbolTable<'data, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
symbols: &[],
|
||||
strings: StringTable::default(),
|
||||
header: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, Xcoff, R> SymbolTable<'data, Xcoff, R>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
/// Parse the symbol table.
|
||||
pub fn parse(header: Xcoff, data: R) -> Result<Self> {
|
||||
let mut offset = header.f_symptr().into();
|
||||
let (symbols, strings) = if offset != 0 {
|
||||
let symbols = data
|
||||
.read_slice(&mut offset, header.f_nsyms() as usize)
|
||||
.read_error("Invalid XCOFF symbol table offset or size")?;
|
||||
|
||||
// Parse the string table.
|
||||
// Note: don't update data when reading length; the length includes itself.
|
||||
let length = data
|
||||
.read_at::<U32Bytes<_>>(offset)
|
||||
.read_error("Missing XCOFF string table")?
|
||||
.get(BE);
|
||||
let str_end = offset
|
||||
.checked_add(length as u64)
|
||||
.read_error("Invalid XCOFF string table length")?;
|
||||
let strings = StringTable::new(data, offset, str_end);
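            // Symbol name offsets are relative to the start of the string table, and
            // the table's first four bytes hold its own total length, so the table
            // covers `offset..offset + length`.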
|
||||
|
||||
(symbols, strings)
|
||||
} else {
|
||||
(&[][..], StringTable::default())
|
||||
};
|
||||
|
||||
Ok(SymbolTable {
|
||||
symbols,
|
||||
strings,
|
||||
header: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the string table used for the symbol names.
|
||||
#[inline]
|
||||
pub fn strings(&self) -> StringTable<'data, R> {
|
||||
self.strings
|
||||
}
|
||||
|
||||
/// Iterate over the symbols.
|
||||
#[inline]
|
||||
pub fn iter<'table>(&'table self) -> SymbolIterator<'data, 'table, Xcoff, R> {
|
||||
SymbolIterator {
|
||||
symbols: self,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Empty symbol iterator.
|
||||
#[inline]
|
||||
pub(super) fn iter_none<'table>(&'table self) -> SymbolIterator<'data, 'table, Xcoff, R> {
|
||||
SymbolIterator {
|
||||
symbols: self,
|
||||
index: self.symbols.len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the symbol entry at the given index and offset.
|
||||
pub fn get<T: Pod>(&self, index: usize, offset: usize) -> Result<&'data T> {
|
||||
let entry = index
|
||||
.checked_add(offset)
|
||||
.and_then(|x| self.symbols.get(x))
|
||||
.read_error("Invalid XCOFF symbol index")?;
|
||||
let bytes = bytes_of(entry);
|
||||
Bytes(bytes).read().read_error("Invalid XCOFF symbol data")
|
||||
}
|
||||
|
||||
/// Return the symbol at the given index.
|
||||
pub fn symbol(&self, index: usize) -> Result<&'data Xcoff::Symbol> {
|
||||
self.get::<Xcoff::Symbol>(index, 0)
|
||||
}
|
||||
|
||||
/// Return a file auxiliary symbol.
|
||||
pub fn aux_file(&self, index: usize, offset: usize) -> Result<&'data Xcoff::FileAux> {
|
||||
debug_assert!(self.symbol(index)?.has_aux_file());
|
||||
let aux_file = self.get::<Xcoff::FileAux>(index, offset)?;
|
||||
if let Some(aux_type) = aux_file.x_auxtype() {
|
||||
if aux_type != xcoff::AUX_FILE {
|
||||
return Err(Error("Invalid index for file auxiliary symbol."));
|
||||
}
|
||||
}
|
||||
Ok(aux_file)
|
||||
}
|
||||
|
||||
/// Return the csect auxiliary symbol.
|
||||
pub fn aux_csect(&self, index: usize, offset: usize) -> Result<&'data Xcoff::CsectAux> {
|
||||
debug_assert!(self.symbol(index)?.has_aux_csect());
|
||||
let aux_csect = self.get::<Xcoff::CsectAux>(index, offset)?;
|
||||
if let Some(aux_type) = aux_csect.x_auxtype() {
|
||||
if aux_type != xcoff::AUX_CSECT {
|
||||
return Err(Error("Invalid index/offset for csect auxiliary symbol."));
|
||||
}
|
||||
}
|
||||
Ok(aux_csect)
|
||||
}
|
||||
|
||||
/// Return true if the symbol table is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.symbols.is_empty()
|
||||
}
|
||||
|
||||
/// The number of symbol table entries.
|
||||
///
|
||||
/// This includes auxiliary symbol table entries.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.symbols.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for symbol entries in an XCOFF file.
|
||||
///
|
||||
/// Yields the index and symbol structure for each symbol.
|
||||
#[derive(Debug)]
|
||||
pub struct SymbolIterator<'data, 'table, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
symbols: &'table SymbolTable<'data, Xcoff, R>,
|
||||
index: usize,
|
||||
}
|
||||
|
||||
impl<'data, 'table, Xcoff: FileHeader, R: ReadRef<'data>> Iterator
|
||||
for SymbolIterator<'data, 'table, Xcoff, R>
|
||||
{
|
||||
type Item = (SymbolIndex, &'data Xcoff::Symbol);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let index = self.index;
|
||||
let symbol = self.symbols.symbol(index).ok()?;
|
||||
self.index += 1 + symbol.n_numaux() as usize;
|
||||
Some((SymbolIndex(index), symbol))
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol table in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSymbolTable32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbolTable<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// A symbol table in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSymbolTable64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbolTable<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A symbol table in an [`XcoffFile`].
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct XcoffSymbolTable<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) symbols: &'file SymbolTable<'data, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> read::private::Sealed
|
||||
for XcoffSymbolTable<'data, 'file, Xcoff, R>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> ObjectSymbolTable<'data>
|
||||
for XcoffSymbolTable<'data, 'file, Xcoff, R>
|
||||
{
|
||||
type Symbol = XcoffSymbol<'data, 'file, Xcoff, R>;
|
||||
type SymbolIterator = XcoffSymbolIterator<'data, 'file, Xcoff, R>;
|
||||
|
||||
fn symbols(&self) -> Self::SymbolIterator {
|
||||
XcoffSymbolIterator {
|
||||
file: self.file,
|
||||
symbols: self.symbols.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_by_index(&self, index: SymbolIndex) -> read::Result<Self::Symbol> {
|
||||
let symbol = self.symbols.symbol(index.0)?;
|
||||
Ok(XcoffSymbol {
|
||||
file: self.file,
|
||||
symbols: self.symbols,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator for the symbols in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSymbolIterator32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbolIterator<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// An iterator for the symbols in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSymbolIterator64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbolIterator<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// An iterator for the symbols in an [`XcoffFile`].
|
||||
pub struct XcoffSymbolIterator<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) symbols: SymbolIterator<'data, 'file, Xcoff, R>,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> fmt::Debug
|
||||
for XcoffSymbolIterator<'data, 'file, Xcoff, R>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("XcoffSymbolIterator").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> Iterator
|
||||
for XcoffSymbolIterator<'data, 'file, Xcoff, R>
|
||||
{
|
||||
type Item = XcoffSymbol<'data, 'file, Xcoff, R>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let (index, symbol) = self.symbols.next()?;
|
||||
Some(XcoffSymbol {
|
||||
file: self.file,
|
||||
symbols: self.symbols.symbols,
|
||||
index,
|
||||
symbol,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A symbol in an [`XcoffFile32`](super::XcoffFile32).
|
||||
pub type XcoffSymbol32<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbol<'data, 'file, xcoff::FileHeader32, R>;
|
||||
/// A symbol in an [`XcoffFile64`](super::XcoffFile64).
|
||||
pub type XcoffSymbol64<'data, 'file, R = &'data [u8]> =
|
||||
XcoffSymbol<'data, 'file, xcoff::FileHeader64, R>;
|
||||
|
||||
/// A symbol in an [`XcoffFile`].
|
||||
///
|
||||
/// Most functionality is provided by the [`ObjectSymbol`] trait implementation.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct XcoffSymbol<'data, 'file, Xcoff, R = &'data [u8]>
|
||||
where
|
||||
Xcoff: FileHeader,
|
||||
R: ReadRef<'data>,
|
||||
{
|
||||
pub(super) file: &'file XcoffFile<'data, Xcoff, R>,
|
||||
pub(super) symbols: &'file SymbolTable<'data, Xcoff, R>,
|
||||
pub(super) index: SymbolIndex,
|
||||
pub(super) symbol: &'data Xcoff::Symbol,
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> read::private::Sealed
|
||||
for XcoffSymbol<'data, 'file, Xcoff, R>
|
||||
{
|
||||
}
|
||||
|
||||
impl<'data, 'file, Xcoff: FileHeader, R: ReadRef<'data>> ObjectSymbol<'data>
|
||||
for XcoffSymbol<'data, 'file, Xcoff, R>
|
||||
{
|
||||
#[inline]
|
||||
fn index(&self) -> SymbolIndex {
|
||||
self.index
|
||||
}
|
||||
|
||||
fn name_bytes(&self) -> Result<&'data [u8]> {
|
||||
if self.symbol.has_aux_file() {
|
||||
// By convention the file name is in the first auxiliary entry.
|
||||
self.symbols
|
||||
.aux_file(self.index.0, 1)?
|
||||
.fname(self.symbols.strings)
|
||||
} else {
|
||||
self.symbol.name(self.symbols.strings)
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> Result<&'data str> {
|
||||
let name = self.name_bytes()?;
|
||||
str::from_utf8(name)
|
||||
.ok()
|
||||
.read_error("Non UTF-8 XCOFF symbol name")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn address(&self) -> u64 {
|
||||
match self.symbol.n_sclass() {
|
||||
// Relocatable address.
|
||||
xcoff::C_EXT
|
||||
| xcoff::C_WEAKEXT
|
||||
| xcoff::C_HIDEXT
|
||||
| xcoff::C_FCN
|
||||
| xcoff::C_BLOCK
|
||||
| xcoff::C_STAT
|
||||
| xcoff::C_INFO => self.symbol.n_value().into(),
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size(&self) -> u64 {
|
||||
if self.symbol.has_aux_csect() {
|
||||
// XCOFF32 must have the csect auxiliary entry as the last auxiliary entry.
|
||||
// XCOFF64 doesn't require this, but conventionally does.
|
||||
if let Ok(aux_csect) = self
|
||||
.file
|
||||
.symbols
|
||||
.aux_csect(self.index.0, self.symbol.n_numaux() as usize)
|
||||
{
|
||||
let sym_type = aux_csect.sym_type();
|
||||
if sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_CM {
|
||||
return aux_csect.x_scnlen();
|
||||
}
|
||||
}
|
||||
}
|
||||
0
|
||||
}
|
||||
|
||||
fn kind(&self) -> SymbolKind {
|
||||
if self.symbol.has_aux_csect() {
|
||||
if let Ok(aux_csect) = self
|
||||
.file
|
||||
.symbols
|
||||
.aux_csect(self.index.0, self.symbol.n_numaux() as usize)
|
||||
{
|
||||
let sym_type = aux_csect.sym_type();
|
||||
if sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_CM {
|
||||
return match aux_csect.x_smclas() {
|
||||
xcoff::XMC_PR | xcoff::XMC_GL => SymbolKind::Text,
|
||||
xcoff::XMC_RO | xcoff::XMC_RW | xcoff::XMC_TD | xcoff::XMC_BS => {
|
||||
SymbolKind::Data
|
||||
}
|
||||
xcoff::XMC_TL | xcoff::XMC_UL => SymbolKind::Tls,
|
||||
xcoff::XMC_DS | xcoff::XMC_TC0 | xcoff::XMC_TC => {
|
||||
// `Metadata` might be a better kind for these if we had it.
|
||||
SymbolKind::Data
|
||||
}
|
||||
_ => SymbolKind::Unknown,
|
||||
};
|
||||
} else if sym_type == xcoff::XTY_LD {
|
||||
// A function entry point. Neither `Text` nor `Label` is a good fit for this.
|
||||
return SymbolKind::Text;
|
||||
} else if sym_type == xcoff::XTY_ER {
|
||||
return SymbolKind::Unknown;
|
||||
}
|
||||
}
|
||||
}
|
||||
match self.symbol.n_sclass() {
|
||||
xcoff::C_NULL => SymbolKind::Null,
|
||||
xcoff::C_FILE => SymbolKind::File,
|
||||
_ => SymbolKind::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn section(&self) -> SymbolSection {
|
||||
match self.symbol.n_scnum() {
|
||||
xcoff::N_ABS => SymbolSection::Absolute,
|
||||
xcoff::N_UNDEF => SymbolSection::Undefined,
|
||||
xcoff::N_DEBUG => SymbolSection::None,
|
||||
index if index > 0 => SymbolSection::Section(SectionIndex(index as usize)),
|
||||
_ => SymbolSection::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
self.symbol.is_undefined()
|
||||
}
|
||||
|
||||
/// Return true if the symbol is a definition of a function or data object.
|
||||
#[inline]
|
||||
fn is_definition(&self) -> bool {
|
||||
if self.symbol.n_scnum() <= 0 {
|
||||
return false;
|
||||
}
|
||||
if self.symbol.has_aux_csect() {
|
||||
if let Ok(aux_csect) = self
|
||||
.symbols
|
||||
.aux_csect(self.index.0, self.symbol.n_numaux() as usize)
|
||||
{
|
||||
let sym_type = aux_csect.sym_type();
|
||||
sym_type == xcoff::XTY_SD || sym_type == xcoff::XTY_LD || sym_type == xcoff::XTY_CM
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_common(&self) -> bool {
|
||||
self.symbol.n_sclass() == xcoff::C_EXT && self.symbol.n_scnum() == xcoff::N_UNDEF
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_weak(&self) -> bool {
|
||||
self.symbol.n_sclass() == xcoff::C_WEAKEXT
|
||||
}
|
||||
|
||||
fn scope(&self) -> SymbolScope {
|
||||
if self.symbol.n_scnum() == xcoff::N_UNDEF {
|
||||
SymbolScope::Unknown
|
||||
} else {
|
||||
match self.symbol.n_sclass() {
|
||||
xcoff::C_EXT | xcoff::C_WEAKEXT => {
|
||||
let visibility = self.symbol.n_type() & xcoff::SYM_V_MASK;
|
||||
if visibility == xcoff::SYM_V_HIDDEN {
|
||||
SymbolScope::Linkage
|
||||
} else {
|
||||
SymbolScope::Dynamic
|
||||
}
|
||||
}
|
||||
_ => SymbolScope::Compilation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_global(&self) -> bool {
|
||||
match self.symbol.n_sclass() {
|
||||
xcoff::C_EXT | xcoff::C_WEAKEXT => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_local(&self) -> bool {
|
||||
!self.is_global()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flags(&self) -> SymbolFlags<SectionIndex, SymbolIndex> {
|
||||
let mut x_smtyp = 0;
|
||||
let mut x_smclas = 0;
|
||||
let mut containing_csect = None;
|
||||
if self.symbol.has_aux_csect() {
|
||||
if let Ok(aux_csect) = self
|
||||
.file
|
||||
.symbols
|
||||
.aux_csect(self.index.0, self.symbol.n_numaux() as usize)
|
||||
{
|
||||
x_smtyp = aux_csect.x_smtyp();
|
||||
x_smclas = aux_csect.x_smclas();
|
||||
if aux_csect.sym_type() == xcoff::XTY_LD {
|
||||
containing_csect = Some(SymbolIndex(aux_csect.x_scnlen() as usize))
|
||||
}
|
||||
}
|
||||
}
|
||||
SymbolFlags::Xcoff {
|
||||
n_sclass: self.symbol.n_sclass(),
|
||||
x_smtyp,
|
||||
x_smclas,
|
||||
containing_csect,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::Symbol32`] and [`xcoff::Symbol64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait Symbol: Debug + Pod {
|
||||
type Word: Into<u64>;
|
||||
|
||||
fn n_value(&self) -> Self::Word;
|
||||
fn n_scnum(&self) -> i16;
|
||||
fn n_type(&self) -> u16;
|
||||
fn n_sclass(&self) -> u8;
|
||||
fn n_numaux(&self) -> u8;
|
||||
|
||||
fn name_offset(&self) -> Option<u32>;
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]>;
|
||||
|
||||
/// Return true if the symbol is undefined.
|
||||
#[inline]
|
||||
fn is_undefined(&self) -> bool {
|
||||
let n_sclass = self.n_sclass();
|
||||
(n_sclass == xcoff::C_EXT || n_sclass == xcoff::C_WEAKEXT)
|
||||
&& self.n_scnum() == xcoff::N_UNDEF
|
||||
}
|
||||
|
||||
/// Return true if the symbol has a file auxiliary entry.
|
||||
fn has_aux_file(&self) -> bool {
|
||||
self.n_numaux() > 0 && self.n_sclass() == xcoff::C_FILE
|
||||
}
|
||||
|
||||
/// Return true if the symbol has a csect auxiliary entry.
|
||||
///
|
||||
/// A csect auxiliary entry is required for each symbol table entry that has
|
||||
/// a storage class value of C_EXT, C_WEAKEXT, or C_HIDEXT.
|
||||
fn has_aux_csect(&self) -> bool {
|
||||
let sclass = self.n_sclass();
|
||||
self.n_numaux() > 0
|
||||
&& (sclass == xcoff::C_EXT || sclass == xcoff::C_WEAKEXT || sclass == xcoff::C_HIDEXT)
|
||||
}
|
||||
}
|
||||
|
||||
impl Symbol for xcoff::Symbol64 {
|
||||
type Word = u64;
|
||||
|
||||
fn n_value(&self) -> Self::Word {
|
||||
self.n_value.get(BE)
|
||||
}
|
||||
|
||||
fn n_scnum(&self) -> i16 {
|
||||
self.n_scnum.get(BE)
|
||||
}
|
||||
|
||||
fn n_type(&self) -> u16 {
|
||||
self.n_type.get(BE)
|
||||
}
|
||||
|
||||
fn n_sclass(&self) -> u8 {
|
||||
self.n_sclass
|
||||
}
|
||||
|
||||
fn n_numaux(&self) -> u8 {
|
||||
self.n_numaux
|
||||
}
|
||||
|
||||
fn name_offset(&self) -> Option<u32> {
|
||||
Some(self.n_offset.get(BE))
|
||||
}
|
||||
|
||||
/// Parse the symbol name for XCOFF64.
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
strings
|
||||
.get(self.n_offset.get(BE))
|
||||
.read_error("Invalid XCOFF symbol name offset")
|
||||
}
|
||||
}
|
||||
|
||||
impl Symbol for xcoff::Symbol32 {
|
||||
type Word = u32;
|
||||
|
||||
fn n_value(&self) -> Self::Word {
|
||||
self.n_value.get(BE)
|
||||
}
|
||||
|
||||
fn n_scnum(&self) -> i16 {
|
||||
self.n_scnum.get(BE)
|
||||
}
|
||||
|
||||
fn n_type(&self) -> u16 {
|
||||
self.n_type.get(BE)
|
||||
}
|
||||
|
||||
fn n_sclass(&self) -> u8 {
|
||||
self.n_sclass
|
||||
}
|
||||
|
||||
fn n_numaux(&self) -> u8 {
|
||||
self.n_numaux
|
||||
}
|
||||
|
||||
fn name_offset(&self) -> Option<u32> {
|
||||
if self.n_name[0] == 0 {
|
||||
let offset = u32::from_be_bytes(self.n_name[4..8].try_into().unwrap());
|
||||
Some(offset)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the symbol name for XCOFF32.
|
||||
fn name<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
if let Some(offset) = self.name_offset() {
|
||||
// If the name starts with 0 then the last 4 bytes are a string table offset.
|
||||
strings
|
||||
.get(offset)
|
||||
.read_error("Invalid XCOFF symbol name offset")
|
||||
} else {
|
||||
// The name is inline and padded with nulls.
|
||||
Ok(match memchr::memchr(b'\0', &self.n_name) {
|
||||
Some(end) => &self.n_name[..end],
|
||||
None => &self.n_name,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::FileAux32`] and [`xcoff::FileAux64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait FileAux: Debug + Pod {
|
||||
fn x_fname(&self) -> &[u8; 8];
|
||||
fn x_ftype(&self) -> u8;
|
||||
fn x_auxtype(&self) -> Option<u8>;
|
||||
|
||||
fn name_offset(&self) -> Option<u32> {
|
||||
let x_fname = self.x_fname();
|
||||
if x_fname[0] == 0 {
|
||||
Some(u32::from_be_bytes(x_fname[4..8].try_into().unwrap()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the x_fname field, which may be an inline string or a string table offset.
|
||||
fn fname<'data, R: ReadRef<'data>>(
|
||||
&'data self,
|
||||
strings: StringTable<'data, R>,
|
||||
) -> Result<&'data [u8]> {
|
||||
if let Some(offset) = self.name_offset() {
|
||||
// If the name starts with 0 then the last 4 bytes are a string table offset.
|
||||
strings
|
||||
.get(offset)
|
||||
.read_error("Invalid XCOFF symbol name offset")
|
||||
} else {
|
||||
// The name is inline and padded with nulls.
|
||||
let x_fname = self.x_fname();
|
||||
Ok(match memchr::memchr(b'\0', x_fname) {
|
||||
Some(end) => &x_fname[..end],
|
||||
None => x_fname,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileAux for xcoff::FileAux64 {
|
||||
fn x_fname(&self) -> &[u8; 8] {
|
||||
&self.x_fname
|
||||
}
|
||||
|
||||
fn x_ftype(&self) -> u8 {
|
||||
self.x_ftype
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
Some(self.x_auxtype)
|
||||
}
|
||||
}
|
||||
|
||||
impl FileAux for xcoff::FileAux32 {
|
||||
fn x_fname(&self) -> &[u8; 8] {
|
||||
&self.x_fname
|
||||
}
|
||||
|
||||
fn x_ftype(&self) -> u8 {
|
||||
self.x_ftype
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for generic access to [`xcoff::CsectAux32`] and [`xcoff::CsectAux64`].
|
||||
#[allow(missing_docs)]
|
||||
pub trait CsectAux: Debug + Pod {
|
||||
fn x_scnlen(&self) -> u64;
|
||||
fn x_parmhash(&self) -> u32;
|
||||
fn x_snhash(&self) -> u16;
|
||||
fn x_smtyp(&self) -> u8;
|
||||
fn x_smclas(&self) -> u8;
|
||||
fn x_stab(&self) -> Option<u32>;
|
||||
fn x_snstab(&self) -> Option<u16>;
|
||||
fn x_auxtype(&self) -> Option<u8>;
|
||||
|
||||
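// x_smtyp packs two values: the low 3 bits are the csect symbol type
// (XTY_ER/XTY_SD/XTY_LD/XTY_CM) and the remaining bits are the log2 alignment.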
fn alignment(&self) -> u8 {
|
||||
self.x_smtyp() >> 3
|
||||
}
|
||||
fn sym_type(&self) -> u8 {
|
||||
self.x_smtyp() & 0x07
|
||||
}
|
||||
}
|
||||
|
||||
impl CsectAux for xcoff::CsectAux64 {
|
||||
fn x_scnlen(&self) -> u64 {
|
||||
self.x_scnlen_lo.get(BE) as u64 | ((self.x_scnlen_hi.get(BE) as u64) << 32)
|
||||
}
|
||||
|
||||
fn x_parmhash(&self) -> u32 {
|
||||
self.x_parmhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_snhash(&self) -> u16 {
|
||||
self.x_snhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_smtyp(&self) -> u8 {
|
||||
self.x_smtyp
|
||||
}
|
||||
|
||||
fn x_smclas(&self) -> u8 {
|
||||
self.x_smclas
|
||||
}
|
||||
|
||||
fn x_stab(&self) -> Option<u32> {
|
||||
None
|
||||
}
|
||||
|
||||
fn x_snstab(&self) -> Option<u16> {
|
||||
None
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
Some(self.x_auxtype)
|
||||
}
|
||||
}
|
||||
|
||||
impl CsectAux for xcoff::CsectAux32 {
|
||||
fn x_scnlen(&self) -> u64 {
|
||||
self.x_scnlen.get(BE) as u64
|
||||
}
|
||||
|
||||
fn x_parmhash(&self) -> u32 {
|
||||
self.x_parmhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_snhash(&self) -> u16 {
|
||||
self.x_snhash.get(BE)
|
||||
}
|
||||
|
||||
fn x_smtyp(&self) -> u8 {
|
||||
self.x_smtyp
|
||||
}
|
||||
|
||||
fn x_smclas(&self) -> u8 {
|
||||
self.x_smclas
|
||||
}
|
||||
|
||||
fn x_stab(&self) -> Option<u32> {
|
||||
Some(self.x_stab.get(BE))
|
||||
}
|
||||
|
||||
fn x_snstab(&self) -> Option<u16> {
|
||||
Some(self.x_snstab.get(BE))
|
||||
}
|
||||
|
||||
fn x_auxtype(&self) -> Option<u8> {
|
||||
None
|
||||
}
|
||||
}
|
||||
10
vendor/object/src/write/coff/mod.rs
vendored
Normal file
@@ -0,0 +1,10 @@
//! Support for writing COFF files.
//!
//! Provides [`Writer`] for low level writing of COFF files.
//! This is also used to provide COFF support for [`write::Object`](crate::write::Object).

mod object;
pub use self::object::*;

mod writer;
pub use writer::*;
583
vendor/object/src/write/coff/object.rs
vendored
Normal file
@@ -0,0 +1,583 @@
use alloc::vec::Vec;
|
||||
|
||||
use crate::pe as coff;
|
||||
use crate::write::coff::writer;
|
||||
use crate::write::util::*;
|
||||
use crate::write::*;
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct SectionOffsets {
|
||||
name: writer::Name,
|
||||
offset: u32,
|
||||
reloc_offset: u32,
|
||||
selection: u8,
|
||||
associative_section: u32,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct SymbolOffsets {
|
||||
name: writer::Name,
|
||||
index: u32,
|
||||
aux_count: u8,
|
||||
}
|
||||
|
||||
/// Internal format to use for the `.drectve` section containing linker
|
||||
/// directives for symbol exports.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum CoffExportStyle {
|
||||
/// MSVC format supported by link.exe and LLD.
|
||||
Msvc,
|
||||
/// GNU format supported by GNU ld and LLD.
|
||||
Gnu,
|
||||
}
|
||||
|
||||
impl<'a> Object<'a> {
|
||||
pub(crate) fn coff_section_info(
|
||||
&self,
|
||||
section: StandardSection,
|
||||
) -> (&'static [u8], &'static [u8], SectionKind, SectionFlags) {
|
||||
match section {
|
||||
StandardSection::Text => (&[], &b".text"[..], SectionKind::Text, SectionFlags::None),
|
||||
StandardSection::Data => (&[], &b".data"[..], SectionKind::Data, SectionFlags::None),
|
||||
StandardSection::ReadOnlyData
|
||||
| StandardSection::ReadOnlyDataWithRel
|
||||
| StandardSection::ReadOnlyString => (
|
||||
&[],
|
||||
&b".rdata"[..],
|
||||
SectionKind::ReadOnlyData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::UninitializedData => (
|
||||
&[],
|
||||
&b".bss"[..],
|
||||
SectionKind::UninitializedData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
// TLS sections are data sections with a special name.
|
||||
StandardSection::Tls => (&[], &b".tls$"[..], SectionKind::Data, SectionFlags::None),
|
||||
StandardSection::UninitializedTls => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::UninitializedTls, SectionFlags::None)
|
||||
}
|
||||
StandardSection::TlsVariables => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::TlsVariables, SectionFlags::None)
|
||||
}
|
||||
StandardSection::Common => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::Common, SectionFlags::None)
|
||||
}
|
||||
StandardSection::GnuProperty => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::Note, SectionFlags::None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn coff_subsection_name(&self, section: &[u8], value: &[u8]) -> Vec<u8> {
|
||||
let mut name = section.to_vec();
|
||||
name.push(b'$');
|
||||
name.extend_from_slice(value);
|
||||
name
|
||||
}
|
||||
|
||||
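// COFF PC-relative relocations are measured from the end of the 4-byte field being
// relocated, so the addend is folded into the relocation type: -4 selects the plain
// REL32 relocation and, on x86-64, -5 through -9 select IMAGE_REL_AMD64_REL32_1..=_5.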
pub(crate) fn coff_fixup_relocation(&mut self, relocation: &mut Relocation) -> i64 {
|
||||
if relocation.kind == RelocationKind::GotRelative {
|
||||
// Use a stub symbol for the relocation instead.
|
||||
// This isn't really a GOT, but it's a similar purpose.
|
||||
// TODO: need to handle DLL imports differently?
|
||||
relocation.kind = RelocationKind::Relative;
|
||||
relocation.symbol = self.coff_add_stub_symbol(relocation.symbol);
|
||||
} else if relocation.kind == RelocationKind::PltRelative {
|
||||
// Windows doesn't need a separate relocation type for
|
||||
// references to functions in import libraries.
|
||||
// For convenience, treat this the same as Relative.
|
||||
relocation.kind = RelocationKind::Relative;
|
||||
}
|
||||
|
||||
let constant = match self.architecture {
|
||||
Architecture::I386 | Architecture::Arm | Architecture::Aarch64 => match relocation.kind
|
||||
{
|
||||
RelocationKind::Relative => {
|
||||
// IMAGE_REL_I386_REL32, IMAGE_REL_ARM_REL32, IMAGE_REL_ARM64_REL32
|
||||
relocation.addend + 4
|
||||
}
|
||||
_ => relocation.addend,
|
||||
},
|
||||
Architecture::X86_64 => match relocation.kind {
|
||||
RelocationKind::Relative => {
|
||||
// IMAGE_REL_AMD64_REL32 through to IMAGE_REL_AMD64_REL32_5
|
||||
if relocation.addend <= -4 && relocation.addend >= -9 {
|
||||
0
|
||||
} else {
|
||||
relocation.addend + 4
|
||||
}
|
||||
}
|
||||
_ => relocation.addend,
|
||||
},
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
relocation.addend -= constant;
|
||||
constant
|
||||
}
|
||||
|
||||
fn coff_add_stub_symbol(&mut self, symbol_id: SymbolId) -> SymbolId {
|
||||
if let Some(stub_id) = self.stub_symbols.get(&symbol_id) {
|
||||
return *stub_id;
|
||||
}
|
||||
let stub_size = self.architecture.address_size().unwrap().bytes();
|
||||
|
||||
let name = b".rdata$.refptr".to_vec();
|
||||
let section_id = self.add_section(Vec::new(), name, SectionKind::ReadOnlyData);
|
||||
let section = self.section_mut(section_id);
|
||||
section.set_data(vec![0; stub_size as usize], u64::from(stub_size));
|
||||
section.relocations = vec![Relocation {
|
||||
offset: 0,
|
||||
size: stub_size * 8,
|
||||
kind: RelocationKind::Absolute,
|
||||
encoding: RelocationEncoding::Generic,
|
||||
symbol: symbol_id,
|
||||
addend: 0,
|
||||
}];
|
||||
|
||||
let mut name = b".refptr.".to_vec();
|
||||
name.extend_from_slice(&self.symbol(symbol_id).name);
|
||||
let stub_id = self.add_raw_symbol(Symbol {
|
||||
name,
|
||||
value: 0,
|
||||
size: u64::from(stub_size),
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Compilation,
|
||||
weak: false,
|
||||
section: SymbolSection::Section(section_id),
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
self.stub_symbols.insert(symbol_id, stub_id);
|
||||
|
||||
stub_id
|
||||
}
|
||||
|
||||
/// Appends linker directives to the `.drectve` section to tell the linker
|
||||
/// to export all symbols with `SymbolScope::Dynamic`.
|
||||
///
|
||||
/// This must be called after all symbols have been defined.
|
||||
pub fn add_coff_exports(&mut self, style: CoffExportStyle) {
|
||||
assert_eq!(self.format, BinaryFormat::Coff);
|
||||
|
||||
let mut directives = vec![];
|
||||
for symbol in &self.symbols {
|
||||
if symbol.scope == SymbolScope::Dynamic {
|
||||
match style {
|
||||
CoffExportStyle::Msvc => directives.extend(b" /EXPORT:\""),
|
||||
CoffExportStyle::Gnu => directives.extend(b" -export:\""),
|
||||
}
|
||||
directives.extend(&symbol.name);
|
||||
directives.extend(b"\"");
|
||||
if symbol.kind != SymbolKind::Text {
|
||||
match style {
|
||||
CoffExportStyle::Msvc => directives.extend(b",DATA"),
|
||||
CoffExportStyle::Gnu => directives.extend(b",data"),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let drectve = self.add_section(vec![], b".drectve".to_vec(), SectionKind::Linker);
|
||||
self.append_section_data(drectve, &directives, 1);
|
||||
}
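// Sketch of intended use (illustrative only): after adding symbols with
// `SymbolScope::Dynamic` to a COFF `write::Object`, calling
// `obj.add_coff_exports(CoffExportStyle::Msvc)` appends the corresponding
// `/EXPORT:` directives, which the linker reads from `.drectve` when the
// object is later written with `obj.write()`.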
|
||||
|
||||
pub(crate) fn coff_write(&self, buffer: &mut dyn WritableBuffer) -> Result<()> {
|
||||
let mut writer = writer::Writer::new(buffer);
|
||||
|
||||
// Add section strings to strtab.
|
||||
let mut section_offsets = vec![SectionOffsets::default(); self.sections.len()];
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
section_offsets[index].name = writer.add_name(§ion.name);
|
||||
}
|
||||
|
||||
// Set COMDAT flags.
|
||||
for comdat in &self.comdats {
|
||||
let symbol = &self.symbols[comdat.symbol.0];
|
||||
let comdat_section = match symbol.section {
|
||||
SymbolSection::Section(id) => id.0,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unsupported COMDAT symbol `{}` section {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.section
|
||||
)));
|
||||
}
|
||||
};
|
||||
section_offsets[comdat_section].selection = match comdat.kind {
|
||||
ComdatKind::NoDuplicates => coff::IMAGE_COMDAT_SELECT_NODUPLICATES,
|
||||
ComdatKind::Any => coff::IMAGE_COMDAT_SELECT_ANY,
|
||||
ComdatKind::SameSize => coff::IMAGE_COMDAT_SELECT_SAME_SIZE,
|
||||
ComdatKind::ExactMatch => coff::IMAGE_COMDAT_SELECT_EXACT_MATCH,
|
||||
ComdatKind::Largest => coff::IMAGE_COMDAT_SELECT_LARGEST,
|
||||
ComdatKind::Newest => coff::IMAGE_COMDAT_SELECT_NEWEST,
|
||||
ComdatKind::Unknown => {
|
||||
return Err(Error(format!(
|
||||
"unsupported COMDAT symbol `{}` kind {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
comdat.kind
|
||||
)));
|
||||
}
|
||||
};
|
||||
for id in &comdat.sections {
|
||||
let section = &self.sections[id.0];
|
||||
if section.symbol.is_none() {
|
||||
return Err(Error(format!(
|
||||
"missing symbol for COMDAT section `{}`",
|
||||
section.name().unwrap_or(""),
|
||||
)));
|
||||
}
|
||||
if id.0 != comdat_section {
|
||||
section_offsets[id.0].selection = coff::IMAGE_COMDAT_SELECT_ASSOCIATIVE;
|
||||
section_offsets[id.0].associative_section = comdat_section as u32 + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reserve symbol indices and add symbol strings to strtab.
|
||||
let mut symbol_offsets = vec![SymbolOffsets::default(); self.symbols.len()];
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
symbol_offsets[index].index = writer.reserve_symbol_index();
|
||||
let mut name = &*symbol.name;
|
||||
match symbol.kind {
|
||||
SymbolKind::File => {
|
||||
// Name goes in auxiliary symbol records.
|
||||
symbol_offsets[index].aux_count = writer.reserve_aux_file_name(&symbol.name);
|
||||
name = b".file";
|
||||
}
|
||||
SymbolKind::Section if symbol.section.id().is_some() => {
|
||||
symbol_offsets[index].aux_count = writer.reserve_aux_section();
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
symbol_offsets[index].name = writer.add_name(name);
|
||||
}
|
||||
|
||||
// Reserve file ranges.
|
||||
writer.reserve_file_header();
|
||||
writer.reserve_section_headers(self.sections.len() as u16);
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
section_offsets[index].offset = writer.reserve_section(section.data.len());
|
||||
section_offsets[index].reloc_offset =
|
||||
writer.reserve_relocations(section.relocations.len());
|
||||
}
|
||||
writer.reserve_symtab_strtab();
|
||||
|
||||
// Start writing.
|
||||
writer.write_file_header(writer::FileHeader {
|
||||
machine: match (self.architecture, self.sub_architecture) {
|
||||
(Architecture::Arm, None) => coff::IMAGE_FILE_MACHINE_ARMNT,
|
||||
(Architecture::Aarch64, None) => coff::IMAGE_FILE_MACHINE_ARM64,
|
||||
(Architecture::Aarch64, Some(SubArchitecture::Arm64EC)) => {
|
||||
coff::IMAGE_FILE_MACHINE_ARM64EC
|
||||
}
|
||||
(Architecture::I386, None) => coff::IMAGE_FILE_MACHINE_I386,
|
||||
(Architecture::X86_64, None) => coff::IMAGE_FILE_MACHINE_AMD64,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented architecture {:?} with sub-architecture {:?}",
|
||||
self.architecture, self.sub_architecture
|
||||
)));
|
||||
}
|
||||
},
|
||||
time_date_stamp: 0,
|
||||
characteristics: match self.flags {
|
||||
FileFlags::Coff { characteristics } => characteristics,
|
||||
_ => 0,
|
||||
},
|
||||
})?;
|
||||
|
||||
// Write section headers.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let mut characteristics = if let SectionFlags::Coff {
|
||||
characteristics, ..
|
||||
} = section.flags
|
||||
{
|
||||
characteristics
|
||||
} else {
|
||||
match section.kind {
|
||||
SectionKind::Text => {
|
||||
coff::IMAGE_SCN_CNT_CODE
|
||||
| coff::IMAGE_SCN_MEM_EXECUTE
|
||||
| coff::IMAGE_SCN_MEM_READ
|
||||
}
|
||||
SectionKind::Data => {
|
||||
coff::IMAGE_SCN_CNT_INITIALIZED_DATA
|
||||
| coff::IMAGE_SCN_MEM_READ
|
||||
| coff::IMAGE_SCN_MEM_WRITE
|
||||
}
|
||||
SectionKind::UninitializedData => {
|
||||
coff::IMAGE_SCN_CNT_UNINITIALIZED_DATA
|
||||
| coff::IMAGE_SCN_MEM_READ
|
||||
| coff::IMAGE_SCN_MEM_WRITE
|
||||
}
|
||||
SectionKind::ReadOnlyData
|
||||
| SectionKind::ReadOnlyDataWithRel
|
||||
| SectionKind::ReadOnlyString => {
|
||||
coff::IMAGE_SCN_CNT_INITIALIZED_DATA | coff::IMAGE_SCN_MEM_READ
|
||||
}
|
||||
SectionKind::Debug | SectionKind::Other | SectionKind::OtherString => {
|
||||
coff::IMAGE_SCN_CNT_INITIALIZED_DATA
|
||||
| coff::IMAGE_SCN_MEM_READ
|
||||
| coff::IMAGE_SCN_MEM_DISCARDABLE
|
||||
}
|
||||
SectionKind::Linker => coff::IMAGE_SCN_LNK_INFO | coff::IMAGE_SCN_LNK_REMOVE,
|
||||
SectionKind::Common
|
||||
| SectionKind::Tls
|
||||
| SectionKind::UninitializedTls
|
||||
| SectionKind::TlsVariables
|
||||
| SectionKind::Note
|
||||
| SectionKind::Unknown
|
||||
| SectionKind::Metadata
|
||||
| SectionKind::Elf(_) => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented section `{}` kind {:?}",
|
||||
section.name().unwrap_or(""),
|
||||
section.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
};
|
||||
if section_offsets[index].selection != 0 {
|
||||
characteristics |= coff::IMAGE_SCN_LNK_COMDAT;
|
||||
};
|
||||
if section.relocations.len() > 0xffff {
|
||||
characteristics |= coff::IMAGE_SCN_LNK_NRELOC_OVFL;
|
||||
}
|
||||
characteristics |= match section.align {
|
||||
1 => coff::IMAGE_SCN_ALIGN_1BYTES,
|
||||
2 => coff::IMAGE_SCN_ALIGN_2BYTES,
|
||||
4 => coff::IMAGE_SCN_ALIGN_4BYTES,
|
||||
8 => coff::IMAGE_SCN_ALIGN_8BYTES,
|
||||
16 => coff::IMAGE_SCN_ALIGN_16BYTES,
|
||||
32 => coff::IMAGE_SCN_ALIGN_32BYTES,
|
||||
64 => coff::IMAGE_SCN_ALIGN_64BYTES,
|
||||
128 => coff::IMAGE_SCN_ALIGN_128BYTES,
|
||||
256 => coff::IMAGE_SCN_ALIGN_256BYTES,
|
||||
512 => coff::IMAGE_SCN_ALIGN_512BYTES,
|
||||
1024 => coff::IMAGE_SCN_ALIGN_1024BYTES,
|
||||
2048 => coff::IMAGE_SCN_ALIGN_2048BYTES,
|
||||
4096 => coff::IMAGE_SCN_ALIGN_4096BYTES,
|
||||
8192 => coff::IMAGE_SCN_ALIGN_8192BYTES,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented section `{}` align {}",
|
||||
section.name().unwrap_or(""),
|
||||
section.align
|
||||
)));
|
||||
}
|
||||
};
|
||||
writer.write_section_header(writer::SectionHeader {
|
||||
name: section_offsets[index].name,
|
||||
size_of_raw_data: section.size as u32,
|
||||
pointer_to_raw_data: section_offsets[index].offset,
|
||||
pointer_to_relocations: section_offsets[index].reloc_offset,
|
||||
pointer_to_linenumbers: 0,
|
||||
number_of_relocations: section.relocations.len() as u32,
|
||||
number_of_linenumbers: 0,
|
||||
characteristics,
|
||||
});
|
||||
}
|
||||
|
||||
// Write section data and relocations.
|
||||
for section in &self.sections {
|
||||
writer.write_section(§ion.data);
|
||||
|
||||
if !section.relocations.is_empty() {
|
||||
//debug_assert_eq!(section_offsets[index].reloc_offset, buffer.len());
|
||||
writer.write_relocations_count(section.relocations.len());
|
||||
for reloc in §ion.relocations {
|
||||
//assert!(reloc.implicit_addend);
|
||||
let typ = match self.architecture {
|
||||
Architecture::I386 => match (reloc.kind, reloc.size, reloc.addend) {
|
||||
(RelocationKind::Absolute, 16, 0) => coff::IMAGE_REL_I386_DIR16,
|
||||
(RelocationKind::Relative, 16, 0) => coff::IMAGE_REL_I386_REL16,
|
||||
(RelocationKind::Absolute, 32, 0) => coff::IMAGE_REL_I386_DIR32,
|
||||
(RelocationKind::ImageOffset, 32, 0) => coff::IMAGE_REL_I386_DIR32NB,
|
||||
(RelocationKind::SectionIndex, 16, 0) => coff::IMAGE_REL_I386_SECTION,
|
||||
(RelocationKind::SectionOffset, 32, 0) => coff::IMAGE_REL_I386_SECREL,
|
||||
(RelocationKind::SectionOffset, 7, 0) => coff::IMAGE_REL_I386_SECREL7,
|
||||
(RelocationKind::Relative, 32, -4) => coff::IMAGE_REL_I386_REL32,
|
||||
(RelocationKind::Coff(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::X86_64 => match (reloc.kind, reloc.size, reloc.addend) {
|
||||
(RelocationKind::Absolute, 64, 0) => coff::IMAGE_REL_AMD64_ADDR64,
|
||||
(RelocationKind::Absolute, 32, 0) => coff::IMAGE_REL_AMD64_ADDR32,
|
||||
(RelocationKind::ImageOffset, 32, 0) => coff::IMAGE_REL_AMD64_ADDR32NB,
|
||||
(RelocationKind::Relative, 32, -4) => coff::IMAGE_REL_AMD64_REL32,
|
||||
(RelocationKind::Relative, 32, -5) => coff::IMAGE_REL_AMD64_REL32_1,
|
||||
(RelocationKind::Relative, 32, -6) => coff::IMAGE_REL_AMD64_REL32_2,
|
||||
(RelocationKind::Relative, 32, -7) => coff::IMAGE_REL_AMD64_REL32_3,
|
||||
(RelocationKind::Relative, 32, -8) => coff::IMAGE_REL_AMD64_REL32_4,
|
||||
(RelocationKind::Relative, 32, -9) => coff::IMAGE_REL_AMD64_REL32_5,
|
||||
(RelocationKind::SectionIndex, 16, 0) => coff::IMAGE_REL_AMD64_SECTION,
|
||||
(RelocationKind::SectionOffset, 32, 0) => coff::IMAGE_REL_AMD64_SECREL,
|
||||
(RelocationKind::SectionOffset, 7, 0) => coff::IMAGE_REL_AMD64_SECREL7,
|
||||
(RelocationKind::Coff(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Arm => match (reloc.kind, reloc.size, reloc.addend) {
|
||||
(RelocationKind::Absolute, 32, 0) => coff::IMAGE_REL_ARM_ADDR32,
|
||||
(RelocationKind::ImageOffset, 32, 0) => coff::IMAGE_REL_ARM_ADDR32NB,
|
||||
(RelocationKind::Relative, 32, -4) => coff::IMAGE_REL_ARM_REL32,
|
||||
(RelocationKind::SectionIndex, 16, 0) => coff::IMAGE_REL_ARM_SECTION,
|
||||
(RelocationKind::SectionOffset, 32, 0) => coff::IMAGE_REL_ARM_SECREL,
|
||||
(RelocationKind::Coff(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Aarch64 => match (reloc.kind, reloc.size, reloc.addend) {
|
||||
(RelocationKind::Absolute, 32, 0) => coff::IMAGE_REL_ARM64_ADDR32,
|
||||
(RelocationKind::ImageOffset, 32, 0) => coff::IMAGE_REL_ARM64_ADDR32NB,
|
||||
(RelocationKind::SectionIndex, 16, 0) => coff::IMAGE_REL_ARM64_SECTION,
|
||||
(RelocationKind::SectionOffset, 32, 0) => coff::IMAGE_REL_ARM64_SECREL,
|
||||
(RelocationKind::Absolute, 64, 0) => coff::IMAGE_REL_ARM64_ADDR64,
|
||||
(RelocationKind::Relative, 32, -4) => coff::IMAGE_REL_ARM64_REL32,
|
||||
(RelocationKind::Coff(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented architecture {:?}",
|
||||
self.architecture
|
||||
)));
|
||||
}
|
||||
};
|
||||
writer.write_relocation(writer::Relocation {
|
||||
virtual_address: reloc.offset as u32,
|
||||
symbol: symbol_offsets[reloc.symbol.0].index,
|
||||
typ,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write symbols.
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
let section_number = match symbol.section {
|
||||
SymbolSection::None => {
|
||||
debug_assert_eq!(symbol.kind, SymbolKind::File);
|
||||
coff::IMAGE_SYM_DEBUG as u16
|
||||
}
|
||||
SymbolSection::Undefined => coff::IMAGE_SYM_UNDEFINED as u16,
|
||||
SymbolSection::Absolute => coff::IMAGE_SYM_ABSOLUTE as u16,
|
||||
SymbolSection::Common => coff::IMAGE_SYM_UNDEFINED as u16,
|
||||
SymbolSection::Section(id) => id.0 as u16 + 1,
|
||||
};
|
||||
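// COFF only distinguishes functions here: text symbols get the derived type
// IMAGE_SYM_DTYPE_FUNCTION, everything else is left as IMAGE_SYM_TYPE_NULL.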
let typ = if symbol.kind == SymbolKind::Text {
|
||||
coff::IMAGE_SYM_DTYPE_FUNCTION << coff::IMAGE_SYM_DTYPE_SHIFT
|
||||
} else {
|
||||
coff::IMAGE_SYM_TYPE_NULL
|
||||
};
|
||||
let storage_class = match symbol.kind {
|
||||
SymbolKind::File => coff::IMAGE_SYM_CLASS_FILE,
|
||||
SymbolKind::Section => {
|
||||
if symbol.section.id().is_some() {
|
||||
coff::IMAGE_SYM_CLASS_STATIC
|
||||
} else {
|
||||
coff::IMAGE_SYM_CLASS_SECTION
|
||||
}
|
||||
}
|
||||
SymbolKind::Label => coff::IMAGE_SYM_CLASS_LABEL,
|
||||
SymbolKind::Text | SymbolKind::Data | SymbolKind::Tls => {
|
||||
match symbol.section {
|
||||
SymbolSection::None => {
|
||||
return Err(Error(format!(
|
||||
"missing section for symbol `{}`",
|
||||
symbol.name().unwrap_or("")
|
||||
)));
|
||||
}
|
||||
SymbolSection::Undefined | SymbolSection::Common => {
|
||||
coff::IMAGE_SYM_CLASS_EXTERNAL
|
||||
}
|
||||
SymbolSection::Absolute | SymbolSection::Section(_) => {
|
||||
match symbol.scope {
|
||||
// TODO: does this need aux symbol records too?
|
||||
_ if symbol.weak => coff::IMAGE_SYM_CLASS_WEAK_EXTERNAL,
|
||||
SymbolScope::Unknown => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented symbol `{}` scope {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.scope
|
||||
)));
|
||||
}
|
||||
SymbolScope::Compilation => coff::IMAGE_SYM_CLASS_STATIC,
|
||||
SymbolScope::Linkage | SymbolScope::Dynamic => {
|
||||
coff::IMAGE_SYM_CLASS_EXTERNAL
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
SymbolKind::Unknown | SymbolKind::Null => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented symbol `{}` kind {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.kind
|
||||
)));
|
||||
}
|
||||
};
|
||||
let number_of_aux_symbols = symbol_offsets[index].aux_count;
|
||||
let value = if symbol.section == SymbolSection::Common {
|
||||
symbol.size as u32
|
||||
} else {
|
||||
symbol.value as u32
|
||||
};
|
||||
writer.write_symbol(writer::Symbol {
|
||||
name: symbol_offsets[index].name,
|
||||
value,
|
||||
section_number,
|
||||
typ,
|
||||
storage_class,
|
||||
number_of_aux_symbols,
|
||||
});
|
||||
|
||||
// Write auxiliary symbols.
|
||||
match symbol.kind {
|
||||
SymbolKind::File => {
|
||||
writer.write_aux_file_name(&symbol.name, number_of_aux_symbols);
|
||||
}
|
||||
SymbolKind::Section if symbol.section.id().is_some() => {
|
||||
debug_assert_eq!(number_of_aux_symbols, 1);
|
||||
let section_index = symbol.section.id().unwrap().0;
|
||||
let section = &self.sections[section_index];
|
||||
writer.write_aux_section(writer::AuxSymbolSection {
|
||||
length: section.size as u32,
|
||||
number_of_relocations: section.relocations.len() as u32,
|
||||
number_of_linenumbers: 0,
|
||||
check_sum: checksum(section.data()),
|
||||
number: section_offsets[section_index].associative_section,
|
||||
selection: section_offsets[section_index].selection,
|
||||
});
|
||||
}
|
||||
_ => {
|
||||
debug_assert_eq!(number_of_aux_symbols, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writer.write_strtab();
|
||||
|
||||
debug_assert_eq!(writer.reserved_len(), writer.len());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// JamCRC: the CRC-32 variant with an initial value of 0xFFFFFFFF and no final bit
// inversion, used here for the COMDAT section check sum.
|
||||
fn checksum(data: &[u8]) -> u32 {
|
||||
let mut hasher = crc32fast::Hasher::new_with_initial(0xffff_ffff);
|
||||
hasher.update(data);
|
||||
!hasher.finalize()
|
||||
}
|
||||
518
vendor/object/src/write/coff/writer.rs
vendored
Normal file
@@ -0,0 +1,518 @@
//! Helper for writing COFF files.
|
||||
use alloc::string::String;
|
||||
use alloc::vec::Vec;
|
||||
use core::mem;
|
||||
|
||||
use crate::endian::{LittleEndian as LE, U16Bytes, U32Bytes, U16, U32};
|
||||
use crate::pe;
|
||||
use crate::write::string::{StringId, StringTable};
|
||||
use crate::write::util;
|
||||
use crate::write::{Error, Result, WritableBuffer};
|
||||
|
||||
/// A helper for writing COFF files.
///
/// Writing uses a two-phase approach. The first phase builds up all of the information
/// that may need to be known ahead of time:
/// - build string table
/// - reserve section indices
/// - reserve symbol indices
/// - reserve file ranges for headers and sections
///
/// Some of the information has ordering requirements. For example, strings must be added
/// to the string table before reserving the file range for the string table. There are debug
/// asserts to check some of these requirements.
///
/// The second phase writes everything out in order. Thus the caller must ensure writing
/// is in the same order that file ranges were reserved. There are debug asserts to assist
/// with checking this.
#[allow(missing_debug_implementations)]
|
||||
pub struct Writer<'a> {
|
||||
buffer: &'a mut dyn WritableBuffer,
|
||||
len: usize,
|
||||
|
||||
section_num: u16,
|
||||
|
||||
symtab_offset: u32,
|
||||
symtab_num: u32,
|
||||
|
||||
strtab: StringTable<'a>,
|
||||
strtab_len: usize,
|
||||
strtab_offset: u32,
|
||||
strtab_data: Vec<u8>,
|
||||
}
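// A minimal sketch of the two-phase flow described above, assuming `Vec<u8>` is
// used as the `WritableBuffer` (illustrative only; a real object would also
// reserve and write sections, relocations and symbols between these calls):
//
//     let mut buffer = Vec::new();
//     let mut writer = Writer::new(&mut buffer);
//     // Phase 1: reserve file ranges.
//     writer.reserve_file_header();
//     writer.reserve_section_headers(0);
//     writer.reserve_symtab_strtab();
//     // Phase 2: write in the same order.
//     writer.write_file_header(FileHeader::default())?;
//     writer.write_strtab();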
|
||||
|
||||
impl<'a> Writer<'a> {
|
||||
/// Create a new `Writer`.
|
||||
pub fn new(buffer: &'a mut dyn WritableBuffer) -> Self {
|
||||
Writer {
|
||||
buffer,
|
||||
len: 0,
|
||||
|
||||
section_num: 0,
|
||||
|
||||
symtab_offset: 0,
|
||||
symtab_num: 0,
|
||||
|
||||
strtab: StringTable::default(),
|
||||
strtab_len: 0,
|
||||
strtab_offset: 0,
|
||||
strtab_data: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the current file length that has been reserved.
|
||||
pub fn reserved_len(&self) -> usize {
|
||||
self.len
|
||||
}
|
||||
|
||||
/// Return the current file length that has been written.
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.buffer.len()
|
||||
}
|
||||
|
||||
/// Reserve a file range with the given size and starting alignment.
|
||||
///
|
||||
/// Returns the aligned offset of the start of the range.
|
||||
pub fn reserve(&mut self, len: usize, align_start: usize) -> u32 {
|
||||
if align_start > 1 {
|
||||
self.len = util::align(self.len, align_start);
|
||||
}
|
||||
let offset = self.len;
|
||||
self.len += len;
|
||||
offset as u32
|
||||
}
|
||||
|
||||
/// Write alignment padding bytes.
|
||||
pub fn write_align(&mut self, align_start: usize) {
|
||||
if align_start > 1 {
|
||||
util::write_align(self.buffer, align_start);
|
||||
}
|
||||
}
|
||||
|
||||
/// Write data.
|
||||
pub fn write(&mut self, data: &[u8]) {
|
||||
self.buffer.write_bytes(data);
|
||||
}
|
||||
|
||||
/// Reserve the file range up to the given file offset.
|
||||
pub fn reserve_until(&mut self, offset: usize) {
|
||||
debug_assert!(self.len <= offset);
|
||||
self.len = offset;
|
||||
}
|
||||
|
||||
/// Write padding up to the given file offset.
|
||||
pub fn pad_until(&mut self, offset: usize) {
|
||||
debug_assert!(self.buffer.len() <= offset);
|
||||
self.buffer.resize(offset);
|
||||
}
|
||||
|
||||
/// Reserve the range for the file header.
|
||||
///
|
||||
/// This must be at the start of the file.
|
||||
pub fn reserve_file_header(&mut self) {
|
||||
debug_assert_eq!(self.len, 0);
|
||||
self.reserve(mem::size_of::<pe::ImageFileHeader>(), 1);
|
||||
}
|
||||
|
||||
/// Write the file header.
|
||||
///
|
||||
/// This must be at the start of the file.
|
||||
///
|
||||
/// Fields that can be derived from known information are automatically set by this function.
|
||||
pub fn write_file_header(&mut self, header: FileHeader) -> Result<()> {
|
||||
debug_assert_eq!(self.buffer.len(), 0);
|
||||
|
||||
// Start writing.
|
||||
self.buffer
|
||||
.reserve(self.len)
|
||||
.map_err(|_| Error(String::from("Cannot allocate buffer")))?;
|
||||
|
||||
// Write file header.
|
||||
let header = pe::ImageFileHeader {
|
||||
machine: U16::new(LE, header.machine),
|
||||
number_of_sections: U16::new(LE, self.section_num),
|
||||
time_date_stamp: U32::new(LE, header.time_date_stamp),
|
||||
pointer_to_symbol_table: U32::new(LE, self.symtab_offset),
|
||||
number_of_symbols: U32::new(LE, self.symtab_num),
|
||||
size_of_optional_header: U16::default(),
|
||||
characteristics: U16::new(LE, header.characteristics),
|
||||
};
|
||||
self.buffer.write(&header);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Reserve the range for the section headers.
|
||||
pub fn reserve_section_headers(&mut self, section_num: u16) {
|
||||
debug_assert_eq!(self.section_num, 0);
|
||||
self.section_num = section_num;
|
||||
self.reserve(
|
||||
section_num as usize * mem::size_of::<pe::ImageSectionHeader>(),
|
||||
1,
|
||||
);
|
||||
}
|
||||
|
||||
/// Write a section header.
|
||||
pub fn write_section_header(&mut self, section: SectionHeader) {
|
||||
let mut coff_section = pe::ImageSectionHeader {
|
||||
name: [0; 8],
|
||||
virtual_size: U32::default(),
|
||||
virtual_address: U32::default(),
|
||||
size_of_raw_data: U32::new(LE, section.size_of_raw_data),
|
||||
pointer_to_raw_data: U32::new(LE, section.pointer_to_raw_data),
|
||||
pointer_to_relocations: U32::new(LE, section.pointer_to_relocations),
|
||||
pointer_to_linenumbers: U32::new(LE, section.pointer_to_linenumbers),
|
||||
number_of_relocations: if section.number_of_relocations > 0xffff {
|
||||
U16::new(LE, 0xffff)
|
||||
} else {
|
||||
U16::new(LE, section.number_of_relocations as u16)
|
||||
},
|
||||
number_of_linenumbers: U16::default(),
|
||||
characteristics: U32::new(LE, section.characteristics),
|
||||
};
|
||||
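// Long names do not fit in the 8-byte field: they are placed in the string table
// and the field holds "/" followed by the decimal offset, or "//" followed by a
// base64-encoded offset when the decimal form would be too large.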
match section.name {
|
||||
Name::Short(name) => coff_section.name = name,
|
||||
Name::Long(str_id) => {
|
||||
let mut str_offset = self.strtab.get_offset(str_id);
|
||||
if str_offset <= 9_999_999 {
|
||||
let mut name = [0; 7];
|
||||
let mut len = 0;
|
||||
if str_offset == 0 {
|
||||
name[6] = b'0';
|
||||
len = 1;
|
||||
} else {
|
||||
while str_offset != 0 {
|
||||
let rem = (str_offset % 10) as u8;
|
||||
str_offset /= 10;
|
||||
name[6 - len] = b'0' + rem;
|
||||
len += 1;
|
||||
}
|
||||
}
|
||||
coff_section.name = [0; 8];
|
||||
coff_section.name[0] = b'/';
|
||||
coff_section.name[1..][..len].copy_from_slice(&name[7 - len..]);
|
||||
} else {
|
||||
debug_assert!(str_offset as u64 <= 0xf_ffff_ffff);
|
||||
coff_section.name[0] = b'/';
|
||||
coff_section.name[1] = b'/';
|
||||
for i in 0..6 {
|
||||
let rem = (str_offset % 64) as u8;
|
||||
str_offset /= 64;
|
||||
let c = match rem {
|
||||
0..=25 => b'A' + rem,
|
||||
26..=51 => b'a' + rem - 26,
|
||||
52..=61 => b'0' + rem - 52,
|
||||
62 => b'+',
|
||||
63 => b'/',
|
||||
_ => unreachable!(),
|
||||
};
|
||||
coff_section.name[7 - i] = c;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.buffer.write(&coff_section);
|
||||
}
|
||||
|
||||
/// Reserve the range for the section data.
|
||||
///
|
||||
/// Returns the aligned offset of the start of the range.
|
||||
/// Does nothing and returns 0 if the length is zero.
|
||||
pub fn reserve_section(&mut self, len: usize) -> u32 {
|
||||
if len == 0 {
|
||||
return 0;
|
||||
}
|
||||
// TODO: not sure what alignment is required here, but this seems to match LLVM
|
||||
self.reserve(len, 4)
|
||||
}
|
||||
|
||||
/// Write the alignment bytes prior to section data.
|
||||
///
|
||||
/// This is unneeded if you are using `write_section` or `write_section_zeroes`
|
||||
/// for the data.
|
||||
pub fn write_section_align(&mut self) {
|
||||
util::write_align(self.buffer, 4);
|
||||
}
|
||||
|
||||
/// Write the section data.
|
||||
///
|
||||
/// Writes alignment bytes prior to the data.
|
||||
/// Does nothing if the data is empty.
|
||||
pub fn write_section(&mut self, data: &[u8]) {
|
||||
if data.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.write_section_align();
|
||||
self.buffer.write_bytes(data);
|
||||
}
|
||||
|
||||
/// Write the section data using zero bytes.
|
||||
///
|
||||
/// Writes alignment bytes prior to the data.
|
||||
/// Does nothing if the length is zero.
|
||||
pub fn write_section_zeroes(&mut self, len: usize) {
|
||||
if len == 0 {
|
||||
return;
|
||||
}
|
||||
self.write_section_align();
|
||||
self.buffer.resize(self.buffer.len() + len);
|
||||
}
|
||||
|
||||
/// Reserve a file range for the given number of relocations.
|
||||
///
|
||||
/// This will automatically reserve an extra relocation if there are more than 0xffff.
|
||||
///
|
||||
/// Returns the offset of the range.
|
||||
/// Does nothing and returns 0 if the count is zero.
|
||||
pub fn reserve_relocations(&mut self, mut count: usize) -> u32 {
|
||||
if count == 0 {
|
||||
return 0;
|
||||
}
|
||||
if count > 0xffff {
|
||||
count += 1;
|
||||
}
|
||||
self.reserve(count * mem::size_of::<pe::ImageRelocation>(), 1)
|
||||
}
|
||||
|
||||
/// Write a relocation containing the count if required.
|
||||
///
|
||||
/// This should be called before writing the first relocation for a section.
|
||||
pub fn write_relocations_count(&mut self, count: usize) {
|
||||
if count > 0xffff {
|
||||
let coff_relocation = pe::ImageRelocation {
|
||||
virtual_address: U32Bytes::new(LE, count as u32 + 1),
|
||||
symbol_table_index: U32Bytes::new(LE, 0),
|
||||
typ: U16Bytes::new(LE, 0),
|
||||
};
|
||||
self.buffer.write(&coff_relocation);
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a relocation.
|
||||
pub fn write_relocation(&mut self, reloc: Relocation) {
|
||||
let coff_relocation = pe::ImageRelocation {
|
||||
virtual_address: U32Bytes::new(LE, reloc.virtual_address),
|
||||
symbol_table_index: U32Bytes::new(LE, reloc.symbol),
|
||||
typ: U16Bytes::new(LE, reloc.typ),
|
||||
};
|
||||
self.buffer.write(&coff_relocation);
|
||||
}
|
||||
|
||||
/// Reserve a symbol table entry.
|
||||
///
|
||||
/// This must be called before [`Self::reserve_symtab_strtab`].
|
||||
pub fn reserve_symbol_index(&mut self) -> u32 {
|
||||
debug_assert_eq!(self.symtab_offset, 0);
|
||||
let index = self.symtab_num;
|
||||
self.symtab_num += 1;
|
||||
index
|
||||
}
|
||||
|
||||
/// Reserve a number of symbol table entries.
|
||||
pub fn reserve_symbol_indices(&mut self, count: u32) {
|
||||
debug_assert_eq!(self.symtab_offset, 0);
|
||||
self.symtab_num += count;
|
||||
}
|
||||
|
||||
/// Write a symbol table entry.
|
||||
pub fn write_symbol(&mut self, symbol: Symbol) {
|
||||
let mut coff_symbol = pe::ImageSymbol {
|
||||
name: [0; 8],
|
||||
value: U32Bytes::new(LE, symbol.value),
|
||||
section_number: U16Bytes::new(LE, symbol.section_number),
|
||||
typ: U16Bytes::new(LE, symbol.typ),
|
||||
storage_class: symbol.storage_class,
|
||||
number_of_aux_symbols: symbol.number_of_aux_symbols,
|
||||
};
|
||||
match symbol.name {
|
||||
Name::Short(name) => coff_symbol.name = name,
|
||||
Name::Long(str_id) => {
|
||||
let str_offset = self.strtab.get_offset(str_id);
|
||||
coff_symbol.name[4..8].copy_from_slice(&u32::to_le_bytes(str_offset as u32));
|
||||
}
|
||||
}
|
||||
self.buffer.write(&coff_symbol);
|
||||
}
|
||||
|
||||
/// Reserve auxiliary symbols for a file name.
|
||||
///
|
||||
/// Returns the number of auxiliary symbols required.
|
||||
///
|
||||
/// This must be called before [`Self::reserve_symtab_strtab`].
|
||||
pub fn reserve_aux_file_name(&mut self, name: &[u8]) -> u8 {
|
||||
debug_assert_eq!(self.symtab_offset, 0);
|
||||
let aux_count = (name.len() + pe::IMAGE_SIZEOF_SYMBOL - 1) / pe::IMAGE_SIZEOF_SYMBOL;
|
||||
self.symtab_num += aux_count as u32;
|
||||
aux_count as u8
|
||||
}
|
||||
|
||||
/// Write auxiliary symbols for a file name.
|
||||
pub fn write_aux_file_name(&mut self, name: &[u8], aux_count: u8) {
|
||||
let aux_len = aux_count as usize * pe::IMAGE_SIZEOF_SYMBOL;
|
||||
debug_assert!(aux_len >= name.len());
|
||||
let old_len = self.buffer.len();
|
||||
self.buffer.write_bytes(name);
|
||||
self.buffer.resize(old_len + aux_len);
|
||||
}
|
||||
|
||||
/// Reserve an auxiliary symbol for a section.
|
||||
///
|
||||
/// Returns the number of auxiliary symbols required.
|
||||
///
|
||||
/// This must be called before [`Self::reserve_symtab_strtab`].
|
||||
pub fn reserve_aux_section(&mut self) -> u8 {
|
||||
debug_assert_eq!(self.symtab_offset, 0);
|
||||
self.symtab_num += 1;
|
||||
1
|
||||
}
|
||||
|
||||
/// Write an auxiliary symbol for a section.
|
||||
pub fn write_aux_section(&mut self, section: AuxSymbolSection) {
|
||||
let aux = pe::ImageAuxSymbolSection {
|
||||
length: U32Bytes::new(LE, section.length),
|
||||
number_of_relocations: if section.number_of_relocations > 0xffff {
|
||||
U16Bytes::new(LE, 0xffff)
|
||||
} else {
|
||||
U16Bytes::new(LE, section.number_of_relocations as u16)
|
||||
},
|
||||
number_of_linenumbers: U16Bytes::new(LE, section.number_of_linenumbers),
|
||||
check_sum: U32Bytes::new(LE, section.check_sum),
|
||||
number: U16Bytes::new(LE, section.number as u16),
|
||||
selection: section.selection,
|
||||
reserved: 0,
|
||||
high_number: U16Bytes::new(LE, (section.number >> 16) as u16),
|
||||
};
|
||||
self.buffer.write(&aux);
|
||||
}
|
||||
|
||||
/// Return the number of reserved symbol table entries.
|
||||
pub fn symbol_count(&self) -> u32 {
|
||||
self.symtab_num
|
||||
}
|
||||
|
||||
/// Add a string to the string table.
|
||||
///
|
||||
/// This must be called before [`Self::reserve_symtab_strtab`].
|
||||
pub fn add_string(&mut self, name: &'a [u8]) -> StringId {
|
||||
debug_assert_eq!(self.strtab_offset, 0);
|
||||
self.strtab.add(name)
|
||||
}
|
||||
|
||||
/// Add a section or symbol name to the string table if required.
|
||||
///
|
||||
/// This must be called before [`Self::reserve_symtab_strtab`].
|
||||
pub fn add_name(&mut self, name: &'a [u8]) -> Name {
|
||||
if name.len() > 8 {
|
||||
Name::Long(self.add_string(name))
|
||||
} else {
|
||||
let mut short_name = [0; 8];
|
||||
short_name[..name.len()].copy_from_slice(name);
|
||||
Name::Short(short_name)
|
||||
}
|
||||
}
|
||||
|
||||
/// Reserve the range for the symbol table and string table.
|
||||
///
|
||||
/// This must be called after functions that reserve symbol
|
||||
/// indices or add strings.
|
||||
pub fn reserve_symtab_strtab(&mut self) {
|
||||
debug_assert_eq!(self.symtab_offset, 0);
|
||||
self.symtab_offset = self.reserve(self.symtab_num as usize * pe::IMAGE_SIZEOF_SYMBOL, 1);
|
||||
|
||||
debug_assert_eq!(self.strtab_offset, 0);
|
||||
// First 4 bytes of strtab are the length.
|
||||
self.strtab.write(4, &mut self.strtab_data);
|
||||
self.strtab_len = self.strtab_data.len() + 4;
|
||||
self.strtab_offset = self.reserve(self.strtab_len, 1);
|
||||
}
|
||||
|
||||
/// Write the string table.
|
||||
pub fn write_strtab(&mut self) {
|
||||
debug_assert_eq!(self.strtab_offset, self.buffer.len() as u32);
|
||||
self.buffer
|
||||
.write_bytes(&u32::to_le_bytes(self.strtab_len as u32));
|
||||
self.buffer.write_bytes(&self.strtab_data);
|
||||
}
|
||||
}
|
||||
|
||||
/// Shortened and native endian version of [`pe::ImageFileHeader`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct FileHeader {
|
||||
pub machine: u16,
|
||||
pub time_date_stamp: u32,
|
||||
pub characteristics: u16,
|
||||
}
|
||||
|
||||
/// A section or symbol name.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum Name {
|
||||
/// An inline name.
|
||||
Short([u8; 8]),
|
||||
/// An id of a string table entry.
|
||||
Long(StringId),
|
||||
}
|
||||
|
||||
impl Default for Name {
|
||||
fn default() -> Name {
|
||||
Name::Short([0; 8])
|
||||
}
|
||||
}
|
||||
|
||||
// From isn't useful.
|
||||
#[allow(clippy::from_over_into)]
|
||||
impl<'a> Into<Name> for &'a [u8; 8] {
|
||||
fn into(self) -> Name {
|
||||
Name::Short(*self)
|
||||
}
|
||||
}
|
||||
|
||||
/// Native endian version of [`pe::ImageSectionHeader`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct SectionHeader {
|
||||
pub name: Name,
|
||||
pub size_of_raw_data: u32,
|
||||
pub pointer_to_raw_data: u32,
|
||||
pub pointer_to_relocations: u32,
|
||||
pub pointer_to_linenumbers: u32,
|
||||
/// This will automatically be clamped if there are more than 0xffff.
|
||||
pub number_of_relocations: u32,
|
||||
pub number_of_linenumbers: u16,
|
||||
pub characteristics: u32,
|
||||
}
|
||||
|
||||
/// Native endian version of [`pe::ImageSymbol`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct Symbol {
|
||||
pub name: Name,
|
||||
pub value: u32,
|
||||
pub section_number: u16,
|
||||
pub typ: u16,
|
||||
pub storage_class: u8,
|
||||
pub number_of_aux_symbols: u8,
|
||||
}
|
||||
|
||||
/// Native endian version of [`pe::ImageAuxSymbolSection`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct AuxSymbolSection {
|
||||
pub length: u32,
|
||||
/// This will automatically be clamped if there are more than 0xffff.
|
||||
pub number_of_relocations: u32,
|
||||
pub number_of_linenumbers: u16,
|
||||
pub check_sum: u32,
|
||||
pub number: u32,
|
||||
pub selection: u8,
|
||||
}
|
||||
|
||||
/// Native endian version of [`pe::ImageRelocation`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct Relocation {
|
||||
pub virtual_address: u32,
|
||||
pub symbol: u32,
|
||||
pub typ: u16,
|
||||
}
|
||||
9
vendor/object/src/write/elf/mod.rs
vendored
Normal file
@@ -0,0 +1,9 @@
//! Support for writing ELF files.
//!
//! Provides [`Writer`] for low level writing of ELF files.
//! This is also used to provide ELF support for [`write::Object`](crate::write::Object).

mod object;

mod writer;
pub use writer::*;
907
vendor/object/src/write/elf/object.rs
vendored
Normal file
@@ -0,0 +1,907 @@
use alloc::vec::Vec;
|
||||
|
||||
use crate::write::elf::writer::*;
|
||||
use crate::write::string::StringId;
|
||||
use crate::write::*;
|
||||
use crate::AddressSize;
|
||||
use crate::{elf, pod};
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct ComdatOffsets {
|
||||
offset: usize,
|
||||
str_id: StringId,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct SectionOffsets {
|
||||
index: SectionIndex,
|
||||
offset: usize,
|
||||
str_id: StringId,
|
||||
reloc_offset: usize,
|
||||
reloc_str_id: Option<StringId>,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct SymbolOffsets {
|
||||
index: SymbolIndex,
|
||||
str_id: Option<StringId>,
|
||||
}
|
||||
|
||||
// Public methods.
|
||||
impl<'a> Object<'a> {
|
||||
/// Add a property with a u32 value to the ELF ".note.gnu.property" section.
|
||||
///
|
||||
/// Requires `feature = "elf"`.
|
||||
pub fn add_elf_gnu_property_u32(&mut self, property: u32, value: u32) {
|
||||
if self.format != BinaryFormat::Elf {
|
||||
return;
|
||||
}
|
||||
|
||||
let align = if self.elf_is_64() { 8 } else { 4 };
|
||||
let mut data = Vec::with_capacity(32);
|
||||
let n_name = b"GNU\0";
|
||||
data.extend_from_slice(pod::bytes_of(&elf::NoteHeader32 {
|
||||
n_namesz: U32::new(self.endian, n_name.len() as u32),
|
||||
n_descsz: U32::new(self.endian, util::align(3 * 4, align) as u32),
|
||||
n_type: U32::new(self.endian, elf::NT_GNU_PROPERTY_TYPE_0),
|
||||
}));
|
||||
data.extend_from_slice(n_name);
|
||||
// This happens to already be aligned correctly.
|
||||
debug_assert_eq!(util::align(data.len(), align), data.len());
|
||||
data.extend_from_slice(pod::bytes_of(&U32::new(self.endian, property)));
|
||||
// Value size
|
||||
data.extend_from_slice(pod::bytes_of(&U32::new(self.endian, 4)));
|
||||
data.extend_from_slice(pod::bytes_of(&U32::new(self.endian, value)));
|
||||
util::write_align(&mut data, align);
|
||||
|
||||
let section = self.section_id(StandardSection::GnuProperty);
|
||||
self.append_section_data(section, &data, align as u64);
|
||||
}
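// Example (sketch, assuming the usual GNU property constant names in `crate::elf`):
// `obj.add_elf_gnu_property_u32(elf::GNU_PROPERTY_X86_FEATURE_1_AND,
// elf::GNU_PROPERTY_X86_FEATURE_1_IBT | elf::GNU_PROPERTY_X86_FEATURE_1_SHSTK)`
// would mark an x86-64 object as IBT and SHSTK compatible.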
|
||||
}
|
||||
|
||||
// Private methods.
|
||||
impl<'a> Object<'a> {
|
||||
pub(crate) fn elf_section_info(
|
||||
&self,
|
||||
section: StandardSection,
|
||||
) -> (&'static [u8], &'static [u8], SectionKind, SectionFlags) {
|
||||
match section {
|
||||
StandardSection::Text => (&[], &b".text"[..], SectionKind::Text, SectionFlags::None),
|
||||
StandardSection::Data => (&[], &b".data"[..], SectionKind::Data, SectionFlags::None),
|
||||
StandardSection::ReadOnlyData | StandardSection::ReadOnlyString => (
|
||||
&[],
|
||||
&b".rodata"[..],
|
||||
SectionKind::ReadOnlyData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::ReadOnlyDataWithRel => (
|
||||
&[],
|
||||
b".data.rel.ro",
|
||||
SectionKind::ReadOnlyDataWithRel,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::UninitializedData => (
|
||||
&[],
|
||||
&b".bss"[..],
|
||||
SectionKind::UninitializedData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::Tls => (&[], &b".tdata"[..], SectionKind::Tls, SectionFlags::None),
|
||||
StandardSection::UninitializedTls => (
|
||||
&[],
|
||||
&b".tbss"[..],
|
||||
SectionKind::UninitializedTls,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::TlsVariables => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::TlsVariables, SectionFlags::None)
|
||||
}
|
||||
StandardSection::Common => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::Common, SectionFlags::None)
|
||||
}
|
||||
StandardSection::GnuProperty => (
|
||||
&[],
|
||||
&b".note.gnu.property"[..],
|
||||
SectionKind::Note,
|
||||
SectionFlags::Elf {
|
||||
sh_flags: u64::from(elf::SHF_ALLOC),
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn elf_subsection_name(&self, section: &[u8], value: &[u8]) -> Vec<u8> {
|
||||
let mut name = section.to_vec();
|
||||
name.push(b'.');
|
||||
name.extend_from_slice(value);
|
||||
name
|
||||
}
|
||||
|
||||
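// Whether this architecture uses SHT_RELA relocation sections (addend stored in the
// relocation entry) rather than SHT_REL (addend stored in the relocated section data).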
fn elf_has_relocation_addend(&self) -> Result<bool> {
|
||||
Ok(match self.architecture {
|
||||
Architecture::Aarch64 => true,
|
||||
Architecture::Aarch64_Ilp32 => true,
|
||||
Architecture::Arm => false,
|
||||
Architecture::Avr => true,
|
||||
Architecture::Bpf => false,
|
||||
Architecture::Csky => true,
|
||||
Architecture::I386 => false,
|
||||
Architecture::X86_64 => true,
|
||||
Architecture::X86_64_X32 => true,
|
||||
Architecture::Hexagon => true,
|
||||
Architecture::LoongArch64 => true,
|
||||
Architecture::Mips => false,
|
||||
Architecture::Mips64 => true,
|
||||
Architecture::Msp430 => true,
|
||||
Architecture::PowerPc => true,
|
||||
Architecture::PowerPc64 => true,
|
||||
Architecture::Riscv64 => true,
|
||||
Architecture::Riscv32 => true,
|
||||
Architecture::S390x => true,
|
||||
Architecture::Sbf => false,
|
||||
Architecture::Sharc => true,
|
||||
Architecture::Sparc64 => true,
|
||||
Architecture::Xtensa => true,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented architecture {:?}",
|
||||
self.architecture
|
||||
)));
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn elf_fixup_relocation(&mut self, relocation: &mut Relocation) -> Result<i64> {
|
||||
// Determine whether the addend is stored in the relocation or the data.
|
||||
if self.elf_has_relocation_addend()? {
|
||||
Ok(0)
|
||||
} else {
|
||||
let constant = relocation.addend;
|
||||
relocation.addend = 0;
|
||||
Ok(constant)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn elf_is_64(&self) -> bool {
|
||||
match self.architecture.address_size().unwrap() {
|
||||
AddressSize::U8 | AddressSize::U16 | AddressSize::U32 => false,
|
||||
AddressSize::U64 => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn elf_write(&self, buffer: &mut dyn WritableBuffer) -> Result<()> {
|
||||
// Create reloc section header names so we can reference them.
|
||||
let is_rela = self.elf_has_relocation_addend()?;
|
||||
let reloc_names: Vec<_> = self
|
||||
.sections
|
||||
.iter()
|
||||
.map(|section| {
|
||||
let mut reloc_name = Vec::with_capacity(
|
||||
if is_rela { ".rela".len() } else { ".rel".len() } + section.name.len(),
|
||||
);
|
||||
if !section.relocations.is_empty() {
|
||||
reloc_name.extend_from_slice(if is_rela {
|
||||
&b".rela"[..]
|
||||
} else {
|
||||
&b".rel"[..]
|
||||
});
|
||||
reloc_name.extend_from_slice(§ion.name);
|
||||
}
|
||||
reloc_name
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Start calculating offsets of everything.
|
||||
let mut writer = Writer::new(self.endian, self.elf_is_64(), buffer);
|
||||
writer.reserve_file_header();
|
||||
|
||||
// Calculate size of section data.
|
||||
let mut comdat_offsets = Vec::with_capacity(self.comdats.len());
|
||||
for comdat in &self.comdats {
|
||||
if comdat.kind != ComdatKind::Any {
|
||||
return Err(Error(format!(
|
||||
"unsupported COMDAT symbol `{}` kind {:?}",
|
||||
self.symbols[comdat.symbol.0].name().unwrap_or(""),
|
||||
comdat.kind
|
||||
)));
|
||||
}
|
||||
|
||||
writer.reserve_section_index();
|
||||
let offset = writer.reserve_comdat(comdat.sections.len());
|
||||
let str_id = writer.add_section_name(b".group");
|
||||
comdat_offsets.push(ComdatOffsets { offset, str_id });
|
||||
}
|
||||
let mut section_offsets = Vec::with_capacity(self.sections.len());
|
||||
for (section, reloc_name) in self.sections.iter().zip(reloc_names.iter()) {
|
||||
let index = writer.reserve_section_index();
|
||||
let offset = writer.reserve(section.data.len(), section.align as usize);
|
||||
let str_id = writer.add_section_name(§ion.name);
|
||||
let mut reloc_str_id = None;
|
||||
if !section.relocations.is_empty() {
|
||||
writer.reserve_section_index();
|
||||
reloc_str_id = Some(writer.add_section_name(reloc_name));
|
||||
}
|
||||
section_offsets.push(SectionOffsets {
|
||||
index,
|
||||
offset,
|
||||
str_id,
|
||||
// Relocation data is reserved later.
|
||||
reloc_offset: 0,
|
||||
reloc_str_id,
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate index of symbols and add symbol strings to strtab.
|
||||
let mut symbol_offsets = vec![SymbolOffsets::default(); self.symbols.len()];
|
||||
writer.reserve_null_symbol_index();
|
||||
// Local symbols must come before global.
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
if symbol.is_local() {
|
||||
let section_index = symbol.section.id().map(|s| section_offsets[s.0].index);
|
||||
symbol_offsets[index].index = writer.reserve_symbol_index(section_index);
|
||||
}
|
||||
}
|
||||
let symtab_num_local = writer.symbol_count();
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
if !symbol.is_local() {
|
||||
let section_index = symbol.section.id().map(|s| section_offsets[s.0].index);
|
||||
symbol_offsets[index].index = writer.reserve_symbol_index(section_index);
|
||||
}
|
||||
}
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
if symbol.kind != SymbolKind::Section && !symbol.name.is_empty() {
|
||||
symbol_offsets[index].str_id = Some(writer.add_string(&symbol.name));
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate size of symbols.
|
||||
writer.reserve_symtab_section_index();
|
||||
writer.reserve_symtab();
|
||||
if writer.symtab_shndx_needed() {
|
||||
writer.reserve_symtab_shndx_section_index();
|
||||
}
|
||||
writer.reserve_symtab_shndx();
|
||||
writer.reserve_strtab_section_index();
|
||||
writer.reserve_strtab();
|
||||
|
||||
// Calculate size of relocations.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let count = section.relocations.len();
|
||||
if count != 0 {
|
||||
section_offsets[index].reloc_offset = writer.reserve_relocations(count, is_rela);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate size of section headers.
|
||||
writer.reserve_shstrtab_section_index();
|
||||
writer.reserve_shstrtab();
|
||||
writer.reserve_section_headers();
|
||||
|
||||
// Start writing.
|
||||
let e_type = elf::ET_REL;
|
||||
let e_machine = match (self.architecture, self.sub_architecture) {
|
||||
(Architecture::Aarch64, None) => elf::EM_AARCH64,
|
||||
(Architecture::Aarch64_Ilp32, None) => elf::EM_AARCH64,
|
||||
(Architecture::Arm, None) => elf::EM_ARM,
|
||||
(Architecture::Avr, None) => elf::EM_AVR,
|
||||
(Architecture::Bpf, None) => elf::EM_BPF,
|
||||
(Architecture::Csky, None) => elf::EM_CSKY,
|
||||
(Architecture::I386, None) => elf::EM_386,
|
||||
(Architecture::X86_64, None) => elf::EM_X86_64,
|
||||
(Architecture::X86_64_X32, None) => elf::EM_X86_64,
|
||||
(Architecture::Hexagon, None) => elf::EM_HEXAGON,
|
||||
(Architecture::LoongArch64, None) => elf::EM_LOONGARCH,
|
||||
(Architecture::Mips, None) => elf::EM_MIPS,
|
||||
(Architecture::Mips64, None) => elf::EM_MIPS,
|
||||
(Architecture::Msp430, None) => elf::EM_MSP430,
|
||||
(Architecture::PowerPc, None) => elf::EM_PPC,
|
||||
(Architecture::PowerPc64, None) => elf::EM_PPC64,
|
||||
(Architecture::Riscv32, None) => elf::EM_RISCV,
|
||||
(Architecture::Riscv64, None) => elf::EM_RISCV,
|
||||
(Architecture::S390x, None) => elf::EM_S390,
|
||||
(Architecture::Sbf, None) => elf::EM_SBF,
|
||||
(Architecture::Sharc, None) => elf::EM_SHARC,
|
||||
(Architecture::Sparc64, None) => elf::EM_SPARCV9,
|
||||
(Architecture::Xtensa, None) => elf::EM_XTENSA,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented architecture {:?} with sub-architecture {:?}",
|
||||
self.architecture, self.sub_architecture
|
||||
)));
|
||||
}
|
||||
};
|
||||
let (os_abi, abi_version, e_flags) = if let FileFlags::Elf {
|
||||
os_abi,
|
||||
abi_version,
|
||||
e_flags,
|
||||
} = self.flags
|
||||
{
|
||||
(os_abi, abi_version, e_flags)
|
||||
} else {
|
||||
(elf::ELFOSABI_NONE, 0, 0)
|
||||
};
|
||||
writer.write_file_header(&FileHeader {
|
||||
os_abi,
|
||||
abi_version,
|
||||
e_type,
|
||||
e_machine,
|
||||
e_entry: 0,
|
||||
e_flags,
|
||||
})?;
|
||||
|
||||
// Write section data.
|
||||
for comdat in &self.comdats {
|
||||
writer.write_comdat_header();
|
||||
for section in &comdat.sections {
|
||||
writer.write_comdat_entry(section_offsets[section.0].index);
|
||||
}
|
||||
}
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
writer.write_align(section.align as usize);
|
||||
debug_assert_eq!(section_offsets[index].offset, writer.len());
|
||||
writer.write(&section.data);
|
||||
}
|
||||
|
||||
// Write symbols.
|
||||
writer.write_null_symbol();
|
||||
let mut write_symbol = |index: usize, symbol: &Symbol| -> Result<()> {
|
||||
let st_info = if let SymbolFlags::Elf { st_info, .. } = symbol.flags {
|
||||
st_info
|
||||
} else {
|
||||
let st_type = match symbol.kind {
|
||||
SymbolKind::Null => elf::STT_NOTYPE,
|
||||
SymbolKind::Text => {
|
||||
if symbol.is_undefined() {
|
||||
elf::STT_NOTYPE
|
||||
} else {
|
||||
elf::STT_FUNC
|
||||
}
|
||||
}
|
||||
SymbolKind::Data => {
|
||||
if symbol.is_undefined() {
|
||||
elf::STT_NOTYPE
|
||||
} else if symbol.is_common() {
|
||||
elf::STT_COMMON
|
||||
} else {
|
||||
elf::STT_OBJECT
|
||||
}
|
||||
}
|
||||
SymbolKind::Section => elf::STT_SECTION,
|
||||
SymbolKind::File => elf::STT_FILE,
|
||||
SymbolKind::Tls => elf::STT_TLS,
|
||||
SymbolKind::Label => elf::STT_NOTYPE,
|
||||
SymbolKind::Unknown => {
|
||||
if symbol.is_undefined() {
|
||||
elf::STT_NOTYPE
|
||||
} else {
|
||||
return Err(Error(format!(
|
||||
"unimplemented symbol `{}` kind {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
};
|
||||
let st_bind = if symbol.weak {
|
||||
elf::STB_WEAK
|
||||
} else if symbol.is_undefined() {
|
||||
elf::STB_GLOBAL
|
||||
} else if symbol.is_local() {
|
||||
elf::STB_LOCAL
|
||||
} else {
|
||||
elf::STB_GLOBAL
|
||||
};
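// ELF st_info packs the binding into the high four bits and the type into the low four bits.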
|
||||
(st_bind << 4) + st_type
|
||||
};
|
||||
let st_other = if let SymbolFlags::Elf { st_other, .. } = symbol.flags {
|
||||
st_other
|
||||
} else if symbol.scope == SymbolScope::Linkage {
|
||||
elf::STV_HIDDEN
|
||||
} else {
|
||||
elf::STV_DEFAULT
|
||||
};
|
||||
let (st_shndx, section) = match symbol.section {
|
||||
SymbolSection::None => {
|
||||
debug_assert_eq!(symbol.kind, SymbolKind::File);
|
||||
(elf::SHN_ABS, None)
|
||||
}
|
||||
SymbolSection::Undefined => (elf::SHN_UNDEF, None),
|
||||
SymbolSection::Absolute => (elf::SHN_ABS, None),
|
||||
SymbolSection::Common => (elf::SHN_COMMON, None),
|
||||
SymbolSection::Section(id) => (0, Some(section_offsets[id.0].index)),
|
||||
};
|
||||
writer.write_symbol(&Sym {
|
||||
name: symbol_offsets[index].str_id,
|
||||
section,
|
||||
st_info,
|
||||
st_other,
|
||||
st_shndx,
|
||||
st_value: symbol.value,
|
||||
st_size: symbol.size,
|
||||
});
|
||||
Ok(())
|
||||
};
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
if symbol.is_local() {
|
||||
write_symbol(index, symbol)?;
|
||||
}
|
||||
}
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
if !symbol.is_local() {
|
||||
write_symbol(index, symbol)?;
|
||||
}
|
||||
}
|
||||
writer.write_symtab_shndx();
|
||||
writer.write_strtab();
|
||||
|
||||
// Write relocations.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
if !section.relocations.is_empty() {
|
||||
writer.write_align_relocation();
|
||||
debug_assert_eq!(section_offsets[index].reloc_offset, writer.len());
|
||||
for reloc in &section.relocations {
|
||||
let r_type = match self.architecture {
|
||||
Architecture::Aarch64 => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_AARCH64_ABS64
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_AARCH64_ABS32
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_AARCH64_ABS16
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_AARCH64_PREL64
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_AARCH64_PREL32
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_AARCH64_PREL16
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::AArch64Call, 26)
|
||||
| (RelocationKind::PltRelative, RelocationEncoding::AArch64Call, 26) => {
|
||||
elf::R_AARCH64_CALL26
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Aarch64_Ilp32 => {
|
||||
match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_AARCH64_P32_ABS32
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented relocation {:?}",
|
||||
reloc
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
Architecture::Arm => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_ARM_ABS32,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Avr => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_AVR_32,
|
||||
(RelocationKind::Absolute, _, 16) => elf::R_AVR_16,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Bpf => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_BPF_64_64,
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_BPF_64_32,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Csky => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_CKCORE_ADDR32,
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_CKCORE_PCREL32
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::I386 => match (reloc.kind, reloc.size) {
|
||||
(RelocationKind::Absolute, 32) => elf::R_386_32,
|
||||
(RelocationKind::Relative, 32) => elf::R_386_PC32,
|
||||
(RelocationKind::Got, 32) => elf::R_386_GOT32,
|
||||
(RelocationKind::PltRelative, 32) => elf::R_386_PLT32,
|
||||
(RelocationKind::GotBaseOffset, 32) => elf::R_386_GOTOFF,
|
||||
(RelocationKind::GotBaseRelative, 32) => elf::R_386_GOTPC,
|
||||
(RelocationKind::Absolute, 16) => elf::R_386_16,
|
||||
(RelocationKind::Relative, 16) => elf::R_386_PC16,
|
||||
(RelocationKind::Absolute, 8) => elf::R_386_8,
|
||||
(RelocationKind::Relative, 8) => elf::R_386_PC8,
|
||||
(RelocationKind::Elf(x), _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::X86_64 | Architecture::X86_64_X32 => {
|
||||
match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_X86_64_64
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::X86Branch, 32) => {
|
||||
elf::R_X86_64_PLT32
|
||||
}
|
||||
(RelocationKind::Relative, _, 32) => elf::R_X86_64_PC32,
|
||||
(RelocationKind::Got, _, 32) => elf::R_X86_64_GOT32,
|
||||
(RelocationKind::PltRelative, _, 32) => elf::R_X86_64_PLT32,
|
||||
(RelocationKind::GotRelative, _, 32) => elf::R_X86_64_GOTPCREL,
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_X86_64_32
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::X86Signed, 32) => {
|
||||
elf::R_X86_64_32S
|
||||
}
|
||||
(RelocationKind::Absolute, _, 16) => elf::R_X86_64_16,
|
||||
(RelocationKind::Relative, _, 16) => elf::R_X86_64_PC16,
|
||||
(RelocationKind::Absolute, _, 8) => elf::R_X86_64_8,
|
||||
(RelocationKind::Relative, _, 8) => elf::R_X86_64_PC8,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented relocation {:?}",
|
||||
reloc
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
Architecture::Hexagon => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_HEX_32,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::LoongArch64 => match (reloc.kind, reloc.encoding, reloc.size)
|
||||
{
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_LARCH_32,
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_LARCH_64,
|
||||
(RelocationKind::Relative, _, 32) => elf::R_LARCH_32_PCREL,
|
||||
(RelocationKind::Relative, _, 64) => elf::R_LARCH_64_PCREL,
|
||||
(RelocationKind::Relative, RelocationEncoding::LoongArchBranch, 16)
|
||||
| (
|
||||
RelocationKind::PltRelative,
|
||||
RelocationEncoding::LoongArchBranch,
|
||||
16,
|
||||
) => elf::R_LARCH_B16,
|
||||
(RelocationKind::Relative, RelocationEncoding::LoongArchBranch, 21)
|
||||
| (
|
||||
RelocationKind::PltRelative,
|
||||
RelocationEncoding::LoongArchBranch,
|
||||
21,
|
||||
) => elf::R_LARCH_B21,
|
||||
(RelocationKind::Relative, RelocationEncoding::LoongArchBranch, 26)
|
||||
| (
|
||||
RelocationKind::PltRelative,
|
||||
RelocationEncoding::LoongArchBranch,
|
||||
26,
|
||||
) => elf::R_LARCH_B26,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Mips | Architecture::Mips64 => {
|
||||
match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 16) => elf::R_MIPS_16,
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_MIPS_32,
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_MIPS_64,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented relocation {:?}",
|
||||
reloc
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
Architecture::Msp430 => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_MSP430_32,
|
||||
(RelocationKind::Absolute, _, 16) => elf::R_MSP430_16_BYTE,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::PowerPc => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_PPC_ADDR32,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::PowerPc64 => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_PPC64_ADDR32,
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_PPC64_ADDR64,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Riscv32 | Architecture::Riscv64 => {
|
||||
match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_RISCV_32,
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_RISCV_64,
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_RISCV_32_PCREL
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented relocation {:?}",
|
||||
reloc
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
Architecture::S390x => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 8) => {
|
||||
elf::R_390_8
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_390_16
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_390_32
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_390_64
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_390_PC16
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_390_PC32
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_390_PC64
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::S390xDbl, 16) => {
|
||||
elf::R_390_PC16DBL
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::S390xDbl, 32) => {
|
||||
elf::R_390_PC32DBL
|
||||
}
|
||||
(RelocationKind::PltRelative, RelocationEncoding::S390xDbl, 16) => {
|
||||
elf::R_390_PLT16DBL
|
||||
}
|
||||
(RelocationKind::PltRelative, RelocationEncoding::S390xDbl, 32) => {
|
||||
elf::R_390_PLT32DBL
|
||||
}
|
||||
(RelocationKind::Got, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_390_GOT16
|
||||
}
|
||||
(RelocationKind::Got, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_390_GOT32
|
||||
}
|
||||
(RelocationKind::Got, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_390_GOT64
|
||||
}
|
||||
(RelocationKind::GotRelative, RelocationEncoding::S390xDbl, 32) => {
|
||||
elf::R_390_GOTENT
|
||||
}
|
||||
(RelocationKind::GotBaseOffset, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_390_GOTOFF16
|
||||
}
|
||||
(RelocationKind::GotBaseOffset, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_390_GOTOFF32
|
||||
}
|
||||
(RelocationKind::GotBaseOffset, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_390_GOTOFF64
|
||||
}
|
||||
(RelocationKind::GotBaseRelative, RelocationEncoding::Generic, 64) => {
|
||||
elf::R_390_GOTPC
|
||||
}
|
||||
(RelocationKind::GotBaseRelative, RelocationEncoding::S390xDbl, 32) => {
|
||||
elf::R_390_GOTPCDBL
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Sbf => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_SBF_64_64,
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_SBF_64_32,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Sharc => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeA, 32) => {
|
||||
elf::R_SHARC_ADDR32_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_SHARC_ADDR_VAR_V3
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::SharcTypeA, 24) => {
|
||||
elf::R_SHARC_PCRLONG_V3
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::SharcTypeA, 6) => {
|
||||
elf::R_SHARC_PCRSHORT_V3
|
||||
}
|
||||
(RelocationKind::Relative, RelocationEncoding::SharcTypeB, 6) => {
|
||||
elf::R_SHARC_PCRSHORT_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::Generic, 16) => {
|
||||
elf::R_SHARC_ADDR_VAR16_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeA, 16) => {
|
||||
elf::R_SHARC_DATA16_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeB, 16) => {
|
||||
elf::R_SHARC_DATA16_VISA_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeA, 24) => {
|
||||
elf::R_SHARC_ADDR24_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeA, 6) => {
|
||||
elf::R_SHARC_DATA6_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeB, 6) => {
|
||||
elf::R_SHARC_DATA6_VISA_V3
|
||||
}
|
||||
(RelocationKind::Absolute, RelocationEncoding::SharcTypeB, 7) => {
|
||||
elf::R_SHARC_DATA7_VISA_V3
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Sparc64 => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
// TODO: use R_SPARC_32/R_SPARC_64 if aligned.
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_SPARC_UA32,
|
||||
(RelocationKind::Absolute, _, 64) => elf::R_SPARC_UA64,
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
Architecture::Xtensa => match (reloc.kind, reloc.encoding, reloc.size) {
|
||||
(RelocationKind::Absolute, _, 32) => elf::R_XTENSA_32,
|
||||
(RelocationKind::Relative, RelocationEncoding::Generic, 32) => {
|
||||
elf::R_XTENSA_32_PCREL
|
||||
}
|
||||
(RelocationKind::Elf(x), _, _) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
if let RelocationKind::Elf(x) = reloc.kind {
|
||||
x
|
||||
} else {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
}
|
||||
};
|
||||
let r_sym = symbol_offsets[reloc.symbol.0].index.0;
|
||||
writer.write_relocation(
|
||||
is_rela,
|
||||
&Rel {
|
||||
r_offset: reloc.offset,
|
||||
r_sym,
|
||||
r_type,
|
||||
r_addend: reloc.addend,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writer.write_shstrtab();
|
||||
|
||||
// Write section headers.
|
||||
writer.write_null_section_header();
|
||||
|
||||
let symtab_index = writer.symtab_index();
|
||||
for (comdat, comdat_offset) in self.comdats.iter().zip(comdat_offsets.iter()) {
|
||||
writer.write_comdat_section_header(
|
||||
comdat_offset.str_id,
|
||||
symtab_index,
|
||||
symbol_offsets[comdat.symbol.0].index,
|
||||
comdat_offset.offset,
|
||||
comdat.sections.len(),
|
||||
);
|
||||
}
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let sh_type = match section.kind {
|
||||
SectionKind::UninitializedData | SectionKind::UninitializedTls => elf::SHT_NOBITS,
|
||||
SectionKind::Note => elf::SHT_NOTE,
|
||||
SectionKind::Elf(sh_type) => sh_type,
|
||||
_ => elf::SHT_PROGBITS,
|
||||
};
|
||||
let sh_flags = if let SectionFlags::Elf { sh_flags } = section.flags {
|
||||
sh_flags
|
||||
} else {
|
||||
match section.kind {
|
||||
SectionKind::Text => elf::SHF_ALLOC | elf::SHF_EXECINSTR,
|
||||
SectionKind::Data | SectionKind::ReadOnlyDataWithRel => {
|
||||
elf::SHF_ALLOC | elf::SHF_WRITE
|
||||
}
|
||||
SectionKind::Tls => elf::SHF_ALLOC | elf::SHF_WRITE | elf::SHF_TLS,
|
||||
SectionKind::UninitializedData => elf::SHF_ALLOC | elf::SHF_WRITE,
|
||||
SectionKind::UninitializedTls => elf::SHF_ALLOC | elf::SHF_WRITE | elf::SHF_TLS,
|
||||
SectionKind::ReadOnlyData => elf::SHF_ALLOC,
|
||||
SectionKind::ReadOnlyString => {
|
||||
elf::SHF_ALLOC | elf::SHF_STRINGS | elf::SHF_MERGE
|
||||
}
|
||||
SectionKind::OtherString => elf::SHF_STRINGS | elf::SHF_MERGE,
|
||||
SectionKind::Other
|
||||
| SectionKind::Debug
|
||||
| SectionKind::Metadata
|
||||
| SectionKind::Linker
|
||||
| SectionKind::Note
|
||||
| SectionKind::Elf(_) => 0,
|
||||
SectionKind::Unknown | SectionKind::Common | SectionKind::TlsVariables => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented section `{}` kind {:?}",
|
||||
section.name().unwrap_or(""),
|
||||
section.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
.into()
|
||||
};
|
||||
// TODO: not sure if this is correct, maybe user should determine this
|
||||
let sh_entsize = match section.kind {
|
||||
SectionKind::ReadOnlyString | SectionKind::OtherString => 1,
|
||||
_ => 0,
|
||||
};
|
||||
writer.write_section_header(&SectionHeader {
|
||||
name: Some(section_offsets[index].str_id),
|
||||
sh_type,
|
||||
sh_flags,
|
||||
sh_addr: 0,
|
||||
sh_offset: section_offsets[index].offset as u64,
|
||||
sh_size: section.size,
|
||||
sh_link: 0,
|
||||
sh_info: 0,
|
||||
sh_addralign: section.align,
|
||||
sh_entsize,
|
||||
});
|
||||
|
||||
if !section.relocations.is_empty() {
|
||||
writer.write_relocation_section_header(
|
||||
section_offsets[index].reloc_str_id.unwrap(),
|
||||
section_offsets[index].index,
|
||||
symtab_index,
|
||||
section_offsets[index].reloc_offset,
|
||||
section.relocations.len(),
|
||||
is_rela,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
writer.write_symtab_section_header(symtab_num_local);
|
||||
writer.write_symtab_shndx_section_header();
|
||||
writer.write_strtab_section_header();
|
||||
writer.write_shstrtab_section_header();
|
||||
|
||||
debug_assert_eq!(writer.reserved_len(), writer.len());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
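// Illustrative sketch (not part of the vendored source): the relocation match
// above is driven by `Relocation` values passed to `Object::add_relocation`.
// On x86-64, for example, an absolute 64-bit relocation with a generic
// encoding is emitted as `R_X86_64_64`. The symbol id below is assumed to
// have been created earlier with `add_symbol`.
//
//     let mut obj = Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
//     let text = obj.section_id(StandardSection::Text);
//     obj.add_relocation(
//         text,
//         Relocation {
//             offset: 0,
//             size: 64,
//             kind: RelocationKind::Absolute,
//             encoding: RelocationEncoding::Generic,
//             symbol: target_symbol_id, // hypothetical, added earlier
//             addend: 0,
//         },
//     )?;
//     // elf_write() then maps (Absolute, Generic, 64) to elf::R_X86_64_64 above.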
|
||||
2143
vendor/object/src/write/elf/writer.rs
vendored
Normal file
2143
vendor/object/src/write/elf/writer.rs
vendored
Normal file
File diff suppressed because it is too large
1095
vendor/object/src/write/macho.rs
vendored
Normal file
1095
vendor/object/src/write/macho.rs
vendored
Normal file
File diff suppressed because it is too large
961
vendor/object/src/write/mod.rs
vendored
Normal file
961
vendor/object/src/write/mod.rs
vendored
Normal file
@@ -0,0 +1,961 @@
|
||||
//! Interface for writing object files.
|
||||
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::string::String;
|
||||
use alloc::vec::Vec;
|
||||
use core::{fmt, result, str};
|
||||
#[cfg(not(feature = "std"))]
|
||||
use hashbrown::HashMap;
|
||||
#[cfg(feature = "std")]
|
||||
use std::{boxed::Box, collections::HashMap, error, io};
|
||||
|
||||
use crate::endian::{Endianness, U32, U64};
|
||||
use crate::{
|
||||
Architecture, BinaryFormat, ComdatKind, FileFlags, RelocationEncoding, RelocationKind,
|
||||
SectionFlags, SectionKind, SubArchitecture, SymbolFlags, SymbolKind, SymbolScope,
|
||||
};
|
||||
|
||||
#[cfg(feature = "coff")]
|
||||
pub mod coff;
|
||||
#[cfg(feature = "coff")]
|
||||
pub use coff::CoffExportStyle;
|
||||
|
||||
#[cfg(feature = "elf")]
|
||||
pub mod elf;
|
||||
|
||||
#[cfg(feature = "macho")]
|
||||
mod macho;
|
||||
#[cfg(feature = "macho")]
|
||||
pub use macho::MachOBuildVersion;
|
||||
|
||||
#[cfg(feature = "pe")]
|
||||
pub mod pe;
|
||||
|
||||
#[cfg(feature = "xcoff")]
|
||||
mod xcoff;
|
||||
|
||||
mod string;
|
||||
pub use string::StringId;
|
||||
|
||||
mod util;
|
||||
pub use util::*;
|
||||
|
||||
/// The error type used within the write module.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Error(String);
|
||||
|
||||
impl fmt::Display for Error {
|
||||
#[inline]
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl error::Error for Error {}
|
||||
|
||||
/// The result type used within the write module.
|
||||
pub type Result<T> = result::Result<T, Error>;
|
||||
|
||||
/// A writable relocatable object file.
|
||||
#[derive(Debug)]
|
||||
pub struct Object<'a> {
|
||||
format: BinaryFormat,
|
||||
architecture: Architecture,
|
||||
sub_architecture: Option<SubArchitecture>,
|
||||
endian: Endianness,
|
||||
sections: Vec<Section<'a>>,
|
||||
standard_sections: HashMap<StandardSection, SectionId>,
|
||||
symbols: Vec<Symbol>,
|
||||
symbol_map: HashMap<Vec<u8>, SymbolId>,
|
||||
stub_symbols: HashMap<SymbolId, SymbolId>,
|
||||
comdats: Vec<Comdat>,
|
||||
/// File flags that are specific to each file format.
|
||||
pub flags: FileFlags,
|
||||
/// The symbol name mangling scheme.
|
||||
pub mangling: Mangling,
|
||||
/// Mach-O "_tlv_bootstrap" symbol.
|
||||
tlv_bootstrap: Option<SymbolId>,
|
||||
/// Mach-O CPU subtype.
|
||||
#[cfg(feature = "macho")]
|
||||
macho_cpu_subtype: Option<u32>,
|
||||
#[cfg(feature = "macho")]
|
||||
macho_build_version: Option<MachOBuildVersion>,
|
||||
}
|
||||
|
||||
impl<'a> Object<'a> {
|
||||
/// Create an empty object file.
|
||||
pub fn new(format: BinaryFormat, architecture: Architecture, endian: Endianness) -> Object<'a> {
|
||||
Object {
|
||||
format,
|
||||
architecture,
|
||||
sub_architecture: None,
|
||||
endian,
|
||||
sections: Vec::new(),
|
||||
standard_sections: HashMap::new(),
|
||||
symbols: Vec::new(),
|
||||
symbol_map: HashMap::new(),
|
||||
stub_symbols: HashMap::new(),
|
||||
comdats: Vec::new(),
|
||||
flags: FileFlags::None,
|
||||
mangling: Mangling::default(format, architecture),
|
||||
tlv_bootstrap: None,
|
||||
#[cfg(feature = "macho")]
|
||||
macho_cpu_subtype: None,
|
||||
#[cfg(feature = "macho")]
|
||||
macho_build_version: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the file format.
|
||||
#[inline]
|
||||
pub fn format(&self) -> BinaryFormat {
|
||||
self.format
|
||||
}
|
||||
|
||||
/// Return the architecture.
|
||||
#[inline]
|
||||
pub fn architecture(&self) -> Architecture {
|
||||
self.architecture
|
||||
}
|
||||
|
||||
/// Return the sub-architecture.
|
||||
#[inline]
|
||||
pub fn sub_architecture(&self) -> Option<SubArchitecture> {
|
||||
self.sub_architecture
|
||||
}
|
||||
|
||||
/// Specify the sub-architecture.
|
||||
pub fn set_sub_architecture(&mut self, sub_architecture: Option<SubArchitecture>) {
|
||||
self.sub_architecture = sub_architecture;
|
||||
}
|
||||
|
||||
/// Return the current mangling setting.
|
||||
#[inline]
|
||||
pub fn mangling(&self) -> Mangling {
|
||||
self.mangling
|
||||
}
|
||||
|
||||
/// Specify the mangling setting.
|
||||
#[inline]
|
||||
pub fn set_mangling(&mut self, mangling: Mangling) {
|
||||
self.mangling = mangling;
|
||||
}
|
||||
|
||||
/// Return the name for a standard segment.
|
||||
///
|
||||
/// This will vary based on the file format.
|
||||
#[allow(unused_variables)]
|
||||
pub fn segment_name(&self, segment: StandardSegment) -> &'static [u8] {
|
||||
match self.format {
|
||||
#[cfg(feature = "coff")]
|
||||
BinaryFormat::Coff => &[],
|
||||
#[cfg(feature = "elf")]
|
||||
BinaryFormat::Elf => &[],
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => self.macho_segment_name(segment),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the section with the given `SectionId`.
|
||||
#[inline]
|
||||
pub fn section(&self, section: SectionId) -> &Section<'a> {
|
||||
&self.sections[section.0]
|
||||
}
|
||||
|
||||
/// Mutably get the section with the given `SectionId`.
|
||||
#[inline]
|
||||
pub fn section_mut(&mut self, section: SectionId) -> &mut Section<'a> {
|
||||
&mut self.sections[section.0]
|
||||
}
|
||||
|
||||
/// Set the data for an existing section.
|
||||
///
|
||||
/// Must not be called for sections that already have data, or that contain uninitialized data.
|
||||
pub fn set_section_data<T>(&mut self, section: SectionId, data: T, align: u64)
|
||||
where
|
||||
T: Into<Cow<'a, [u8]>>,
|
||||
{
|
||||
self.sections[section.0].set_data(data, align)
|
||||
}
|
||||
|
||||
/// Append data to an existing section. Returns the section offset of the data.
|
||||
pub fn append_section_data(&mut self, section: SectionId, data: &[u8], align: u64) -> u64 {
|
||||
self.sections[section.0].append_data(data, align)
|
||||
}
|
||||
|
||||
/// Append zero-initialized data to an existing section. Returns the section offset of the data.
|
||||
pub fn append_section_bss(&mut self, section: SectionId, size: u64, align: u64) -> u64 {
|
||||
self.sections[section.0].append_bss(size, align)
|
||||
}
|
||||
|
||||
/// Return the `SectionId` of a standard section.
|
||||
///
|
||||
/// If the section doesn't already exist then it is created.
|
||||
pub fn section_id(&mut self, section: StandardSection) -> SectionId {
|
||||
self.standard_sections
|
||||
.get(&section)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| {
|
||||
let (segment, name, kind, flags) = self.section_info(section);
|
||||
let id = self.add_section(segment.to_vec(), name.to_vec(), kind);
|
||||
self.section_mut(id).flags = flags;
|
||||
id
|
||||
})
|
||||
}
|
||||
|
||||
/// Add a new section and return its `SectionId`.
|
||||
///
|
||||
/// This also creates a section symbol.
|
||||
pub fn add_section(&mut self, segment: Vec<u8>, name: Vec<u8>, kind: SectionKind) -> SectionId {
|
||||
let id = SectionId(self.sections.len());
|
||||
self.sections.push(Section {
|
||||
segment,
|
||||
name,
|
||||
kind,
|
||||
size: 0,
|
||||
align: 1,
|
||||
data: Cow::Borrowed(&[]),
|
||||
relocations: Vec::new(),
|
||||
symbol: None,
|
||||
flags: SectionFlags::None,
|
||||
});
|
||||
|
||||
// Add to self.standard_sections if required. This may match multiple standard sections.
|
||||
let section = &self.sections[id.0];
|
||||
for standard_section in StandardSection::all() {
|
||||
if !self.standard_sections.contains_key(standard_section) {
|
||||
let (segment, name, kind, _flags) = self.section_info(*standard_section);
|
||||
if segment == &*section.segment && name == &*section.name && kind == section.kind {
|
||||
self.standard_sections.insert(*standard_section, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
id
|
||||
}
|
||||
|
||||
fn section_info(
|
||||
&self,
|
||||
section: StandardSection,
|
||||
) -> (&'static [u8], &'static [u8], SectionKind, SectionFlags) {
|
||||
match self.format {
|
||||
#[cfg(feature = "coff")]
|
||||
BinaryFormat::Coff => self.coff_section_info(section),
|
||||
#[cfg(feature = "elf")]
|
||||
BinaryFormat::Elf => self.elf_section_info(section),
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => self.macho_section_info(section),
|
||||
#[cfg(feature = "xcoff")]
|
||||
BinaryFormat::Xcoff => self.xcoff_section_info(section),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a subsection. Returns the `SectionId` and section offset of the data.
|
||||
pub fn add_subsection(
|
||||
&mut self,
|
||||
section: StandardSection,
|
||||
name: &[u8],
|
||||
data: &[u8],
|
||||
align: u64,
|
||||
) -> (SectionId, u64) {
|
||||
let section_id = if self.has_subsections_via_symbols() {
|
||||
self.set_subsections_via_symbols();
|
||||
self.section_id(section)
|
||||
} else {
|
||||
let (segment, name, kind, flags) = self.subsection_info(section, name);
|
||||
let id = self.add_section(segment.to_vec(), name, kind);
|
||||
self.section_mut(id).flags = flags;
|
||||
id
|
||||
};
|
||||
let offset = self.append_section_data(section_id, data, align);
|
||||
(section_id, offset)
|
||||
}
|
||||
|
||||
fn has_subsections_via_symbols(&self) -> bool {
|
||||
match self.format {
|
||||
BinaryFormat::Coff | BinaryFormat::Elf | BinaryFormat::Xcoff => false,
|
||||
BinaryFormat::MachO => true,
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn set_subsections_via_symbols(&mut self) {
|
||||
match self.format {
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => self.macho_set_subsections_via_symbols(),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn subsection_info(
|
||||
&self,
|
||||
section: StandardSection,
|
||||
value: &[u8],
|
||||
) -> (&'static [u8], Vec<u8>, SectionKind, SectionFlags) {
|
||||
let (segment, section, kind, flags) = self.section_info(section);
|
||||
let name = self.subsection_name(section, value);
|
||||
(segment, name, kind, flags)
|
||||
}
|
||||
|
||||
#[allow(unused_variables)]
|
||||
fn subsection_name(&self, section: &[u8], value: &[u8]) -> Vec<u8> {
|
||||
debug_assert!(!self.has_subsections_via_symbols());
|
||||
match self.format {
|
||||
#[cfg(feature = "coff")]
|
||||
BinaryFormat::Coff => self.coff_subsection_name(section, value),
|
||||
#[cfg(feature = "elf")]
|
||||
BinaryFormat::Elf => self.elf_subsection_name(section, value),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the COMDAT section group with the given `ComdatId`.
|
||||
#[inline]
|
||||
pub fn comdat(&self, comdat: ComdatId) -> &Comdat {
|
||||
&self.comdats[comdat.0]
|
||||
}
|
||||
|
||||
/// Mutably get the COMDAT section group with the given `ComdatId`.
|
||||
#[inline]
|
||||
pub fn comdat_mut(&mut self, comdat: ComdatId) -> &mut Comdat {
|
||||
&mut self.comdats[comdat.0]
|
||||
}
|
||||
|
||||
/// Add a new COMDAT section group and return its `ComdatId`.
|
||||
pub fn add_comdat(&mut self, comdat: Comdat) -> ComdatId {
|
||||
let comdat_id = ComdatId(self.comdats.len());
|
||||
self.comdats.push(comdat);
|
||||
comdat_id
|
||||
}
|
||||
|
||||
/// Get the `SymbolId` of the symbol with the given name.
|
||||
pub fn symbol_id(&self, name: &[u8]) -> Option<SymbolId> {
|
||||
self.symbol_map.get(name).cloned()
|
||||
}
|
||||
|
||||
/// Get the symbol with the given `SymbolId`.
|
||||
#[inline]
|
||||
pub fn symbol(&self, symbol: SymbolId) -> &Symbol {
|
||||
&self.symbols[symbol.0]
|
||||
}
|
||||
|
||||
/// Mutably get the symbol with the given `SymbolId`.
|
||||
#[inline]
|
||||
pub fn symbol_mut(&mut self, symbol: SymbolId) -> &mut Symbol {
|
||||
&mut self.symbols[symbol.0]
|
||||
}
|
||||
|
||||
/// Add a new symbol and return its `SymbolId`.
|
||||
pub fn add_symbol(&mut self, mut symbol: Symbol) -> SymbolId {
|
||||
// Defined symbols must have a scope.
|
||||
debug_assert!(symbol.is_undefined() || symbol.scope != SymbolScope::Unknown);
|
||||
if symbol.kind == SymbolKind::Section {
|
||||
// There can only be one section symbol, but update its flags, since
|
||||
// the automatically generated section symbol will have none.
|
||||
let symbol_id = self.section_symbol(symbol.section.id().unwrap());
|
||||
if symbol.flags != SymbolFlags::None {
|
||||
self.symbol_mut(symbol_id).flags = symbol.flags;
|
||||
}
|
||||
return symbol_id;
|
||||
}
|
||||
if !symbol.name.is_empty()
|
||||
&& (symbol.kind == SymbolKind::Text
|
||||
|| symbol.kind == SymbolKind::Data
|
||||
|| symbol.kind == SymbolKind::Tls)
|
||||
{
|
||||
let unmangled_name = symbol.name.clone();
|
||||
if let Some(prefix) = self.mangling.global_prefix() {
|
||||
symbol.name.insert(0, prefix);
|
||||
}
|
||||
let symbol_id = self.add_raw_symbol(symbol);
|
||||
self.symbol_map.insert(unmangled_name, symbol_id);
|
||||
symbol_id
|
||||
} else {
|
||||
self.add_raw_symbol(symbol)
|
||||
}
|
||||
}
|
||||
|
||||
fn add_raw_symbol(&mut self, symbol: Symbol) -> SymbolId {
|
||||
let symbol_id = SymbolId(self.symbols.len());
|
||||
self.symbols.push(symbol);
|
||||
symbol_id
|
||||
}
|
||||
|
||||
/// Return true if the file format supports `StandardSection::UninitializedTls`.
|
||||
#[inline]
|
||||
pub fn has_uninitialized_tls(&self) -> bool {
|
||||
self.format != BinaryFormat::Coff
|
||||
}
|
||||
|
||||
/// Return true if the file format supports `StandardSection::Common`.
|
||||
#[inline]
|
||||
pub fn has_common(&self) -> bool {
|
||||
self.format == BinaryFormat::MachO
|
||||
}
|
||||
|
||||
/// Add a new common symbol and return its `SymbolId`.
|
||||
///
|
||||
/// For Mach-O, this appends the symbol to the `__common` section.
|
||||
pub fn add_common_symbol(&mut self, mut symbol: Symbol, size: u64, align: u64) -> SymbolId {
|
||||
if self.has_common() {
|
||||
let symbol_id = self.add_symbol(symbol);
|
||||
let section = self.section_id(StandardSection::Common);
|
||||
self.add_symbol_bss(symbol_id, section, size, align);
|
||||
symbol_id
|
||||
} else {
|
||||
symbol.section = SymbolSection::Common;
|
||||
symbol.size = size;
|
||||
self.add_symbol(symbol)
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a new file symbol and return its `SymbolId`.
|
||||
pub fn add_file_symbol(&mut self, name: Vec<u8>) -> SymbolId {
|
||||
self.add_raw_symbol(Symbol {
|
||||
name,
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::File,
|
||||
scope: SymbolScope::Compilation,
|
||||
weak: false,
|
||||
section: SymbolSection::None,
|
||||
flags: SymbolFlags::None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the symbol for a section.
|
||||
pub fn section_symbol(&mut self, section_id: SectionId) -> SymbolId {
|
||||
let section = &mut self.sections[section_id.0];
|
||||
if let Some(symbol) = section.symbol {
|
||||
return symbol;
|
||||
}
|
||||
let name = if self.format == BinaryFormat::Coff {
|
||||
section.name.clone()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
let symbol_id = SymbolId(self.symbols.len());
|
||||
self.symbols.push(Symbol {
|
||||
name,
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Section,
|
||||
scope: SymbolScope::Compilation,
|
||||
weak: false,
|
||||
section: SymbolSection::Section(section_id),
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
section.symbol = Some(symbol_id);
|
||||
symbol_id
|
||||
}
|
||||
|
||||
/// Append data to an existing section, and update a symbol to refer to it.
|
||||
///
|
||||
/// For Mach-O, this also creates a `__thread_vars` entry for TLS symbols, and the
|
||||
/// symbol will indirectly point to the added data via the `__thread_vars` entry.
|
||||
///
|
||||
/// Returns the section offset of the data.
|
||||
pub fn add_symbol_data(
|
||||
&mut self,
|
||||
symbol_id: SymbolId,
|
||||
section: SectionId,
|
||||
data: &[u8],
|
||||
align: u64,
|
||||
) -> u64 {
|
||||
let offset = self.append_section_data(section, data, align);
|
||||
self.set_symbol_data(symbol_id, section, offset, data.len() as u64);
|
||||
offset
|
||||
}
|
||||
|
||||
/// Append zero-initialized data to an existing section, and update a symbol to refer to it.
|
||||
///
|
||||
/// For Mach-O, this also creates a `__thread_vars` entry for TLS symbols, and the
|
||||
/// symbol will indirectly point to the added data via the `__thread_vars` entry.
|
||||
///
|
||||
/// Returns the section offset of the data.
|
||||
pub fn add_symbol_bss(
|
||||
&mut self,
|
||||
symbol_id: SymbolId,
|
||||
section: SectionId,
|
||||
size: u64,
|
||||
align: u64,
|
||||
) -> u64 {
|
||||
let offset = self.append_section_bss(section, size, align);
|
||||
self.set_symbol_data(symbol_id, section, offset, size);
|
||||
offset
|
||||
}
|
||||
|
||||
/// Update a symbol to refer to the given data within a section.
|
||||
///
|
||||
/// For Mach-O, this also creates a `__thread_vars` entry for TLS symbols, and the
|
||||
/// symbol will indirectly point to the data via the `__thread_vars` entry.
|
||||
#[allow(unused_mut)]
|
||||
pub fn set_symbol_data(
|
||||
&mut self,
|
||||
mut symbol_id: SymbolId,
|
||||
section: SectionId,
|
||||
offset: u64,
|
||||
size: u64,
|
||||
) {
|
||||
// Defined symbols must have a scope.
|
||||
debug_assert!(self.symbol(symbol_id).scope != SymbolScope::Unknown);
|
||||
match self.format {
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => symbol_id = self.macho_add_thread_var(symbol_id),
|
||||
_ => {}
|
||||
}
|
||||
let symbol = self.symbol_mut(symbol_id);
|
||||
symbol.value = offset;
|
||||
symbol.size = size;
|
||||
symbol.section = SymbolSection::Section(section);
|
||||
}
|
||||
|
||||
/// Convert a symbol to a section symbol and offset.
|
||||
///
|
||||
/// Returns `None` if the symbol does not have a section.
|
||||
pub fn symbol_section_and_offset(&mut self, symbol_id: SymbolId) -> Option<(SymbolId, u64)> {
|
||||
let symbol = self.symbol(symbol_id);
|
||||
if symbol.kind == SymbolKind::Section {
|
||||
return Some((symbol_id, 0));
|
||||
}
|
||||
let symbol_offset = symbol.value;
|
||||
let section = symbol.section.id()?;
|
||||
let section_symbol = self.section_symbol(section);
|
||||
Some((section_symbol, symbol_offset))
|
||||
}
|
||||
|
||||
/// Add a relocation to a section.
|
||||
///
|
||||
/// Relocations must only be added after the referenced symbols have been added
|
||||
/// and defined (if applicable).
|
||||
pub fn add_relocation(&mut self, section: SectionId, mut relocation: Relocation) -> Result<()> {
|
||||
let addend = match self.format {
|
||||
#[cfg(feature = "coff")]
|
||||
BinaryFormat::Coff => self.coff_fixup_relocation(&mut relocation),
|
||||
#[cfg(feature = "elf")]
|
||||
BinaryFormat::Elf => self.elf_fixup_relocation(&mut relocation)?,
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => self.macho_fixup_relocation(&mut relocation),
|
||||
#[cfg(feature = "xcoff")]
|
||||
BinaryFormat::Xcoff => self.xcoff_fixup_relocation(&mut relocation),
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
if addend != 0 {
|
||||
self.write_relocation_addend(section, &relocation, addend)?;
|
||||
}
|
||||
self.sections[section.0].relocations.push(relocation);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_relocation_addend(
|
||||
&mut self,
|
||||
section: SectionId,
|
||||
relocation: &Relocation,
|
||||
addend: i64,
|
||||
) -> Result<()> {
|
||||
let data = self.sections[section.0].data_mut();
|
||||
let offset = relocation.offset as usize;
|
||||
match relocation.size {
|
||||
32 => data.write_at(offset, &U32::new(self.endian, addend as u32)),
|
||||
64 => data.write_at(offset, &U64::new(self.endian, addend as u64)),
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented relocation addend {:?}",
|
||||
relocation
|
||||
)));
|
||||
}
|
||||
}
|
||||
.map_err(|_| {
|
||||
Error(format!(
|
||||
"invalid relocation offset {}+{} (max {})",
|
||||
relocation.offset,
|
||||
relocation.size,
|
||||
data.len()
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
/// Write the object to a `Vec`.
|
||||
pub fn write(&self) -> Result<Vec<u8>> {
|
||||
let mut buffer = Vec::new();
|
||||
self.emit(&mut buffer)?;
|
||||
Ok(buffer)
|
||||
}
|
||||
|
||||
/// Write the object to a `Write` implementation.
|
||||
///
|
||||
/// Also flushes the writer.
|
||||
///
|
||||
/// It is advisable to use a buffered writer like [`BufWriter`](std::io::BufWriter)
|
||||
/// instead of an unbuffered writer like [`File`](std::fs::File).
|
||||
#[cfg(feature = "std")]
|
||||
pub fn write_stream<W: io::Write>(&self, w: W) -> result::Result<(), Box<dyn error::Error>> {
|
||||
let mut stream = StreamingBuffer::new(w);
|
||||
self.emit(&mut stream)?;
|
||||
stream.result()?;
|
||||
stream.into_inner().flush()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write the object to a `WritableBuffer`.
|
||||
pub fn emit(&self, buffer: &mut dyn WritableBuffer) -> Result<()> {
|
||||
match self.format {
|
||||
#[cfg(feature = "coff")]
|
||||
BinaryFormat::Coff => self.coff_write(buffer),
|
||||
#[cfg(feature = "elf")]
|
||||
BinaryFormat::Elf => self.elf_write(buffer),
|
||||
#[cfg(feature = "macho")]
|
||||
BinaryFormat::MachO => self.macho_write(buffer),
|
||||
#[cfg(feature = "xcoff")]
|
||||
BinaryFormat::Xcoff => self.xcoff_write(buffer),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
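// Illustrative sketch (not part of the vendored source): a typical flow with
// this API is to create an `Object`, define a symbol's data in a standard
// section, and emit the bytes. The symbol name and the single `ret` byte are
// arbitrary examples.
//
//     let mut obj = Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
//     let text = obj.section_id(StandardSection::Text);
//     let func = obj.add_symbol(Symbol {
//         name: b"my_func".to_vec(),
//         value: 0,
//         size: 0,
//         kind: SymbolKind::Text,
//         scope: SymbolScope::Linkage,
//         weak: false,
//         section: SymbolSection::Undefined,
//         flags: SymbolFlags::None,
//     });
//     obj.add_symbol_data(func, text, &[0xc3], 16);
//     let bytes = obj.write()?;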
|
||||
|
||||
/// A standard segment kind.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum StandardSegment {
|
||||
Text,
|
||||
Data,
|
||||
Debug,
|
||||
}
|
||||
|
||||
/// A standard section kind.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum StandardSection {
|
||||
Text,
|
||||
Data,
|
||||
ReadOnlyData,
|
||||
ReadOnlyDataWithRel,
|
||||
ReadOnlyString,
|
||||
UninitializedData,
|
||||
Tls,
|
||||
/// Zero-fill TLS initializers. Unsupported for COFF.
|
||||
UninitializedTls,
|
||||
/// TLS variable structures. Only supported for Mach-O.
|
||||
TlsVariables,
|
||||
/// Common data. Only supported for Mach-O.
|
||||
Common,
|
||||
/// Notes for GNU properties. Only supported for ELF.
|
||||
GnuProperty,
|
||||
}
|
||||
|
||||
impl StandardSection {
|
||||
/// Return the section kind of a standard section.
|
||||
pub fn kind(self) -> SectionKind {
|
||||
match self {
|
||||
StandardSection::Text => SectionKind::Text,
|
||||
StandardSection::Data => SectionKind::Data,
|
||||
StandardSection::ReadOnlyData => SectionKind::ReadOnlyData,
|
||||
StandardSection::ReadOnlyDataWithRel => SectionKind::ReadOnlyDataWithRel,
|
||||
StandardSection::ReadOnlyString => SectionKind::ReadOnlyString,
|
||||
StandardSection::UninitializedData => SectionKind::UninitializedData,
|
||||
StandardSection::Tls => SectionKind::Tls,
|
||||
StandardSection::UninitializedTls => SectionKind::UninitializedTls,
|
||||
StandardSection::TlsVariables => SectionKind::TlsVariables,
|
||||
StandardSection::Common => SectionKind::Common,
|
||||
StandardSection::GnuProperty => SectionKind::Note,
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: remembering to update this is error-prone, can we do better?
|
||||
fn all() -> &'static [StandardSection] {
|
||||
&[
|
||||
StandardSection::Text,
|
||||
StandardSection::Data,
|
||||
StandardSection::ReadOnlyData,
|
||||
StandardSection::ReadOnlyDataWithRel,
|
||||
StandardSection::ReadOnlyString,
|
||||
StandardSection::UninitializedData,
|
||||
StandardSection::Tls,
|
||||
StandardSection::UninitializedTls,
|
||||
StandardSection::TlsVariables,
|
||||
StandardSection::Common,
|
||||
StandardSection::GnuProperty,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
/// An identifier used to reference a section.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct SectionId(usize);
|
||||
|
||||
/// A section in an object file.
|
||||
#[derive(Debug)]
|
||||
pub struct Section<'a> {
|
||||
segment: Vec<u8>,
|
||||
name: Vec<u8>,
|
||||
kind: SectionKind,
|
||||
size: u64,
|
||||
align: u64,
|
||||
data: Cow<'a, [u8]>,
|
||||
relocations: Vec<Relocation>,
|
||||
symbol: Option<SymbolId>,
|
||||
/// Section flags that are specific to each file format.
|
||||
pub flags: SectionFlags,
|
||||
}
|
||||
|
||||
impl<'a> Section<'a> {
|
||||
/// Try to convert the name to a utf8 string.
|
||||
#[inline]
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
str::from_utf8(&self.name).ok()
|
||||
}
|
||||
|
||||
/// Try to convert the segment to a utf8 string.
|
||||
#[inline]
|
||||
pub fn segment(&self) -> Option<&str> {
|
||||
str::from_utf8(&self.segment).ok()
|
||||
}
|
||||
|
||||
/// Return true if this section contains zerofill data.
|
||||
#[inline]
|
||||
pub fn is_bss(&self) -> bool {
|
||||
self.kind.is_bss()
|
||||
}
|
||||
|
||||
/// Set the data for a section.
|
||||
///
|
||||
/// Must not be called for sections that already have data, or that contain uninitialized data.
|
||||
pub fn set_data<T>(&mut self, data: T, align: u64)
|
||||
where
|
||||
T: Into<Cow<'a, [u8]>>,
|
||||
{
|
||||
debug_assert!(!self.is_bss());
|
||||
debug_assert_eq!(align & (align - 1), 0);
|
||||
debug_assert!(self.data.is_empty());
|
||||
self.data = data.into();
|
||||
self.size = self.data.len() as u64;
|
||||
self.align = align;
|
||||
}
|
||||
|
||||
/// Append data to a section.
|
||||
///
|
||||
/// Must not be called for sections that contain uninitialized data.
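///
/// The existing data is first padded with zero bytes so that the returned
/// offset is a multiple of `align`; for example, if the section currently
/// holds 3 bytes, appending with an `align` of 8 places the new data at
/// offset 8.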
|
||||
pub fn append_data(&mut self, append_data: &[u8], align: u64) -> u64 {
|
||||
debug_assert!(!self.is_bss());
|
||||
debug_assert_eq!(align & (align - 1), 0);
|
||||
if self.align < align {
|
||||
self.align = align;
|
||||
}
|
||||
let align = align as usize;
|
||||
let data = self.data.to_mut();
|
||||
let mut offset = data.len();
|
||||
if offset & (align - 1) != 0 {
|
||||
offset += align - (offset & (align - 1));
|
||||
data.resize(offset, 0);
|
||||
}
|
||||
data.extend_from_slice(append_data);
|
||||
self.size = data.len() as u64;
|
||||
offset as u64
|
||||
}
|
||||
|
||||
/// Append uninitialized data to a section.
|
||||
///
|
||||
/// Must not be called for sections that contain initialized data.
|
||||
pub fn append_bss(&mut self, size: u64, align: u64) -> u64 {
|
||||
debug_assert!(self.is_bss());
|
||||
debug_assert_eq!(align & (align - 1), 0);
|
||||
if self.align < align {
|
||||
self.align = align;
|
||||
}
|
||||
let mut offset = self.size;
|
||||
if offset & (align - 1) != 0 {
|
||||
offset += align - (offset & (align - 1));
|
||||
self.size = offset;
|
||||
}
|
||||
self.size += size;
|
||||
offset
|
||||
}
|
||||
|
||||
/// Returns the section as-built so far.
|
||||
///
|
||||
/// This requires that the section is not a bss section.
|
||||
pub fn data(&self) -> &[u8] {
|
||||
debug_assert!(!self.is_bss());
|
||||
&self.data
|
||||
}
|
||||
|
||||
/// Returns the section as-built so far.
|
||||
///
|
||||
/// This requires that the section is not a bss section.
|
||||
pub fn data_mut(&mut self) -> &mut [u8] {
|
||||
debug_assert!(!self.is_bss());
|
||||
self.data.to_mut()
|
||||
}
|
||||
}
|
||||
|
||||
/// The section where a symbol is defined.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum SymbolSection {
|
||||
/// The section is not applicable for this symbol (such as file symbols).
|
||||
None,
|
||||
/// The symbol is undefined.
|
||||
Undefined,
|
||||
/// The symbol has an absolute value.
|
||||
Absolute,
|
||||
/// The symbol is a zero-initialized symbol that will be combined with duplicate definitions.
|
||||
Common,
|
||||
/// The symbol is defined in the given section.
|
||||
Section(SectionId),
|
||||
}
|
||||
|
||||
impl SymbolSection {
|
||||
/// Returns the section id for the section where the symbol is defined.
|
||||
///
|
||||
/// May return `None` if the symbol is not defined in a section.
|
||||
#[inline]
|
||||
pub fn id(self) -> Option<SectionId> {
|
||||
if let SymbolSection::Section(id) = self {
|
||||
Some(id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An identifier used to reference a symbol.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct SymbolId(usize);
|
||||
|
||||
/// A symbol in an object file.
|
||||
#[derive(Debug)]
|
||||
pub struct Symbol {
|
||||
/// The name of the symbol.
|
||||
pub name: Vec<u8>,
|
||||
/// The value of the symbol.
|
||||
///
|
||||
/// If the symbol is defined in a section, then this is the section offset of the symbol.
|
||||
pub value: u64,
|
||||
/// The size of the symbol.
|
||||
pub size: u64,
|
||||
/// The kind of the symbol.
|
||||
pub kind: SymbolKind,
|
||||
/// The scope of the symbol.
|
||||
pub scope: SymbolScope,
|
||||
/// Whether the symbol has weak binding.
|
||||
pub weak: bool,
|
||||
/// The section containing the symbol.
|
||||
pub section: SymbolSection,
|
||||
/// Symbol flags that are specific to each file format.
|
||||
pub flags: SymbolFlags<SectionId, SymbolId>,
|
||||
}
|
||||
|
||||
impl Symbol {
|
||||
/// Try to convert the name to a utf8 string.
|
||||
#[inline]
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
str::from_utf8(&self.name).ok()
|
||||
}
|
||||
|
||||
/// Return true if the symbol is undefined.
|
||||
#[inline]
|
||||
pub fn is_undefined(&self) -> bool {
|
||||
self.section == SymbolSection::Undefined
|
||||
}
|
||||
|
||||
/// Return true if the symbol is common data.
|
||||
///
|
||||
/// Note: does not check for `SymbolSection::Section` with `SectionKind::Common`.
|
||||
#[inline]
|
||||
pub fn is_common(&self) -> bool {
|
||||
self.section == SymbolSection::Common
|
||||
}
|
||||
|
||||
/// Return true if the symbol scope is local.
|
||||
#[inline]
|
||||
pub fn is_local(&self) -> bool {
|
||||
self.scope == SymbolScope::Compilation
|
||||
}
|
||||
}
|
||||
|
||||
/// A relocation in an object file.
|
||||
#[derive(Debug)]
|
||||
pub struct Relocation {
|
||||
/// The section offset of the place of the relocation.
|
||||
pub offset: u64,
|
||||
/// The size in bits of the place of the relocation.
|
||||
pub size: u8,
|
||||
/// The operation used to calculate the result of the relocation.
|
||||
pub kind: RelocationKind,
|
||||
/// Information about how the result of the relocation operation is encoded in the place.
|
||||
pub encoding: RelocationEncoding,
|
||||
/// The symbol referred to by the relocation.
|
||||
///
|
||||
/// This may be a section symbol.
|
||||
pub symbol: SymbolId,
|
||||
/// The addend to use in the relocation calculation.
|
||||
///
|
||||
/// This may be in addition to an implicit addend stored at the place of the relocation.
|
||||
pub addend: i64,
|
||||
}
|
||||
|
||||
/// An identifier used to reference a COMDAT section group.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct ComdatId(usize);
|
||||
|
||||
/// A COMDAT section group.
|
||||
#[derive(Debug)]
|
||||
pub struct Comdat {
|
||||
/// The COMDAT selection kind.
|
||||
///
|
||||
/// This determines the way in which the linker resolves multiple definitions of the COMDAT
|
||||
/// sections.
|
||||
pub kind: ComdatKind,
|
||||
/// The COMDAT symbol.
|
||||
///
|
||||
/// If this symbol is referenced, then all sections in the group will be included by the
|
||||
/// linker.
|
||||
pub symbol: SymbolId,
|
||||
/// The sections in the group.
|
||||
pub sections: Vec<SectionId>,
|
||||
}
|
||||
|
||||
/// The symbol name mangling scheme.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum Mangling {
|
||||
/// No symbol mangling.
|
||||
None,
|
||||
/// Windows COFF symbol mangling.
|
||||
Coff,
|
||||
/// Windows COFF i386 symbol mangling.
|
||||
CoffI386,
|
||||
/// ELF symbol mangling.
|
||||
Elf,
|
||||
/// Mach-O symbol mangling.
|
||||
MachO,
|
||||
/// Xcoff symbol mangling.
|
||||
Xcoff,
|
||||
}
|
||||
|
||||
impl Mangling {
|
||||
/// Return the default symbol mangling for the given format and architecture.
|
||||
pub fn default(format: BinaryFormat, architecture: Architecture) -> Self {
|
||||
match (format, architecture) {
|
||||
(BinaryFormat::Coff, Architecture::I386) => Mangling::CoffI386,
|
||||
(BinaryFormat::Coff, _) => Mangling::Coff,
|
||||
(BinaryFormat::Elf, _) => Mangling::Elf,
|
||||
(BinaryFormat::MachO, _) => Mangling::MachO,
|
||||
(BinaryFormat::Xcoff, _) => Mangling::Xcoff,
|
||||
_ => Mangling::None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the prefix to use for global symbols.
|
||||
pub fn global_prefix(self) -> Option<u8> {
|
||||
match self {
|
||||
Mangling::None | Mangling::Elf | Mangling::Coff | Mangling::Xcoff => None,
|
||||
Mangling::CoffI386 | Mangling::MachO => Some(b'_'),
|
||||
}
|
||||
}
|
||||
}
|
||||
847
vendor/object/src/write/pe.rs
vendored
Normal file
847
vendor/object/src/write/pe.rs
vendored
Normal file
@@ -0,0 +1,847 @@
|
||||
//! Helper for writing PE files.
|
||||
use alloc::string::String;
|
||||
use alloc::vec::Vec;
|
||||
use core::mem;
|
||||
|
||||
use crate::endian::{LittleEndian as LE, *};
|
||||
use crate::pe;
|
||||
use crate::write::util;
|
||||
use crate::write::{Error, Result, WritableBuffer};
|
||||
|
||||
/// A helper for writing PE files.
|
||||
///
|
||||
/// Writing uses a two phase approach. The first phase reserves file ranges and virtual
|
||||
/// address ranges for everything in the order that they will be written.
|
||||
///
|
||||
/// The second phase writes everything out in order. Thus the caller must ensure writing
|
||||
/// is in the same order that file ranges were reserved.
|
||||
#[allow(missing_debug_implementations)]
|
||||
pub struct Writer<'a> {
|
||||
is_64: bool,
|
||||
section_alignment: u32,
|
||||
file_alignment: u32,
|
||||
|
||||
buffer: &'a mut dyn WritableBuffer,
|
||||
len: u32,
|
||||
virtual_len: u32,
|
||||
headers_len: u32,
|
||||
|
||||
code_address: u32,
|
||||
data_address: u32,
|
||||
code_len: u32,
|
||||
data_len: u32,
|
||||
bss_len: u32,
|
||||
|
||||
nt_headers_offset: u32,
|
||||
data_directories: Vec<DataDirectory>,
|
||||
section_header_num: u16,
|
||||
sections: Vec<Section>,
|
||||
|
||||
symbol_offset: u32,
|
||||
symbol_num: u32,
|
||||
|
||||
reloc_blocks: Vec<RelocBlock>,
|
||||
relocs: Vec<U16<LE>>,
|
||||
reloc_offset: u32,
|
||||
}
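// Illustrative sketch (not part of the vendored source) of the two-phase
// pattern described above: reserve every file range first, then write them
// out in the same order. The alignment values and the 16-byte blob are
// arbitrary examples; `Vec<u8>` is assumed to implement `WritableBuffer`.
//
//     let mut buffer = Vec::new();
//     let mut writer = Writer::new(true, 0x1000, 0x200, &mut buffer);
//     // Phase 1: reserve ranges in write order.
//     writer.reserve_dos_header();
//     let blob_offset = writer.reserve_file(16);
//     // Phase 2: write in exactly the same order.
//     writer.write_empty_dos_header()?;
//     writer.pad_until(blob_offset);
//     writer.write(&[0u8; 16]);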
|
||||
|
||||
impl<'a> Writer<'a> {
|
||||
/// Create a new `Writer`.
|
||||
pub fn new(
|
||||
is_64: bool,
|
||||
section_alignment: u32,
|
||||
file_alignment: u32,
|
||||
buffer: &'a mut dyn WritableBuffer,
|
||||
) -> Self {
|
||||
Writer {
|
||||
is_64,
|
||||
section_alignment,
|
||||
file_alignment,
|
||||
|
||||
buffer,
|
||||
len: 0,
|
||||
virtual_len: 0,
|
||||
headers_len: 0,
|
||||
|
||||
code_address: 0,
|
||||
data_address: 0,
|
||||
code_len: 0,
|
||||
data_len: 0,
|
||||
bss_len: 0,
|
||||
|
||||
nt_headers_offset: 0,
|
||||
data_directories: Vec::new(),
|
||||
section_header_num: 0,
|
||||
sections: Vec::new(),
|
||||
|
||||
symbol_offset: 0,
|
||||
symbol_num: 0,
|
||||
|
||||
reloc_blocks: Vec::new(),
|
||||
relocs: Vec::new(),
|
||||
reloc_offset: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the current virtual address size that has been reserved.
|
||||
///
|
||||
/// This is only valid after section headers have been reserved.
|
||||
pub fn virtual_len(&self) -> u32 {
|
||||
self.virtual_len
|
||||
}
|
||||
|
||||
/// Reserve a virtual address range with the given size.
|
||||
///
|
||||
/// The reserved length will be increased to match the section alignment.
|
||||
///
|
||||
/// Returns the aligned offset of the start of the range.
|
||||
pub fn reserve_virtual(&mut self, len: u32) -> u32 {
|
||||
let offset = self.virtual_len;
|
||||
self.virtual_len += len;
|
||||
self.virtual_len = util::align_u32(self.virtual_len, self.section_alignment);
|
||||
offset
|
||||
}
|
||||
|
||||
/// Reserve up to the given virtual address.
|
||||
///
|
||||
/// The reserved length will be increased to match the section alignment.
|
||||
pub fn reserve_virtual_until(&mut self, address: u32) {
|
||||
debug_assert!(self.virtual_len <= address);
|
||||
self.virtual_len = util::align_u32(address, self.section_alignment);
|
||||
}
|
||||
|
||||
/// Return the current file length that has been reserved.
|
||||
pub fn reserved_len(&self) -> u32 {
|
||||
self.len
|
||||
}
|
||||
|
||||
/// Return the current file length that has been written.
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.buffer.len()
|
||||
}
|
||||
|
||||
/// Reserve a file range with the given size and starting alignment.
|
||||
///
|
||||
/// Returns the aligned offset of the start of the range.
|
||||
pub fn reserve(&mut self, len: u32, align_start: u32) -> u32 {
|
||||
if len == 0 {
|
||||
return self.len;
|
||||
}
|
||||
self.reserve_align(align_start);
|
||||
let offset = self.len;
|
||||
self.len += len;
|
||||
offset
|
||||
}
|
||||
|
||||
/// Reserve a file range with the given size and using the file alignment.
|
||||
///
|
||||
/// Returns the aligned offset of the start of the range.
|
||||
pub fn reserve_file(&mut self, len: u32) -> u32 {
|
||||
self.reserve(len, self.file_alignment)
|
||||
}
|
||||
|
||||
/// Write data.
|
||||
pub fn write(&mut self, data: &[u8]) {
|
||||
self.buffer.write_bytes(data);
|
||||
}
|
||||
|
||||
/// Reserve alignment padding bytes.
|
||||
pub fn reserve_align(&mut self, align_start: u32) {
|
||||
self.len = util::align_u32(self.len, align_start);
|
||||
}
|
||||
|
||||
/// Write alignment padding bytes.
|
||||
pub fn write_align(&mut self, align_start: u32) {
|
||||
util::write_align(self.buffer, align_start as usize);
|
||||
}
|
||||
|
||||
/// Write padding up to the next multiple of file alignment.
|
||||
pub fn write_file_align(&mut self) {
|
||||
self.write_align(self.file_alignment);
|
||||
}
|
||||
|
||||
/// Reserve the file range up to the given file offset.
|
||||
pub fn reserve_until(&mut self, offset: u32) {
|
||||
debug_assert!(self.len <= offset);
|
||||
self.len = offset;
|
||||
}
|
||||
|
||||
/// Write padding up to the given file offset.
|
||||
pub fn pad_until(&mut self, offset: u32) {
|
||||
debug_assert!(self.buffer.len() <= offset as usize);
|
||||
self.buffer.resize(offset as usize);
|
||||
}
|
||||
|
||||
/// Reserve the range for the DOS header.
|
||||
///
|
||||
/// This must be at the start of the file.
|
||||
///
|
||||
/// When writing, you may use `write_custom_dos_header` or `write_empty_dos_header`.
|
||||
pub fn reserve_dos_header(&mut self) {
|
||||
debug_assert_eq!(self.len, 0);
|
||||
self.reserve(mem::size_of::<pe::ImageDosHeader>() as u32, 1);
|
||||
}
|
||||
|
||||
/// Write a custom DOS header.
|
||||
///
|
||||
/// This must be at the start of the file.
|
||||
pub fn write_custom_dos_header(&mut self, dos_header: &pe::ImageDosHeader) -> Result<()> {
|
||||
debug_assert_eq!(self.buffer.len(), 0);
|
||||
|
||||
// Start writing.
|
||||
self.buffer
|
||||
.reserve(self.len as usize)
|
||||
.map_err(|_| Error(String::from("Cannot allocate buffer")))?;
|
||||
|
||||
self.buffer.write(dos_header);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write the DOS header for a file without a stub.
|
||||
///
|
||||
/// This must be at the start of the file.
|
||||
///
|
||||
/// Uses default values for all fields.
|
||||
pub fn write_empty_dos_header(&mut self) -> Result<()> {
|
||||
self.write_custom_dos_header(&pe::ImageDosHeader {
|
||||
e_magic: U16::new(LE, pe::IMAGE_DOS_SIGNATURE),
|
||||
e_cblp: U16::new(LE, 0),
|
||||
e_cp: U16::new(LE, 0),
|
||||
e_crlc: U16::new(LE, 0),
|
||||
e_cparhdr: U16::new(LE, 0),
|
||||
e_minalloc: U16::new(LE, 0),
|
||||
e_maxalloc: U16::new(LE, 0),
|
||||
e_ss: U16::new(LE, 0),
|
||||
e_sp: U16::new(LE, 0),
|
||||
e_csum: U16::new(LE, 0),
|
||||
e_ip: U16::new(LE, 0),
|
||||
e_cs: U16::new(LE, 0),
|
||||
e_lfarlc: U16::new(LE, 0),
|
||||
e_ovno: U16::new(LE, 0),
|
||||
e_res: [U16::new(LE, 0); 4],
|
||||
e_oemid: U16::new(LE, 0),
|
||||
e_oeminfo: U16::new(LE, 0),
|
||||
e_res2: [U16::new(LE, 0); 10],
|
||||
e_lfanew: U32::new(LE, self.nt_headers_offset),
|
||||
})
|
||||
}
|
||||
|
||||
/// Reserve a fixed DOS header and stub.
|
||||
///
|
||||
/// Use `reserve_dos_header` and `reserve` if you need a custom stub.
|
||||
pub fn reserve_dos_header_and_stub(&mut self) {
|
||||
self.reserve_dos_header();
|
||||
self.reserve(64, 1);
|
||||
}
|
||||
|
||||
/// Write a fixed DOS header and stub.
|
||||
///
|
||||
/// Use `write_custom_dos_header` and `write` if you need a custom stub.
|
||||
pub fn write_dos_header_and_stub(&mut self) -> Result<()> {
|
||||
self.write_custom_dos_header(&pe::ImageDosHeader {
|
||||
e_magic: U16::new(LE, pe::IMAGE_DOS_SIGNATURE),
|
||||
e_cblp: U16::new(LE, 0x90),
|
||||
e_cp: U16::new(LE, 3),
|
||||
e_crlc: U16::new(LE, 0),
|
||||
e_cparhdr: U16::new(LE, 4),
|
||||
e_minalloc: U16::new(LE, 0),
|
||||
e_maxalloc: U16::new(LE, 0xffff),
|
||||
e_ss: U16::new(LE, 0),
|
||||
e_sp: U16::new(LE, 0xb8),
|
||||
e_csum: U16::new(LE, 0),
|
||||
e_ip: U16::new(LE, 0),
|
||||
e_cs: U16::new(LE, 0),
|
||||
e_lfarlc: U16::new(LE, 0x40),
|
||||
e_ovno: U16::new(LE, 0),
|
||||
e_res: [U16::new(LE, 0); 4],
|
||||
e_oemid: U16::new(LE, 0),
|
||||
e_oeminfo: U16::new(LE, 0),
|
||||
e_res2: [U16::new(LE, 0); 10],
|
||||
e_lfanew: U32::new(LE, self.nt_headers_offset),
|
||||
})?;
|
||||
|
||||
#[rustfmt::skip]
|
||||
self.buffer.write_bytes(&[
|
||||
0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd,
|
||||
0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68,
|
||||
0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72,
|
||||
0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
|
||||
0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e,
|
||||
0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20,
|
||||
0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a,
|
||||
0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nt_headers_size(&self) -> u32 {
|
||||
if self.is_64 {
|
||||
mem::size_of::<pe::ImageNtHeaders64>() as u32
|
||||
} else {
|
||||
mem::size_of::<pe::ImageNtHeaders32>() as u32
|
||||
}
|
||||
}
|
||||
|
||||
fn optional_header_size(&self) -> u32 {
|
||||
let size = if self.is_64 {
|
||||
mem::size_of::<pe::ImageOptionalHeader64>() as u32
|
||||
} else {
|
||||
mem::size_of::<pe::ImageOptionalHeader32>() as u32
|
||||
};
|
||||
size + self.data_directories.len() as u32 * mem::size_of::<pe::ImageDataDirectory>() as u32
|
||||
}
|
||||
|
||||
/// Return the offset of the NT headers, if reserved.
|
||||
pub fn nt_headers_offset(&self) -> u32 {
|
||||
self.nt_headers_offset
|
||||
}
|
||||
|
||||
/// Reserve the range for the NT headers.
|
||||
pub fn reserve_nt_headers(&mut self, data_directory_num: usize) {
|
||||
debug_assert_eq!(self.nt_headers_offset, 0);
|
||||
self.nt_headers_offset = self.reserve(self.nt_headers_size(), 8);
|
||||
self.data_directories = vec![DataDirectory::default(); data_directory_num];
|
||||
self.reserve(
|
||||
data_directory_num as u32 * mem::size_of::<pe::ImageDataDirectory>() as u32,
|
||||
1,
|
||||
);
|
||||
}
|
||||
|
||||
/// Set the virtual address and size of a data directory.
|
||||
pub fn set_data_directory(&mut self, index: usize, virtual_address: u32, size: u32) {
|
||||
self.data_directories[index] = DataDirectory {
|
||||
virtual_address,
|
||||
size,
|
||||
}
|
||||
}
|
||||
|
||||
/// Write the NT headers.
|
||||
pub fn write_nt_headers(&mut self, nt_headers: NtHeaders) {
|
||||
self.pad_until(self.nt_headers_offset);
|
||||
self.buffer.write(&U32::new(LE, pe::IMAGE_NT_SIGNATURE));
|
||||
let file_header = pe::ImageFileHeader {
|
||||
machine: U16::new(LE, nt_headers.machine),
|
||||
number_of_sections: U16::new(LE, self.section_header_num),
|
||||
time_date_stamp: U32::new(LE, nt_headers.time_date_stamp),
|
||||
pointer_to_symbol_table: U32::new(LE, self.symbol_offset),
|
||||
number_of_symbols: U32::new(LE, self.symbol_num),
|
||||
size_of_optional_header: U16::new(LE, self.optional_header_size() as u16),
|
||||
characteristics: U16::new(LE, nt_headers.characteristics),
|
||||
};
|
||||
self.buffer.write(&file_header);
|
||||
if self.is_64 {
|
||||
let optional_header = pe::ImageOptionalHeader64 {
|
||||
magic: U16::new(LE, pe::IMAGE_NT_OPTIONAL_HDR64_MAGIC),
|
||||
major_linker_version: nt_headers.major_linker_version,
|
||||
minor_linker_version: nt_headers.minor_linker_version,
|
||||
size_of_code: U32::new(LE, self.code_len),
|
||||
size_of_initialized_data: U32::new(LE, self.data_len),
|
||||
size_of_uninitialized_data: U32::new(LE, self.bss_len),
|
||||
address_of_entry_point: U32::new(LE, nt_headers.address_of_entry_point),
|
||||
base_of_code: U32::new(LE, self.code_address),
|
||||
image_base: U64::new(LE, nt_headers.image_base),
|
||||
section_alignment: U32::new(LE, self.section_alignment),
|
||||
file_alignment: U32::new(LE, self.file_alignment),
|
||||
major_operating_system_version: U16::new(
|
||||
LE,
|
||||
nt_headers.major_operating_system_version,
|
||||
),
|
||||
minor_operating_system_version: U16::new(
|
||||
LE,
|
||||
nt_headers.minor_operating_system_version,
|
||||
),
|
||||
major_image_version: U16::new(LE, nt_headers.major_image_version),
|
||||
minor_image_version: U16::new(LE, nt_headers.minor_image_version),
|
||||
major_subsystem_version: U16::new(LE, nt_headers.major_subsystem_version),
|
||||
minor_subsystem_version: U16::new(LE, nt_headers.minor_subsystem_version),
|
||||
win32_version_value: U32::new(LE, 0),
|
||||
size_of_image: U32::new(LE, self.virtual_len),
|
||||
size_of_headers: U32::new(LE, self.headers_len),
|
||||
check_sum: U32::new(LE, 0),
|
||||
subsystem: U16::new(LE, nt_headers.subsystem),
|
||||
dll_characteristics: U16::new(LE, nt_headers.dll_characteristics),
|
||||
size_of_stack_reserve: U64::new(LE, nt_headers.size_of_stack_reserve),
|
||||
size_of_stack_commit: U64::new(LE, nt_headers.size_of_stack_commit),
|
||||
size_of_heap_reserve: U64::new(LE, nt_headers.size_of_heap_reserve),
|
||||
size_of_heap_commit: U64::new(LE, nt_headers.size_of_heap_commit),
|
||||
loader_flags: U32::new(LE, 0),
|
||||
number_of_rva_and_sizes: U32::new(LE, self.data_directories.len() as u32),
|
||||
};
|
||||
self.buffer.write(&optional_header);
|
||||
} else {
|
||||
let optional_header = pe::ImageOptionalHeader32 {
|
||||
magic: U16::new(LE, pe::IMAGE_NT_OPTIONAL_HDR32_MAGIC),
|
||||
major_linker_version: nt_headers.major_linker_version,
|
||||
minor_linker_version: nt_headers.minor_linker_version,
|
||||
size_of_code: U32::new(LE, self.code_len),
|
||||
size_of_initialized_data: U32::new(LE, self.data_len),
|
||||
size_of_uninitialized_data: U32::new(LE, self.bss_len),
|
||||
address_of_entry_point: U32::new(LE, nt_headers.address_of_entry_point),
|
||||
base_of_code: U32::new(LE, self.code_address),
|
||||
base_of_data: U32::new(LE, self.data_address),
|
||||
image_base: U32::new(LE, nt_headers.image_base as u32),
|
||||
section_alignment: U32::new(LE, self.section_alignment),
|
||||
file_alignment: U32::new(LE, self.file_alignment),
|
||||
major_operating_system_version: U16::new(
|
||||
LE,
|
||||
nt_headers.major_operating_system_version,
|
||||
),
|
||||
minor_operating_system_version: U16::new(
|
||||
LE,
|
||||
nt_headers.minor_operating_system_version,
|
||||
),
|
||||
major_image_version: U16::new(LE, nt_headers.major_image_version),
|
||||
minor_image_version: U16::new(LE, nt_headers.minor_image_version),
|
||||
major_subsystem_version: U16::new(LE, nt_headers.major_subsystem_version),
|
||||
minor_subsystem_version: U16::new(LE, nt_headers.minor_subsystem_version),
|
||||
win32_version_value: U32::new(LE, 0),
|
||||
size_of_image: U32::new(LE, self.virtual_len),
|
||||
size_of_headers: U32::new(LE, self.headers_len),
|
||||
check_sum: U32::new(LE, 0),
|
||||
subsystem: U16::new(LE, nt_headers.subsystem),
|
||||
dll_characteristics: U16::new(LE, nt_headers.dll_characteristics),
|
||||
size_of_stack_reserve: U32::new(LE, nt_headers.size_of_stack_reserve as u32),
|
||||
size_of_stack_commit: U32::new(LE, nt_headers.size_of_stack_commit as u32),
|
||||
size_of_heap_reserve: U32::new(LE, nt_headers.size_of_heap_reserve as u32),
|
||||
size_of_heap_commit: U32::new(LE, nt_headers.size_of_heap_commit as u32),
|
||||
loader_flags: U32::new(LE, 0),
|
||||
number_of_rva_and_sizes: U32::new(LE, self.data_directories.len() as u32),
|
||||
};
|
||||
self.buffer.write(&optional_header);
|
||||
}
|
||||
|
||||
for dir in &self.data_directories {
|
||||
self.buffer.write(&pe::ImageDataDirectory {
|
||||
virtual_address: U32::new(LE, dir.virtual_address),
|
||||
size: U32::new(LE, dir.size),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Reserve the section headers.
|
||||
///
|
||||
/// The number of reserved section headers must be the same as the number of sections that
|
||||
/// are later reserved.
|
||||
// TODO: change this to a maximum number of sections?
|
||||
pub fn reserve_section_headers(&mut self, section_header_num: u16) {
|
||||
debug_assert_eq!(self.section_header_num, 0);
|
||||
self.section_header_num = section_header_num;
|
||||
self.reserve(
|
||||
u32::from(section_header_num) * mem::size_of::<pe::ImageSectionHeader>() as u32,
|
||||
1,
|
||||
);
|
||||
// Padding before sections must be included in headers_len.
|
||||
self.reserve_align(self.file_alignment);
|
||||
self.headers_len = self.len;
|
||||
self.reserve_virtual(self.len);
|
||||
}
|
||||
|
||||
/// Write the section headers.
|
||||
///
|
||||
/// This uses information that was recorded when the sections were reserved.
|
||||
pub fn write_section_headers(&mut self) {
|
||||
debug_assert_eq!(self.section_header_num as usize, self.sections.len());
|
||||
for section in &self.sections {
|
||||
let section_header = pe::ImageSectionHeader {
|
||||
name: section.name,
|
||||
virtual_size: U32::new(LE, section.range.virtual_size),
|
||||
virtual_address: U32::new(LE, section.range.virtual_address),
|
||||
size_of_raw_data: U32::new(LE, section.range.file_size),
|
||||
pointer_to_raw_data: U32::new(LE, section.range.file_offset),
|
||||
pointer_to_relocations: U32::new(LE, 0),
|
||||
pointer_to_linenumbers: U32::new(LE, 0),
|
||||
number_of_relocations: U16::new(LE, 0),
|
||||
number_of_linenumbers: U16::new(LE, 0),
|
||||
characteristics: U32::new(LE, section.characteristics),
|
||||
};
|
||||
self.buffer.write(&section_header);
|
||||
}
|
||||
}
|
||||
|
||||
/// Reserve a section.
|
||||
///
|
||||
/// Returns the file range and virtual address range that are reserved
|
||||
/// for the section.
|
||||
pub fn reserve_section(
|
||||
&mut self,
|
||||
name: [u8; 8],
|
||||
characteristics: u32,
|
||||
virtual_size: u32,
|
||||
data_size: u32,
|
||||
) -> SectionRange {
|
||||
let virtual_address = self.reserve_virtual(virtual_size);
|
||||
|
||||
// Padding after section must be included in section file size.
|
||||
let file_size = util::align_u32(data_size, self.file_alignment);
|
||||
let file_offset = if file_size != 0 {
|
||||
self.reserve(file_size, self.file_alignment)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// Sizes in optional header use the virtual size with the file alignment.
|
||||
let aligned_virtual_size = util::align_u32(virtual_size, self.file_alignment);
|
||||
if characteristics & pe::IMAGE_SCN_CNT_CODE != 0 {
|
||||
if self.code_address == 0 {
|
||||
self.code_address = virtual_address;
|
||||
}
|
||||
self.code_len += aligned_virtual_size;
|
||||
} else if characteristics & pe::IMAGE_SCN_CNT_INITIALIZED_DATA != 0 {
|
||||
if self.data_address == 0 {
|
||||
self.data_address = virtual_address;
|
||||
}
|
||||
self.data_len += aligned_virtual_size;
|
||||
} else if characteristics & pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA != 0 {
|
||||
if self.data_address == 0 {
|
||||
self.data_address = virtual_address;
|
||||
}
|
||||
self.bss_len += aligned_virtual_size;
|
||||
}
|
||||
|
||||
let range = SectionRange {
|
||||
virtual_address,
|
||||
virtual_size,
|
||||
file_offset,
|
||||
file_size,
|
||||
};
|
||||
self.sections.push(Section {
|
||||
name,
|
||||
characteristics,
|
||||
range,
|
||||
});
|
||||
range
|
||||
}
|
||||
|
||||
/// Write the data for a section.
|
||||
pub fn write_section(&mut self, offset: u32, data: &[u8]) {
|
||||
if data.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.pad_until(offset);
|
||||
self.write(data);
|
||||
self.write_align(self.file_alignment);
|
||||
}
|
||||
|
||||
/// Reserve a `.text` section.
|
||||
///
|
||||
/// Contains executable code.
|
||||
pub fn reserve_text_section(&mut self, size: u32) -> SectionRange {
|
||||
self.reserve_section(
|
||||
*b".text\0\0\0",
|
||||
pe::IMAGE_SCN_CNT_CODE | pe::IMAGE_SCN_MEM_EXECUTE | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
)
|
||||
}
|
||||
|
||||
/// Reserve a `.data` section.
|
||||
///
|
||||
/// Contains initialized data.
|
||||
///
|
||||
/// May also contain uninitialized data if `virtual_size` is greater than `data_size`.
|
||||
pub fn reserve_data_section(&mut self, virtual_size: u32, data_size: u32) -> SectionRange {
|
||||
self.reserve_section(
|
||||
*b".data\0\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ | pe::IMAGE_SCN_MEM_WRITE,
|
||||
virtual_size,
|
||||
data_size,
|
||||
)
|
||||
}
|
||||
|
||||
/// Reserve a `.rdata` section.
|
||||
///
|
||||
/// Contains read-only initialized data.
|
||||
pub fn reserve_rdata_section(&mut self, size: u32) -> SectionRange {
|
||||
self.reserve_section(
|
||||
*b".rdata\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
)
|
||||
}
|
||||
|
||||
/// Reserve a `.bss` section.
|
||||
///
|
||||
/// Contains uninitialized data.
|
||||
pub fn reserve_bss_section(&mut self, size: u32) -> SectionRange {
|
||||
self.reserve_section(
|
||||
*b".bss\0\0\0\0",
|
||||
pe::IMAGE_SCN_CNT_UNINITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ | pe::IMAGE_SCN_MEM_WRITE,
|
||||
size,
|
||||
0,
|
||||
)
|
||||
}
|
||||
|
||||
/// Reserve an `.idata` section.
|
||||
///
|
||||
/// Contains import tables. Note that it is permissible to store import tables in a different
|
||||
/// section.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_IMPORT` data directory.
|
||||
pub fn reserve_idata_section(&mut self, size: u32) -> SectionRange {
|
||||
let range = self.reserve_section(
|
||||
*b".idata\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ | pe::IMAGE_SCN_MEM_WRITE,
|
||||
size,
|
||||
size,
|
||||
);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_IMPORT];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: range.virtual_address,
|
||||
size,
|
||||
};
|
||||
range
|
||||
}
|
||||
|
||||
/// Reserve an `.edata` section.
|
||||
///
|
||||
/// Contains export tables.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_EXPORT` data directory.
|
||||
pub fn reserve_edata_section(&mut self, size: u32) -> SectionRange {
|
||||
let range = self.reserve_section(
|
||||
*b".edata\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_EXPORT];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: range.virtual_address,
|
||||
size,
|
||||
};
|
||||
range
|
||||
}
|
||||
|
||||
/// Reserve a `.pdata` section.
|
||||
///
|
||||
/// Contains exception information.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_EXCEPTION` data directory.
|
||||
pub fn reserve_pdata_section(&mut self, size: u32) -> SectionRange {
|
||||
let range = self.reserve_section(
|
||||
*b".pdata\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_EXCEPTION];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: range.virtual_address,
|
||||
size,
|
||||
};
|
||||
range
|
||||
}
|
||||
|
||||
/// Reserve a `.xdata` section.
|
||||
///
|
||||
/// Contains exception information.
|
||||
pub fn reserve_xdata_section(&mut self, size: u32) -> SectionRange {
|
||||
self.reserve_section(
|
||||
*b".xdata\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
)
|
||||
}
|
||||
|
||||
/// Reserve a `.rsrc` section.
|
||||
///
|
||||
/// Contains the resource directory.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_RESOURCE` data directory.
|
||||
pub fn reserve_rsrc_section(&mut self, size: u32) -> SectionRange {
|
||||
let range = self.reserve_section(
|
||||
*b".rsrc\0\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA | pe::IMAGE_SCN_MEM_READ,
|
||||
size,
|
||||
size,
|
||||
);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_RESOURCE];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: range.virtual_address,
|
||||
size,
|
||||
};
|
||||
range
|
||||
}
|
||||
|
||||
/// Add a base relocation.
|
||||
///
|
||||
/// `typ` must be one of the `IMAGE_REL_BASED_*` constants.
|
||||
pub fn add_reloc(&mut self, mut virtual_address: u32, typ: u16) {
|
||||
let reloc = U16::new(LE, typ << 12 | (virtual_address & 0xfff) as u16);
|
||||
virtual_address &= !0xfff;
|
||||
if let Some(block) = self.reloc_blocks.last_mut() {
|
||||
if block.virtual_address == virtual_address {
|
||||
self.relocs.push(reloc);
|
||||
block.count += 1;
|
||||
return;
|
||||
}
|
||||
// Blocks must have an even number of relocations.
|
||||
if block.count & 1 != 0 {
|
||||
self.relocs.push(U16::new(LE, 0));
|
||||
block.count += 1;
|
||||
}
|
||||
debug_assert!(block.virtual_address < virtual_address);
|
||||
}
|
||||
self.relocs.push(reloc);
|
||||
self.reloc_blocks.push(RelocBlock {
|
||||
virtual_address,
|
||||
count: 1,
|
||||
});
|
||||
}
|
||||
|
||||
/// Return true if a base relocation has been added.
|
||||
pub fn has_relocs(&mut self) -> bool {
|
||||
!self.relocs.is_empty()
|
||||
}
|
||||
|
||||
/// Reserve a `.reloc` section.
|
||||
///
|
||||
/// This contains the base relocations that were added with `add_reloc`.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_BASERELOC` data directory.
|
||||
pub fn reserve_reloc_section(&mut self) -> SectionRange {
|
||||
if let Some(block) = self.reloc_blocks.last_mut() {
|
||||
// Blocks must have an even number of relocations.
|
||||
if block.count & 1 != 0 {
|
||||
self.relocs.push(U16::new(LE, 0));
|
||||
block.count += 1;
|
||||
}
|
||||
}
|
||||
let size = self.reloc_blocks.iter().map(RelocBlock::size).sum();
|
||||
let range = self.reserve_section(
|
||||
*b".reloc\0\0",
|
||||
pe::IMAGE_SCN_CNT_INITIALIZED_DATA
|
||||
| pe::IMAGE_SCN_MEM_READ
|
||||
| pe::IMAGE_SCN_MEM_DISCARDABLE,
|
||||
size,
|
||||
size,
|
||||
);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_BASERELOC];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: range.virtual_address,
|
||||
size,
|
||||
};
|
||||
self.reloc_offset = range.file_offset;
|
||||
range
|
||||
}
|
||||
|
||||
/// Write a `.reloc` section.
|
||||
///
|
||||
/// This contains the base relocations that were added with `add_reloc`.
|
||||
pub fn write_reloc_section(&mut self) {
|
||||
if self.reloc_offset == 0 {
|
||||
return;
|
||||
}
|
||||
self.pad_until(self.reloc_offset);
|
||||
|
||||
let mut total = 0;
|
||||
for block in &self.reloc_blocks {
|
||||
self.buffer.write(&pe::ImageBaseRelocation {
|
||||
virtual_address: U32::new(LE, block.virtual_address),
|
||||
size_of_block: U32::new(LE, block.size()),
|
||||
});
|
||||
self.buffer
|
||||
.write_slice(&self.relocs[total..][..block.count as usize]);
|
||||
total += block.count as usize;
|
||||
}
|
||||
debug_assert_eq!(total, self.relocs.len());
|
||||
|
||||
self.write_align(self.file_alignment);
|
||||
}
|
||||
|
||||
/// Reserve the certificate table.
|
||||
///
|
||||
/// This also sets the `pe::IMAGE_DIRECTORY_ENTRY_SECURITY` data directory.
|
||||
// TODO: reserve individual certificates
|
||||
pub fn reserve_certificate_table(&mut self, size: u32) {
|
||||
let size = util::align_u32(size, 8);
|
||||
let offset = self.reserve(size, 8);
|
||||
let dir = &mut self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_SECURITY];
|
||||
debug_assert_eq!(dir.virtual_address, 0);
|
||||
*dir = DataDirectory {
|
||||
virtual_address: offset,
|
||||
size,
|
||||
};
|
||||
}
|
||||
|
||||
/// Write the certificate table.
|
||||
// TODO: write individual certificates
|
||||
pub fn write_certificate_table(&mut self, data: &[u8]) {
|
||||
let dir = self.data_directories[pe::IMAGE_DIRECTORY_ENTRY_SECURITY];
|
||||
self.pad_until(dir.virtual_address);
|
||||
self.write(data);
|
||||
self.pad_until(dir.virtual_address + dir.size);
|
||||
}
|
||||
}
|
||||
|
||||
/// Information required for writing [`pe::ImageNtHeaders32`] or [`pe::ImageNtHeaders64`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NtHeaders {
|
||||
// ImageFileHeader
|
||||
pub machine: u16,
|
||||
pub time_date_stamp: u32,
|
||||
pub characteristics: u16,
|
||||
// ImageOptionalHeader
|
||||
pub major_linker_version: u8,
|
||||
pub minor_linker_version: u8,
|
||||
pub address_of_entry_point: u32,
|
||||
pub image_base: u64,
|
||||
pub major_operating_system_version: u16,
|
||||
pub minor_operating_system_version: u16,
|
||||
pub major_image_version: u16,
|
||||
pub minor_image_version: u16,
|
||||
pub major_subsystem_version: u16,
|
||||
pub minor_subsystem_version: u16,
|
||||
pub subsystem: u16,
|
||||
pub dll_characteristics: u16,
|
||||
pub size_of_stack_reserve: u64,
|
||||
pub size_of_stack_commit: u64,
|
||||
pub size_of_heap_reserve: u64,
|
||||
pub size_of_heap_commit: u64,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct DataDirectory {
|
||||
virtual_address: u32,
|
||||
size: u32,
|
||||
}
|
||||
|
||||
/// Information required for writing [`pe::ImageSectionHeader`].
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Section {
|
||||
pub name: [u8; pe::IMAGE_SIZEOF_SHORT_NAME],
|
||||
pub characteristics: u32,
|
||||
pub range: SectionRange,
|
||||
}
|
||||
|
||||
/// The file range and virtual address range for a section.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct SectionRange {
|
||||
pub virtual_address: u32,
|
||||
pub virtual_size: u32,
|
||||
pub file_offset: u32,
|
||||
pub file_size: u32,
|
||||
}
|
||||
|
||||
struct RelocBlock {
|
||||
virtual_address: u32,
|
||||
count: u32,
|
||||
}
|
||||
|
||||
impl RelocBlock {
|
||||
fn size(&self) -> u32 {
|
||||
mem::size_of::<pe::ImageBaseRelocation>() as u32 + self.count * mem::size_of::<u16>() as u32
|
||||
}
|
||||
}
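A minimal sketch of the two-phase flow the `Writer` doc comment describes, assuming a 64-bit image with a single `.text` section. The header values and the `pe::IMAGE_*` constants are illustrative choices, the snippet is assumed to run inside a function returning `Result` so that `?` works, and it is not a complete, loadable PE image:

// Phase 1: reserve file and virtual ranges in the order they will be written.
let code = b"\xc3".to_vec(); // illustrative machine code (a single `ret`)
let mut data = Vec::new();
let mut writer = Writer::new(true, 0x1000, 0x200, &mut data);
writer.reserve_dos_header_and_stub();
writer.reserve_nt_headers(16);
writer.reserve_section_headers(1);
let text = writer.reserve_text_section(code.len() as u32);

// Phase 2: write everything out in the same order.
writer.write_dos_header_and_stub()?;
writer.write_nt_headers(NtHeaders {
    machine: pe::IMAGE_FILE_MACHINE_AMD64,
    time_date_stamp: 0,
    characteristics: pe::IMAGE_FILE_EXECUTABLE_IMAGE,
    major_linker_version: 0,
    minor_linker_version: 0,
    address_of_entry_point: text.virtual_address,
    image_base: 0x1_4000_0000,
    major_operating_system_version: 6,
    minor_operating_system_version: 0,
    major_image_version: 0,
    minor_image_version: 0,
    major_subsystem_version: 6,
    minor_subsystem_version: 0,
    subsystem: pe::IMAGE_SUBSYSTEM_WINDOWS_CUI,
    dll_characteristics: 0,
    size_of_stack_reserve: 0x10_0000,
    size_of_stack_commit: 0x1000,
    size_of_heap_reserve: 0x10_0000,
    size_of_heap_commit: 0x1000,
});
writer.write_section_headers();
writer.write_section(text.file_offset, &code);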
159
vendor/object/src/write/string.rs
vendored
Normal file
@@ -0,0 +1,159 @@
use alloc::vec::Vec;

#[cfg(feature = "std")]
type IndexSet<K> = indexmap::IndexSet<K>;
#[cfg(not(feature = "std"))]
type IndexSet<K> = indexmap::IndexSet<K, hashbrown::hash_map::DefaultHashBuilder>;

/// An identifier for an entry in a string table.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct StringId(usize);

#[derive(Debug, Default)]
pub(crate) struct StringTable<'a> {
    strings: IndexSet<&'a [u8]>,
    offsets: Vec<usize>,
}

impl<'a> StringTable<'a> {
    /// Add a string to the string table.
    ///
    /// Panics if the string table has already been written, or
    /// if the string contains a null byte.
    pub fn add(&mut self, string: &'a [u8]) -> StringId {
        assert!(self.offsets.is_empty());
        assert!(!string.contains(&0));
        let id = self.strings.insert_full(string).0;
        StringId(id)
    }

    /// Return the id of the given string.
    ///
    /// Panics if the string is not in the string table.
    pub fn get_id(&self, string: &[u8]) -> StringId {
        let id = self.strings.get_index_of(string).unwrap();
        StringId(id)
    }

    /// Return the string for the given id.
    ///
    /// Panics if the string is not in the string table.
    pub fn get_string(&self, id: StringId) -> &'a [u8] {
        self.strings.get_index(id.0).unwrap()
    }

    /// Return the offset of the given string.
    ///
    /// Panics if the string table has not been written, or
    /// if the string is not in the string table.
    pub fn get_offset(&self, id: StringId) -> usize {
        self.offsets[id.0]
    }

    /// Append the string table to the given `Vec`, and
    /// calculate the list of string offsets.
    ///
    /// `base` is the initial string table offset. For example,
    /// this should be 1 for ELF, to account for the initial
    /// null byte (which must have been written by the caller).
    pub fn write(&mut self, base: usize, w: &mut Vec<u8>) {
        assert!(self.offsets.is_empty());

        let mut ids: Vec<_> = (0..self.strings.len()).collect();
        sort(&mut ids, 1, &self.strings);

        self.offsets = vec![0; ids.len()];
        let mut offset = base;
        let mut previous = &[][..];
        for id in ids {
            let string = self.strings.get_index(id).unwrap();
            if previous.ends_with(string) {
                self.offsets[id] = offset - string.len() - 1;
            } else {
                self.offsets[id] = offset;
                w.extend_from_slice(string);
                w.push(0);
                offset += string.len() + 1;
                previous = string;
            }
        }
    }
}

// Multi-key quicksort.
//
// Ordering is such that if a string is a suffix of at least one other string,
// then it is placed immediately after one of those strings. That is:
// - comparison starts at the end of the string
// - shorter strings come later
//
// Based on the implementation in LLVM.
fn sort(mut ids: &mut [usize], mut pos: usize, strings: &IndexSet<&[u8]>) {
    loop {
        if ids.len() <= 1 {
            return;
        }

        let pivot = byte(ids[0], pos, strings);
        let mut lower = 0;
        let mut upper = ids.len();
        let mut i = 1;
        while i < upper {
            let b = byte(ids[i], pos, strings);
            if b > pivot {
                ids.swap(lower, i);
                lower += 1;
                i += 1;
            } else if b < pivot {
                upper -= 1;
                ids.swap(upper, i);
            } else {
                i += 1;
            }
        }

        sort(&mut ids[..lower], pos, strings);
        sort(&mut ids[upper..], pos, strings);

        if pivot == 0 {
            return;
        }
        ids = &mut ids[lower..upper];
        pos += 1;
    }
}

fn byte(id: usize, pos: usize, strings: &IndexSet<&[u8]>) -> u8 {
    let string = strings.get_index(id).unwrap();
    let len = string.len();
    if len >= pos {
        string[len - pos]
    } else {
        // We know the strings don't contain null bytes.
        0
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn string_table() {
        let mut table = StringTable::default();
        let id0 = table.add(b"");
        let id1 = table.add(b"foo");
        let id2 = table.add(b"bar");
        let id3 = table.add(b"foobar");

        let mut data = Vec::new();
        data.push(0);
        table.write(1, &mut data);
        assert_eq!(data, b"\0foobar\0foo\0");

        assert_eq!(table.get_offset(id0), 11);
        assert_eq!(table.get_offset(id1), 8);
        assert_eq!(table.get_offset(id2), 4);
        assert_eq!(table.get_offset(id3), 1);
    }
}
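To make the suffix sharing exercised by the test above concrete, the written table contains only the two superstrings, and the shorter strings resolve to offsets inside them (a sketch of the byte layout, not additional behavior):

// Bytes written (base = 1): \0 f o o b a r \0 f o o \0
// Offsets:                   0 1 2 3 4 5 6  7 8 9 10 11
// "foobar" -> 1, "bar" -> 4 (suffix of "foobar"),
// "foo" -> 8, "" -> 11 (the empty string is a suffix of "foo").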
260
vendor/object/src/write/util.rs
vendored
Normal file
@@ -0,0 +1,260 @@
|
||||
use alloc::vec::Vec;
|
||||
#[cfg(feature = "std")]
|
||||
use std::{io, mem};
|
||||
|
||||
use crate::pod::{bytes_of, bytes_of_slice, Pod};
|
||||
|
||||
/// Trait for writable buffer.
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub trait WritableBuffer {
|
||||
/// Returns position/offset for data to be written at.
|
||||
///
|
||||
/// Should only be used in debug assertions
|
||||
fn len(&self) -> usize;
|
||||
|
||||
/// Reserves specified number of bytes in the buffer.
|
||||
///
|
||||
/// This will be called exactly once before writing anything to the buffer,
|
||||
/// and the given size is the exact total number of bytes that will be written.
|
||||
fn reserve(&mut self, size: usize) -> Result<(), ()>;
|
||||
|
||||
/// Writes zero bytes at the end of the buffer until the buffer
|
||||
/// has the specified length.
|
||||
fn resize(&mut self, new_len: usize);
|
||||
|
||||
/// Writes the specified slice of bytes at the end of the buffer.
|
||||
fn write_bytes(&mut self, val: &[u8]);
|
||||
|
||||
/// Writes the specified `Pod` type at the end of the buffer.
|
||||
fn write_pod<T: Pod>(&mut self, val: &T)
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.write_bytes(bytes_of(val))
|
||||
}
|
||||
|
||||
/// Writes the specified `Pod` slice at the end of the buffer.
|
||||
fn write_pod_slice<T: Pod>(&mut self, val: &[T])
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.write_bytes(bytes_of_slice(val))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> dyn WritableBuffer + 'a {
|
||||
/// Writes the specified `Pod` type at the end of the buffer.
|
||||
pub fn write<T: Pod>(&mut self, val: &T) {
|
||||
self.write_bytes(bytes_of(val))
|
||||
}
|
||||
|
||||
/// Writes the specified `Pod` slice at the end of the buffer.
|
||||
pub fn write_slice<T: Pod>(&mut self, val: &[T]) {
|
||||
self.write_bytes(bytes_of_slice(val))
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableBuffer for Vec<u8> {
|
||||
#[inline]
|
||||
fn len(&self) -> usize {
|
||||
self.len()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn reserve(&mut self, size: usize) -> Result<(), ()> {
|
||||
debug_assert!(self.is_empty());
|
||||
self.reserve(size);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn resize(&mut self, new_len: usize) {
|
||||
debug_assert!(new_len >= self.len());
|
||||
self.resize(new_len, 0);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_bytes(&mut self, val: &[u8]) {
|
||||
debug_assert!(self.len() + val.len() <= self.capacity());
|
||||
self.extend_from_slice(val)
|
||||
}
|
||||
}
|
||||
|
||||
/// A [`WritableBuffer`] that streams data to a [`Write`](std::io::Write) implementation.
|
||||
///
|
||||
/// [`Self::result`] must be called to determine if an I/O error occurred during writing.
|
||||
///
|
||||
/// It is advisable to use a buffered writer like [`BufWriter`](std::io::BufWriter)
|
||||
/// instead of an unbuffered writer like [`File`](std::fs::File).
|
||||
#[cfg(feature = "std")]
|
||||
#[derive(Debug)]
|
||||
pub struct StreamingBuffer<W> {
|
||||
writer: W,
|
||||
len: usize,
|
||||
result: Result<(), io::Error>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl<W> StreamingBuffer<W> {
|
||||
/// Create a new `StreamingBuffer` backed by the given writer.
|
||||
pub fn new(writer: W) -> Self {
|
||||
StreamingBuffer {
|
||||
writer,
|
||||
len: 0,
|
||||
result: Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Unwraps this [`StreamingBuffer`] giving back the original writer.
|
||||
pub fn into_inner(self) -> W {
|
||||
self.writer
|
||||
}
|
||||
|
||||
/// Returns any error that occurred during writing.
|
||||
pub fn result(&mut self) -> Result<(), io::Error> {
|
||||
mem::replace(&mut self.result, Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl<W: io::Write> WritableBuffer for StreamingBuffer<W> {
|
||||
#[inline]
|
||||
fn len(&self) -> usize {
|
||||
self.len
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn reserve(&mut self, _size: usize) -> Result<(), ()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn resize(&mut self, new_len: usize) {
|
||||
debug_assert!(self.len <= new_len);
|
||||
while self.len < new_len {
|
||||
let write_amt = (new_len - self.len - 1) % 1024 + 1;
|
||||
self.write_bytes(&[0; 1024][..write_amt]);
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_bytes(&mut self, val: &[u8]) {
|
||||
if self.result.is_ok() {
|
||||
self.result = self.writer.write_all(val);
|
||||
}
|
||||
self.len += val.len();
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for mutable byte slices.
|
||||
///
|
||||
/// It provides convenience methods for `Pod` types.
|
||||
pub(crate) trait BytesMut {
|
||||
fn write_at<T: Pod>(self, offset: usize, val: &T) -> Result<(), ()>;
|
||||
}
|
||||
|
||||
impl<'a> BytesMut for &'a mut [u8] {
|
||||
#[inline]
|
||||
fn write_at<T: Pod>(self, offset: usize, val: &T) -> Result<(), ()> {
|
||||
let src = bytes_of(val);
|
||||
let dest = self.get_mut(offset..).ok_or(())?;
|
||||
let dest = dest.get_mut(..src.len()).ok_or(())?;
|
||||
dest.copy_from_slice(src);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Write an unsigned number using the LEB128 encoding to a buffer.
|
||||
///
|
||||
/// Returns the number of bytes written.
|
||||
pub(crate) fn write_uleb128(buf: &mut Vec<u8>, mut val: u64) -> usize {
|
||||
let mut len = 0;
|
||||
loop {
|
||||
let mut byte = (val & 0x7f) as u8;
|
||||
val >>= 7;
|
||||
let done = val == 0;
|
||||
if !done {
|
||||
byte |= 0x80;
|
||||
}
|
||||
|
||||
buf.push(byte);
|
||||
len += 1;
|
||||
|
||||
if done {
|
||||
return len;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a signed number using the LEB128 encoding to a buffer.
|
||||
///
|
||||
/// Returns the number of bytes written.
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn write_sleb128(buf: &mut Vec<u8>, mut val: i64) -> usize {
|
||||
let mut len = 0;
|
||||
loop {
|
||||
let mut byte = val as u8;
|
||||
// Keep the sign bit for testing
|
||||
val >>= 6;
|
||||
let done = val == 0 || val == -1;
|
||||
if done {
|
||||
byte &= !0x80;
|
||||
} else {
|
||||
// Remove the sign bit
|
||||
val >>= 1;
|
||||
byte |= 0x80;
|
||||
}
|
||||
|
||||
buf.push(byte);
|
||||
len += 1;
|
||||
|
||||
if done {
|
||||
return len;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn align(offset: usize, size: usize) -> usize {
|
||||
(offset + (size - 1)) & !(size - 1)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn align_u32(offset: u32, size: u32) -> u32 {
|
||||
(offset + (size - 1)) & !(size - 1)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn align_u64(offset: u64, size: u64) -> u64 {
|
||||
(offset + (size - 1)) & !(size - 1)
|
||||
}
|
||||
|
||||
pub(crate) fn write_align(buffer: &mut dyn WritableBuffer, size: usize) {
|
||||
let new_len = align(buffer.len(), size);
|
||||
buffer.resize(new_len);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn bytes_mut() {
|
||||
let data = vec![0x01, 0x23, 0x45, 0x67];
|
||||
|
||||
let mut bytes = data.clone();
|
||||
bytes.extend_from_slice(bytes_of(&u16::to_be(0x89ab)));
|
||||
assert_eq!(bytes, [0x01, 0x23, 0x45, 0x67, 0x89, 0xab]);
|
||||
|
||||
let mut bytes = data.clone();
|
||||
assert_eq!(bytes.write_at(0, &u16::to_be(0x89ab)), Ok(()));
|
||||
assert_eq!(bytes, [0x89, 0xab, 0x45, 0x67]);
|
||||
|
||||
let mut bytes = data.clone();
|
||||
assert_eq!(bytes.write_at(2, &u16::to_be(0x89ab)), Ok(()));
|
||||
assert_eq!(bytes, [0x01, 0x23, 0x89, 0xab]);
|
||||
|
||||
assert_eq!(bytes.write_at(3, &u16::to_be(0x89ab)), Err(()));
|
||||
assert_eq!(bytes.write_at(4, &u16::to_be(0x89ab)), Err(()));
|
||||
assert_eq!(vec![].write_at(0, &u32::to_be(0x89ab)), Err(()));
|
||||
}
|
||||
}
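A worked example of the LEB128 helpers above (a sketch only; both functions are crate-internal, so this is illustrative rather than public API). 624485 is emitted seven bits at a time, least-significant group first, with the high bit of each byte marking continuation:

let mut buf = Vec::new();
assert_eq!(write_uleb128(&mut buf, 624485), 3);
assert_eq!(buf, [0xe5, 0x8e, 0x26]);

buf.clear();
// Signed encoding sign-extends as it shifts; -2 fits in a single byte.
assert_eq!(write_sleb128(&mut buf, -2), 1);
assert_eq!(buf, [0x7e]);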
556
vendor/object/src/write/xcoff.rs
vendored
Normal file
@@ -0,0 +1,556 @@
|
||||
use core::mem;
|
||||
|
||||
use crate::endian::{BigEndian as BE, I16, U16, U32};
|
||||
use crate::write::string::*;
|
||||
use crate::write::util::*;
|
||||
use crate::write::*;
|
||||
|
||||
use crate::{xcoff, AddressSize};
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct SectionOffsets {
|
||||
address: u64,
|
||||
data_offset: usize,
|
||||
reloc_offset: usize,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
struct SymbolOffsets {
|
||||
index: usize,
|
||||
str_id: Option<StringId>,
|
||||
aux_count: u8,
|
||||
storage_class: u8,
|
||||
}
|
||||
|
||||
impl<'a> Object<'a> {
|
||||
pub(crate) fn xcoff_section_info(
|
||||
&self,
|
||||
section: StandardSection,
|
||||
) -> (&'static [u8], &'static [u8], SectionKind, SectionFlags) {
|
||||
match section {
|
||||
StandardSection::Text => (&[], &b".text"[..], SectionKind::Text, SectionFlags::None),
|
||||
StandardSection::Data => (&[], &b".data"[..], SectionKind::Data, SectionFlags::None),
|
||||
StandardSection::ReadOnlyData
|
||||
| StandardSection::ReadOnlyDataWithRel
|
||||
| StandardSection::ReadOnlyString => (
|
||||
&[],
|
||||
&b".rdata"[..],
|
||||
SectionKind::ReadOnlyData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::UninitializedData => (
|
||||
&[],
|
||||
&b".bss"[..],
|
||||
SectionKind::UninitializedData,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::Tls => (&[], &b".tdata"[..], SectionKind::Tls, SectionFlags::None),
|
||||
StandardSection::UninitializedTls => (
|
||||
&[],
|
||||
&b".tbss"[..],
|
||||
SectionKind::UninitializedTls,
|
||||
SectionFlags::None,
|
||||
),
|
||||
StandardSection::TlsVariables => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::TlsVariables, SectionFlags::None)
|
||||
}
|
||||
StandardSection::Common => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::Common, SectionFlags::None)
|
||||
}
|
||||
StandardSection::GnuProperty => {
|
||||
// Unsupported section.
|
||||
(&[], &[], SectionKind::Note, SectionFlags::None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn xcoff_fixup_relocation(&mut self, relocation: &mut Relocation) -> i64 {
|
||||
let constant = match relocation.kind {
|
||||
RelocationKind::Relative => relocation.addend + 4,
|
||||
_ => relocation.addend,
|
||||
};
|
||||
relocation.addend -= constant;
|
||||
constant
|
||||
}
|
||||
|
||||
pub(crate) fn xcoff_write(&self, buffer: &mut dyn WritableBuffer) -> Result<()> {
|
||||
let is_64 = match self.architecture.address_size().unwrap() {
|
||||
AddressSize::U8 | AddressSize::U16 | AddressSize::U32 => false,
|
||||
AddressSize::U64 => true,
|
||||
};
|
||||
|
||||
let (hdr_size, sechdr_size, rel_size, sym_size) = if is_64 {
|
||||
(
|
||||
mem::size_of::<xcoff::FileHeader64>(),
|
||||
mem::size_of::<xcoff::SectionHeader64>(),
|
||||
mem::size_of::<xcoff::Rel64>(),
|
||||
mem::size_of::<xcoff::Symbol64>(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
mem::size_of::<xcoff::FileHeader32>(),
|
||||
mem::size_of::<xcoff::SectionHeader32>(),
|
||||
mem::size_of::<xcoff::Rel32>(),
|
||||
mem::size_of::<xcoff::Symbol32>(),
|
||||
)
|
||||
};
|
||||
|
||||
// Calculate offsets and build strtab.
|
||||
let mut offset = 0;
|
||||
let mut strtab = StringTable::default();
|
||||
// We place the shared address 0 immediately after the section header table.
|
||||
let mut address = 0;
|
||||
|
||||
// XCOFF file header.
|
||||
offset += hdr_size;
|
||||
// Section headers.
|
||||
offset += self.sections.len() * sechdr_size;
|
||||
|
||||
// Calculate size of section data.
|
||||
let mut section_offsets = vec![SectionOffsets::default(); self.sections.len()];
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let len = section.data.len();
|
||||
let sectype = section.kind;
|
||||
// Section address should be 0 for all sections except the .text, .data, and .bss sections.
|
||||
if sectype == SectionKind::Data
|
||||
|| sectype == SectionKind::Text
|
||||
|| sectype == SectionKind::UninitializedData
|
||||
{
|
||||
section_offsets[index].address = address as u64;
|
||||
address += len;
|
||||
address = align(address, 4);
|
||||
} else {
|
||||
section_offsets[index].address = 0;
|
||||
}
|
||||
if len != 0 {
|
||||
// Set the default section alignment as 4.
|
||||
offset = align(offset, 4);
|
||||
section_offsets[index].data_offset = offset;
|
||||
offset += len;
|
||||
} else {
|
||||
section_offsets[index].data_offset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate size of relocations.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let count = section.relocations.len();
|
||||
if count != 0 {
|
||||
section_offsets[index].reloc_offset = offset;
|
||||
offset += count * rel_size;
|
||||
} else {
|
||||
section_offsets[index].reloc_offset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate size of symbols.
|
||||
let mut file_str_id = None;
|
||||
let mut symbol_offsets = vec![SymbolOffsets::default(); self.symbols.len()];
|
||||
let mut symtab_count = 0;
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
symbol_offsets[index].index = symtab_count;
|
||||
symtab_count += 1;
|
||||
|
||||
let storage_class = if let SymbolFlags::Xcoff { n_sclass, .. } = symbol.flags {
|
||||
n_sclass
|
||||
} else {
|
||||
match symbol.kind {
|
||||
SymbolKind::Null => xcoff::C_NULL,
|
||||
SymbolKind::File => xcoff::C_FILE,
|
||||
SymbolKind::Text | SymbolKind::Data | SymbolKind::Tls => {
|
||||
if symbol.is_local() {
|
||||
xcoff::C_STAT
|
||||
} else if symbol.weak {
|
||||
xcoff::C_WEAKEXT
|
||||
} else {
|
||||
xcoff::C_EXT
|
||||
}
|
||||
}
|
||||
SymbolKind::Section | SymbolKind::Label | SymbolKind::Unknown => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented symbol `{}` kind {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
};
|
||||
symbol_offsets[index].storage_class = storage_class;
|
||||
|
||||
if storage_class == xcoff::C_FILE {
|
||||
if is_64 && file_str_id.is_none() {
|
||||
file_str_id = Some(strtab.add(b".file"));
|
||||
}
|
||||
if symbol.name.len() > 8 {
|
||||
symbol_offsets[index].str_id = Some(strtab.add(&symbol.name));
|
||||
}
|
||||
} else if is_64 || symbol.name.len() > 8 {
|
||||
symbol_offsets[index].str_id = Some(strtab.add(&symbol.name));
|
||||
}
|
||||
|
||||
symbol_offsets[index].aux_count = 0;
|
||||
match storage_class {
|
||||
xcoff::C_FILE => {
|
||||
symbol_offsets[index].aux_count = 1;
|
||||
symtab_count += 1;
|
||||
}
|
||||
xcoff::C_EXT | xcoff::C_WEAKEXT | xcoff::C_HIDEXT => {
|
||||
symbol_offsets[index].aux_count = 1;
|
||||
symtab_count += 1;
|
||||
}
|
||||
// TODO: support auxiliary entry for other types of symbol.
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
let symtab_offset = offset;
|
||||
let symtab_len = symtab_count * sym_size;
|
||||
offset += symtab_len;
|
||||
|
||||
// Calculate size of strtab.
|
||||
let strtab_offset = offset;
|
||||
let mut strtab_data = Vec::new();
|
||||
// First 4 bytes of strtab are the length.
|
||||
strtab.write(4, &mut strtab_data);
|
||||
let strtab_len = strtab_data.len() + 4;
|
||||
offset += strtab_len;
|
||||
|
||||
// Start writing.
|
||||
buffer
|
||||
.reserve(offset)
|
||||
.map_err(|_| Error(String::from("Cannot allocate buffer")))?;
|
||||
|
||||
// Write file header.
|
||||
if is_64 {
|
||||
let header = xcoff::FileHeader64 {
|
||||
f_magic: U16::new(BE, xcoff::MAGIC_64),
|
||||
f_nscns: U16::new(BE, self.sections.len() as u16),
|
||||
f_timdat: U32::new(BE, 0),
|
||||
f_symptr: U64::new(BE, symtab_offset as u64),
|
||||
f_nsyms: U32::new(BE, symtab_count as u32),
|
||||
f_opthdr: U16::new(BE, 0),
|
||||
f_flags: match self.flags {
|
||||
FileFlags::Xcoff { f_flags } => U16::new(BE, f_flags),
|
||||
_ => U16::default(),
|
||||
},
|
||||
};
|
||||
buffer.write(&header);
|
||||
} else {
|
||||
let header = xcoff::FileHeader32 {
|
||||
f_magic: U16::new(BE, xcoff::MAGIC_32),
|
||||
f_nscns: U16::new(BE, self.sections.len() as u16),
|
||||
f_timdat: U32::new(BE, 0),
|
||||
f_symptr: U32::new(BE, symtab_offset as u32),
|
||||
f_nsyms: U32::new(BE, symtab_count as u32),
|
||||
f_opthdr: U16::new(BE, 0),
|
||||
f_flags: match self.flags {
|
||||
FileFlags::Xcoff { f_flags } => U16::new(BE, f_flags),
|
||||
_ => U16::default(),
|
||||
},
|
||||
};
|
||||
buffer.write(&header);
|
||||
}
|
||||
|
||||
// Write section headers.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let mut sectname = [0; 8];
|
||||
sectname
|
||||
.get_mut(..section.name.len())
|
||||
.ok_or_else(|| {
|
||||
Error(format!(
|
||||
"section name `{}` is too long",
|
||||
section.name().unwrap_or(""),
|
||||
))
|
||||
})?
|
||||
.copy_from_slice(&section.name);
|
||||
let flags = if let SectionFlags::Xcoff { s_flags } = section.flags {
|
||||
s_flags
|
||||
} else {
|
||||
match section.kind {
|
||||
SectionKind::Text
|
||||
| SectionKind::ReadOnlyData
|
||||
| SectionKind::ReadOnlyString
|
||||
| SectionKind::ReadOnlyDataWithRel => xcoff::STYP_TEXT,
|
||||
SectionKind::Data => xcoff::STYP_DATA,
|
||||
SectionKind::UninitializedData => xcoff::STYP_BSS,
|
||||
SectionKind::Tls => xcoff::STYP_TDATA,
|
||||
SectionKind::UninitializedTls => xcoff::STYP_TBSS,
|
||||
SectionKind::OtherString => xcoff::STYP_INFO,
|
||||
SectionKind::Debug => xcoff::STYP_DEBUG,
|
||||
SectionKind::Other | SectionKind::Metadata => 0,
|
||||
SectionKind::Note
|
||||
| SectionKind::Linker
|
||||
| SectionKind::Common
|
||||
| SectionKind::Unknown
|
||||
| SectionKind::TlsVariables
|
||||
| SectionKind::Elf(_) => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented section `{}` kind {:?}",
|
||||
section.name().unwrap_or(""),
|
||||
section.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
.into()
|
||||
};
|
||||
if is_64 {
|
||||
let section_header = xcoff::SectionHeader64 {
|
||||
s_name: sectname,
|
||||
s_paddr: U64::new(BE, section_offsets[index].address),
|
||||
// This field has the same value as the s_paddr field.
|
||||
s_vaddr: U64::new(BE, section_offsets[index].address),
|
||||
s_size: U64::new(BE, section.data.len() as u64),
|
||||
s_scnptr: U64::new(BE, section_offsets[index].data_offset as u64),
|
||||
s_relptr: U64::new(BE, section_offsets[index].reloc_offset as u64),
|
||||
s_lnnoptr: U64::new(BE, 0),
|
||||
s_nreloc: U32::new(BE, section.relocations.len() as u32),
|
||||
s_nlnno: U32::new(BE, 0),
|
||||
s_flags: U32::new(BE, flags),
|
||||
s_reserve: U32::new(BE, 0),
|
||||
};
|
||||
buffer.write(&section_header);
|
||||
} else {
|
||||
let section_header = xcoff::SectionHeader32 {
|
||||
s_name: sectname,
|
||||
s_paddr: U32::new(BE, section_offsets[index].address as u32),
|
||||
// This field has the same value as the s_paddr field.
|
||||
s_vaddr: U32::new(BE, section_offsets[index].address as u32),
|
||||
s_size: U32::new(BE, section.data.len() as u32),
|
||||
s_scnptr: U32::new(BE, section_offsets[index].data_offset as u32),
|
||||
s_relptr: U32::new(BE, section_offsets[index].reloc_offset as u32),
|
||||
s_lnnoptr: U32::new(BE, 0),
|
||||
// TODO: If more than 65,534 relocation entries are required, the field
|
||||
// value will be 65535, and an STYP_OVRFLO section header will contain
|
||||
// the actual count of relocation entries in the s_paddr field.
|
||||
s_nreloc: U16::new(BE, section.relocations.len() as u16),
|
||||
s_nlnno: U16::new(BE, 0),
|
||||
s_flags: U32::new(BE, flags),
|
||||
};
|
||||
buffer.write(&section_header);
|
||||
}
|
||||
}
|
||||
|
||||
// Write section data.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
let len = section.data.len();
|
||||
if len != 0 {
|
||||
write_align(buffer, 4);
|
||||
debug_assert_eq!(section_offsets[index].data_offset, buffer.len());
|
||||
buffer.write_bytes(&section.data);
|
||||
}
|
||||
}
|
||||
|
||||
// Write relocations.
|
||||
for (index, section) in self.sections.iter().enumerate() {
|
||||
if !section.relocations.is_empty() {
|
||||
debug_assert_eq!(section_offsets[index].reloc_offset, buffer.len());
|
||||
for reloc in &section.relocations {
|
||||
let rtype = match reloc.kind {
|
||||
RelocationKind::Absolute => xcoff::R_POS,
|
||||
RelocationKind::Relative => xcoff::R_REL,
|
||||
RelocationKind::Got => xcoff::R_TOC,
|
||||
RelocationKind::Xcoff(x) => x,
|
||||
_ => {
|
||||
return Err(Error(format!("unimplemented relocation {:?}", reloc)));
|
||||
}
|
||||
};
|
||||
if is_64 {
|
||||
let xcoff_rel = xcoff::Rel64 {
|
||||
r_vaddr: U64::new(BE, reloc.offset),
|
||||
r_symndx: U32::new(BE, symbol_offsets[reloc.symbol.0].index as u32),
|
||||
// Specifies the bit length of the relocatable reference minus one.
|
||||
r_rsize: (reloc.size - 1),
|
||||
r_rtype: rtype,
|
||||
};
|
||||
buffer.write(&xcoff_rel);
|
||||
} else {
|
||||
let xcoff_rel = xcoff::Rel32 {
|
||||
r_vaddr: U32::new(BE, reloc.offset as u32),
|
||||
r_symndx: U32::new(BE, symbol_offsets[reloc.symbol.0].index as u32),
|
||||
r_rsize: (reloc.size - 1),
|
||||
r_rtype: rtype,
|
||||
};
|
||||
buffer.write(&xcoff_rel);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write symbols.
|
||||
debug_assert_eq!(symtab_offset, buffer.len());
|
||||
for (index, symbol) in self.symbols.iter().enumerate() {
|
||||
let (n_value, section_kind) = if let SymbolSection::Section(id) = symbol.section {
|
||||
(
|
||||
section_offsets[id.0].address + symbol.value,
|
||||
self.sections[id.0].kind,
|
||||
)
|
||||
} else {
|
||||
(symbol.value, SectionKind::Unknown)
|
||||
};
|
||||
let n_scnum = match symbol.section {
|
||||
SymbolSection::None => {
|
||||
debug_assert_eq!(symbol.kind, SymbolKind::File);
|
||||
xcoff::N_DEBUG
|
||||
}
|
||||
SymbolSection::Undefined | SymbolSection::Common => xcoff::N_UNDEF,
|
||||
SymbolSection::Absolute => xcoff::N_ABS,
|
||||
SymbolSection::Section(id) => id.0 as i16 + 1,
|
||||
};
|
||||
let n_sclass = symbol_offsets[index].storage_class;
|
||||
let n_type = if (symbol.scope == SymbolScope::Linkage)
|
||||
&& (n_sclass == xcoff::C_EXT
|
||||
|| n_sclass == xcoff::C_WEAKEXT
|
||||
|| n_sclass == xcoff::C_HIDEXT)
|
||||
{
|
||||
xcoff::SYM_V_HIDDEN
|
||||
} else {
|
||||
0
|
||||
};
|
||||
let n_numaux = symbol_offsets[index].aux_count;
|
||||
if is_64 {
|
||||
let str_id = if n_sclass == xcoff::C_FILE {
|
||||
file_str_id.unwrap()
|
||||
} else {
|
||||
symbol_offsets[index].str_id.unwrap()
|
||||
};
|
||||
let xcoff_sym = xcoff::Symbol64 {
|
||||
n_value: U64::new(BE, n_value),
|
||||
n_offset: U32::new(BE, strtab.get_offset(str_id) as u32),
|
||||
n_scnum: I16::new(BE, n_scnum),
|
||||
n_type: U16::new(BE, n_type),
|
||||
n_sclass,
|
||||
n_numaux,
|
||||
};
|
||||
buffer.write(&xcoff_sym);
|
||||
} else {
|
||||
let mut sym_name = [0; 8];
|
||||
if n_sclass == xcoff::C_FILE {
|
||||
sym_name[..5].copy_from_slice(b".file");
|
||||
} else if symbol.name.len() <= 8 {
|
||||
sym_name[..symbol.name.len()].copy_from_slice(&symbol.name[..]);
|
||||
} else {
|
||||
let str_offset = strtab.get_offset(symbol_offsets[index].str_id.unwrap());
|
||||
sym_name[4..8].copy_from_slice(&u32::to_be_bytes(str_offset as u32));
|
||||
}
|
||||
let xcoff_sym = xcoff::Symbol32 {
|
||||
n_name: sym_name,
|
||||
n_value: U32::new(BE, n_value as u32),
|
||||
n_scnum: I16::new(BE, n_scnum),
|
||||
n_type: U16::new(BE, n_type),
|
||||
n_sclass,
|
||||
n_numaux,
|
||||
};
|
||||
buffer.write(&xcoff_sym);
|
||||
}
|
||||
// Generate auxiliary entries.
|
||||
if n_sclass == xcoff::C_FILE {
|
||||
debug_assert_eq!(n_numaux, 1);
|
||||
let mut x_fname = [0; 8];
|
||||
if symbol.name.len() <= 8 {
|
||||
x_fname[..symbol.name.len()].copy_from_slice(&symbol.name[..]);
|
||||
} else {
|
||||
let str_offset = strtab.get_offset(symbol_offsets[index].str_id.unwrap());
|
||||
x_fname[4..8].copy_from_slice(&u32::to_be_bytes(str_offset as u32));
|
||||
}
|
||||
if is_64 {
|
||||
let file_aux = xcoff::FileAux64 {
|
||||
x_fname,
|
||||
x_fpad: Default::default(),
|
||||
x_ftype: xcoff::XFT_FN,
|
||||
x_freserve: Default::default(),
|
||||
x_auxtype: xcoff::AUX_FILE,
|
||||
};
|
||||
buffer.write(&file_aux);
|
||||
} else {
|
||||
let file_aux = xcoff::FileAux32 {
|
||||
x_fname,
|
||||
x_fpad: Default::default(),
|
||||
x_ftype: xcoff::XFT_FN,
|
||||
x_freserve: Default::default(),
|
||||
};
|
||||
buffer.write(&file_aux);
|
||||
}
|
||||
} else if n_sclass == xcoff::C_EXT
|
||||
|| n_sclass == xcoff::C_WEAKEXT
|
||||
|| n_sclass == xcoff::C_HIDEXT
|
||||
{
|
||||
debug_assert_eq!(n_numaux, 1);
|
||||
let (x_smtyp, x_smclas) = if let SymbolFlags::Xcoff {
|
||||
x_smtyp, x_smclas, ..
|
||||
} = symbol.flags
|
||||
{
|
||||
(x_smtyp, x_smclas)
|
||||
} else {
|
||||
match symbol.kind {
|
||||
SymbolKind::Text => (xcoff::XTY_SD, xcoff::XMC_PR),
|
||||
SymbolKind::Data => {
|
||||
if section_kind == SectionKind::UninitializedData {
|
||||
(xcoff::XTY_CM, xcoff::XMC_BS)
|
||||
} else if section_kind == SectionKind::ReadOnlyData {
|
||||
(xcoff::XTY_SD, xcoff::XMC_RO)
|
||||
} else {
|
||||
(xcoff::XTY_SD, xcoff::XMC_RW)
|
||||
}
|
||||
}
|
||||
SymbolKind::Tls => {
|
||||
if section_kind == SectionKind::UninitializedTls {
|
||||
(xcoff::XTY_CM, xcoff::XMC_UL)
|
||||
} else {
|
||||
(xcoff::XTY_SD, xcoff::XMC_TL)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(Error(format!(
|
||||
"unimplemented symbol `{}` kind {:?}",
|
||||
symbol.name().unwrap_or(""),
|
||||
symbol.kind
|
||||
)));
|
||||
}
|
||||
}
|
||||
};
|
||||
let scnlen = if let SymbolFlags::Xcoff {
|
||||
containing_csect: Some(containing_csect),
|
||||
..
|
||||
} = symbol.flags
|
||||
{
|
||||
symbol_offsets[containing_csect.0].index as u64
|
||||
} else {
|
||||
symbol.size
|
||||
};
|
||||
if is_64 {
|
||||
let csect_aux = xcoff::CsectAux64 {
|
||||
x_scnlen_lo: U32::new(BE, (scnlen & 0xFFFFFFFF) as u32),
|
||||
x_scnlen_hi: U32::new(BE, ((scnlen >> 32) & 0xFFFFFFFF) as u32),
|
||||
x_parmhash: U32::new(BE, 0),
|
||||
x_snhash: U16::new(BE, 0),
|
||||
x_smtyp,
|
||||
x_smclas,
|
||||
pad: 0,
|
||||
x_auxtype: xcoff::AUX_CSECT,
|
||||
};
|
||||
buffer.write(&csect_aux);
|
||||
} else {
|
||||
let csect_aux = xcoff::CsectAux32 {
|
||||
x_scnlen: U32::new(BE, scnlen as u32),
|
||||
x_parmhash: U32::new(BE, 0),
|
||||
x_snhash: U16::new(BE, 0),
|
||||
x_smtyp,
|
||||
x_smclas,
|
||||
x_stab: U32::new(BE, 0),
|
||||
x_snstab: U16::new(BE, 0),
|
||||
};
|
||||
buffer.write(&csect_aux);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write string table.
|
||||
debug_assert_eq!(strtab_offset, buffer.len());
|
||||
buffer.write_bytes(&u32::to_be_bytes(strtab_len as u32));
|
||||
buffer.write_bytes(&strtab_data);
|
||||
|
||||
debug_assert_eq!(offset, buffer.len());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
905
vendor/object/src/xcoff.rs
vendored
Normal file
@@ -0,0 +1,905 @@
|
||||
//! XCOFF definitions
|
||||
//!
|
||||
//! These definitions are independent of read/write support, although we do implement
|
||||
//! some traits useful for those.
|
||||
//!
|
||||
//! This module is the equivalent of /usr/include/xcoff.h, and is based heavily on it.
|
||||
|
||||
#![allow(missing_docs)]
|
||||
|
||||
use crate::endian::{BigEndian as BE, I16, U16, U32, U64};
|
||||
use crate::pod::Pod;
|
||||
|
||||
/// The header at the start of every 32-bit XCOFF file.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FileHeader32 {
|
||||
/// Magic number. Must be 0x01DF.
|
||||
pub f_magic: U16<BE>,
|
||||
/// Number of sections.
|
||||
pub f_nscns: U16<BE>,
|
||||
/// Time and date of file creation.
|
||||
pub f_timdat: U32<BE>,
|
||||
/// Byte offset to symbol table start.
|
||||
pub f_symptr: U32<BE>,
|
||||
/// Number of entries in symbol table.
|
||||
pub f_nsyms: U32<BE>,
|
||||
/// Number of bytes in optional header
|
||||
pub f_opthdr: U16<BE>,
|
||||
/// Extra flags.
|
||||
pub f_flags: U16<BE>,
|
||||
}
|
||||
|
||||
/// The header at the start of every 64-bit XCOFF file.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FileHeader64 {
|
||||
/// Magic number. Must be 0x01F7.
|
||||
pub f_magic: U16<BE>,
|
||||
/// Number of sections.
|
||||
pub f_nscns: U16<BE>,
|
||||
/// Time and date of file creation
|
||||
pub f_timdat: U32<BE>,
|
||||
/// Byte offset to symbol table start.
|
||||
pub f_symptr: U64<BE>,
|
||||
/// Number of bytes in optional header
|
||||
pub f_opthdr: U16<BE>,
|
||||
/// Extra flags.
|
||||
pub f_flags: U16<BE>,
|
||||
/// Number of entries in symbol table.
|
||||
pub f_nsyms: U32<BE>,
|
||||
}
|
||||
|
||||
// Values for `f_magic`.
|
||||
//
|
||||
/// The 64-bit XCOFF magic number.
|
||||
pub const MAGIC_64: u16 = 0x01F7;
|
||||
/// The 32-bit XCOFF magic number.
|
||||
pub const MAGIC_32: u16 = 0x01DF;
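// A minimal illustrative sketch (not part of the original file): choosing the
// header layout from `f_magic`, assuming the first two bytes of the file have
// already been read as a big-endian u16.
pub fn is_xcoff64(f_magic: u16) -> Option<bool> {
    match f_magic {
        MAGIC_64 => Some(true),
        MAGIC_32 => Some(false),
        _ => None,
    }
}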
|
||||
|
||||
// Values for `f_flags`.
|
||||
//
|
||||
/// Indicates that the relocation information for binding has been removed from
|
||||
/// the file.
|
||||
pub const F_RELFLG: u16 = 0x0001;
|
||||
/// Indicates that the file is executable. No unresolved external references exist.
|
||||
pub const F_EXEC: u16 = 0x0002;
|
||||
/// Indicates that line numbers have been stripped from the file by a utility program.
|
||||
pub const F_LNNO: u16 = 0x0004;
|
||||
/// Indicates that the file was profiled with the fdpr command.
|
||||
pub const F_FDPR_PROF: u16 = 0x0010;
|
||||
/// Indicates that the file was reordered with the fdpr command.
|
||||
pub const F_FDPR_OPTI: u16 = 0x0020;
|
||||
/// Indicates that the file uses Very Large Program Support.
|
||||
pub const F_DSA: u16 = 0x0040;
|
||||
/// Indicates that one of the members of the auxiliary header specifying the
|
||||
/// medium page sizes is non-zero.
|
||||
pub const F_VARPG: u16 = 0x0100;
|
||||
/// Indicates the file is dynamically loadable and executable. External references
|
||||
/// are resolved by way of imports, and the file might contain exports and loader
|
||||
/// relocation.
|
||||
pub const F_DYNLOAD: u16 = 0x1000;
|
||||
/// Indicates the file is a shared object (shared library). The file is separately
|
||||
/// loadable. That is, it is not normally bound with other objects, and its loader
|
||||
/// exports symbols are used as automatic import symbols for other object files.
|
||||
pub const F_SHROBJ: u16 = 0x2000;
|
||||
/// If the object file is a member of an archive, it can be loaded by the system
|
||||
/// loader, but the member is ignored by the binder. If the object file is not in
|
||||
/// an archive, this flag has no effect.
|
||||
pub const F_LOADONLY: u16 = 0x4000;
|
||||
|
||||
/// The auxiliary header immediately following file header. If the value of the
|
||||
/// f_opthdr field in the file header is 0, the auxiliary header does not exist.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct AuxHeader32 {
|
||||
/// Flags.
|
||||
pub o_mflag: U16<BE>,
|
||||
/// Version.
|
||||
pub o_vstamp: U16<BE>,
|
||||
/// Text size in bytes.
|
||||
pub o_tsize: U32<BE>,
|
||||
/// Initialized data size in bytes.
|
||||
pub o_dsize: U32<BE>,
|
||||
/// Uninitialized data size in bytes.
|
||||
pub o_bsize: U32<BE>,
|
||||
/// Entry point descriptor (virtual address).
|
||||
pub o_entry: U32<BE>,
|
||||
/// Base address of text (virtual address).
|
||||
pub o_text_start: U32<BE>,
|
||||
/// Base address of data (virtual address).
|
||||
pub o_data_start: U32<BE>,
|
||||
/// Address of TOC anchor.
|
||||
pub o_toc: U32<BE>,
|
||||
/// Section number for entry point.
|
||||
pub o_snentry: U16<BE>,
|
||||
/// Section number for .text.
|
||||
pub o_sntext: U16<BE>,
|
||||
/// Section number for .data.
|
||||
pub o_sndata: U16<BE>,
|
||||
/// Section number for TOC.
|
||||
pub o_sntoc: U16<BE>,
|
||||
/// Section number for loader data.
|
||||
pub o_snloader: U16<BE>,
|
||||
/// Section number for .bss.
|
||||
pub o_snbss: U16<BE>,
|
||||
/// Maximum alignment for .text.
|
||||
pub o_algntext: U16<BE>,
|
||||
/// Maximum alignment for .data.
|
||||
pub o_algndata: U16<BE>,
|
||||
/// Module type field.
|
||||
pub o_modtype: U16<BE>,
|
||||
/// Bit flags - cpu types of objects.
|
||||
pub o_cpuflag: u8,
|
||||
/// Reserved for CPU type.
|
||||
pub o_cputype: u8,
|
||||
/// Maximum stack size allowed (bytes).
|
||||
pub o_maxstack: U32<BE>,
|
||||
/// Maximum data size allowed (bytes).
|
||||
pub o_maxdata: U32<BE>,
|
||||
/// Reserved for debuggers.
|
||||
pub o_debugger: U32<BE>,
|
||||
/// Requested text page size.
|
||||
pub o_textpsize: u8,
|
||||
/// Requested data page size.
|
||||
pub o_datapsize: u8,
|
||||
/// Requested stack page size.
|
||||
pub o_stackpsize: u8,
|
||||
/// Flags and thread-local storage alignment.
|
||||
pub o_flags: u8,
|
||||
/// Section number for .tdata.
|
||||
pub o_sntdata: U16<BE>,
|
||||
/// Section number for .tbss.
|
||||
pub o_sntbss: U16<BE>,
|
||||
}
|
||||
|
||||
/// The auxiliary header immediately following file header. If the value of the
|
||||
/// f_opthdr field in the file header is 0, the auxiliary header does not exist.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct AuxHeader64 {
|
||||
/// Flags.
|
||||
pub o_mflag: U16<BE>,
|
||||
/// Version.
|
||||
pub o_vstamp: U16<BE>,
|
||||
/// Reserved for debuggers.
|
||||
pub o_debugger: U32<BE>,
|
||||
/// Base address of text (virtual address).
|
||||
pub o_text_start: U64<BE>,
|
||||
/// Base address of data (virtual address).
|
||||
pub o_data_start: U64<BE>,
|
||||
/// Address of TOC anchor.
|
||||
pub o_toc: U64<BE>,
|
||||
/// Section number for entry point.
|
||||
pub o_snentry: U16<BE>,
|
||||
/// Section number for .text.
|
||||
pub o_sntext: U16<BE>,
|
||||
/// Section number for .data.
|
||||
pub o_sndata: U16<BE>,
|
||||
/// Section number for TOC.
|
||||
pub o_sntoc: U16<BE>,
|
||||
/// Section number for loader data.
|
||||
pub o_snloader: U16<BE>,
|
||||
/// Section number for .bss.
|
||||
pub o_snbss: U16<BE>,
|
||||
/// Maximum alignment for .text.
|
||||
pub o_algntext: U16<BE>,
|
||||
/// Maximum alignment for .data.
|
||||
pub o_algndata: U16<BE>,
|
||||
/// Module type field.
|
||||
pub o_modtype: U16<BE>,
|
||||
/// Bit flags - cpu types of objects.
|
||||
pub o_cpuflag: u8,
|
||||
/// Reserved for CPU type.
|
||||
pub o_cputype: u8,
|
||||
/// Requested text page size.
|
||||
pub o_textpsize: u8,
|
||||
/// Requested data page size.
|
||||
pub o_datapsize: u8,
|
||||
/// Requested stack page size.
|
||||
pub o_stackpsize: u8,
|
||||
/// Flags and thread-local storage alignment.
|
||||
pub o_flags: u8,
|
||||
/// Text size in bytes.
|
||||
pub o_tsize: U64<BE>,
|
||||
/// Initialized data size in bytes.
|
||||
pub o_dsize: U64<BE>,
|
||||
/// Uninitialized data size in bytes.
|
||||
pub o_bsize: U64<BE>,
|
||||
/// Entry point descriptor (virtual address).
|
||||
pub o_entry: U64<BE>,
|
||||
/// Maximum stack size allowed (bytes).
|
||||
pub o_maxstack: U64<BE>,
|
||||
/// Maximum data size allowed (bytes).
|
||||
pub o_maxdata: U64<BE>,
|
||||
/// Section number for .tdata.
|
||||
pub o_sntdata: U16<BE>,
|
||||
/// Section number for .tbss.
|
||||
pub o_sntbss: U16<BE>,
|
||||
/// XCOFF64 flags.
|
||||
pub o_x64flags: U16<BE>,
|
||||
/// Reserved.
|
||||
pub o_resv3a: U16<BE>,
|
||||
/// Reserved.
|
||||
pub o_resv3: [U32<BE>; 2],
|
||||
}
|
||||
|
||||
/// Some AIX programs generate auxiliary headers for 32-bit object files that
|
||||
/// end after the data_start field.
|
||||
pub const AOUTHSZ_SHORT: u16 = 28;
|
||||
|
||||
/// Section header.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct SectionHeader32 {
|
||||
/// Section name.
|
||||
pub s_name: [u8; 8],
|
||||
/// Physical address.
|
||||
pub s_paddr: U32<BE>,
|
||||
/// Virtual address (same as physical address).
|
||||
pub s_vaddr: U32<BE>,
|
||||
/// Section size.
|
||||
pub s_size: U32<BE>,
|
||||
/// Offset in file to raw data for section.
|
||||
pub s_scnptr: U32<BE>,
|
||||
/// Offset in file to relocation entries for section.
|
||||
pub s_relptr: U32<BE>,
|
||||
/// Offset in file to line number entries for section.
|
||||
pub s_lnnoptr: U32<BE>,
|
||||
/// Number of relocation entries.
|
||||
pub s_nreloc: U16<BE>,
|
||||
/// Number of line number entries.
|
||||
pub s_nlnno: U16<BE>,
|
||||
/// Flags to define the section type.
|
||||
pub s_flags: U32<BE>,
|
||||
}
|
||||
|
||||
/// Section header.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct SectionHeader64 {
|
||||
/// Section name.
|
||||
pub s_name: [u8; 8],
|
||||
/// Physical address.
|
||||
pub s_paddr: U64<BE>,
|
||||
/// Virtual address (same as physical address).
|
||||
pub s_vaddr: U64<BE>,
|
||||
/// Section size.
|
||||
pub s_size: U64<BE>,
|
||||
/// Offset in file to raw data for section.
|
||||
pub s_scnptr: U64<BE>,
|
||||
/// Offset in file to relocation entries for section.
|
||||
pub s_relptr: U64<BE>,
|
||||
/// Offset in file to line number entries for section.
|
||||
pub s_lnnoptr: U64<BE>,
|
||||
/// Number of relocation entries.
|
||||
pub s_nreloc: U32<BE>,
|
||||
/// Number of line number entries.
|
||||
pub s_nlnno: U32<BE>,
|
||||
/// Flags to define the section type.
|
||||
pub s_flags: U32<BE>,
|
||||
/// Reserved.
|
||||
pub s_reserve: U32<BE>,
|
||||
}
|
||||
|
||||
// Values for `s_flags`.
|
||||
//
|
||||
/// "regular" section
|
||||
pub const STYP_REG: u16 = 0x00;
|
||||
/// Specifies a pad section. A section of this type is used to provide alignment
|
||||
/// padding between sections within an XCOFF executable object file. This section
|
||||
/// header type is obsolete since padding is allowed in an XCOFF file without a
|
||||
/// corresponding pad section header.
|
||||
pub const STYP_PAD: u16 = 0x08;
|
||||
/// Specifies a DWARF debugging section, which provides source file and symbol
|
||||
/// information for the symbolic debugger.
|
||||
pub const STYP_DWARF: u16 = 0x10;
|
||||
/// Specifies an executable text (code) section. A section of this type contains
|
||||
/// the executable instructions of a program.
|
||||
pub const STYP_TEXT: u16 = 0x20;
|
||||
/// Specifies an initialized data section. A section of this type contains the
|
||||
/// initialized data and the TOC of a program.
|
||||
pub const STYP_DATA: u16 = 0x40;
|
||||
/// Specifies an uninitialized data section. A section header of this type
|
||||
/// defines the uninitialized data of a program.
|
||||
pub const STYP_BSS: u16 = 0x80;
|
||||
/// Specifies an exception section. A section of this type provides information
|
||||
/// to identify the reason that a trap or exception occurred within an executable
|
||||
/// object program.
|
||||
pub const STYP_EXCEPT: u16 = 0x0100;
|
||||
/// Specifies a comment section. A section of this type provides comments or data
|
||||
/// to special processing utility programs.
|
||||
pub const STYP_INFO: u16 = 0x0200;
|
||||
/// Specifies an initialized thread-local data section.
|
||||
pub const STYP_TDATA: u16 = 0x0400;
|
||||
/// Specifies an uninitialized thread-local data section.
|
||||
pub const STYP_TBSS: u16 = 0x0800;
|
||||
/// Specifies a loader section. A section of this type contains object file
|
||||
/// information for the system loader to load an XCOFF executable. The information
|
||||
/// includes imported symbols, exported symbols, relocation data, type-check
|
||||
/// information, and shared object names.
|
||||
pub const STYP_LOADER: u16 = 0x1000;
|
||||
/// Specifies a debug section. A section of this type contains stabstring
|
||||
/// information used by the symbolic debugger.
|
||||
pub const STYP_DEBUG: u16 = 0x2000;
|
||||
/// Specifies a type-check section. A section of this type contains
|
||||
/// parameter/argument type-check strings used by the binder.
|
||||
pub const STYP_TYPCHK: u16 = 0x4000;
|
||||
/// Specifies a relocation or line-number field overflow section. A section
|
||||
/// header of this type contains the count of relocation entries and line
|
||||
/// number entries for some other section. This section header is required
|
||||
/// when either of the counts exceeds 65,534.
|
||||
pub const STYP_OVRFLO: u16 = 0x8000;
|
||||
|
||||
pub const SSUBTYP_DWINFO: u32 = 0x10000;
|
||||
pub const SSUBTYP_DWLINE: u32 = 0x20000;
|
||||
pub const SSUBTYP_DWPBNMS: u32 = 0x30000;
|
||||
pub const SSUBTYP_DWPBTYP: u32 = 0x40000;
|
||||
pub const SSUBTYP_DWARNGE: u32 = 0x50000;
|
||||
pub const SSUBTYP_DWABREV: u32 = 0x60000;
|
||||
pub const SSUBTYP_DWSTR: u32 = 0x70000;
|
||||
pub const SSUBTYP_DWRNGES: u32 = 0x80000;
|
||||
pub const SSUBTYP_DWLOC: u32 = 0x90000;
|
||||
pub const SSUBTYP_DWFRAME: u32 = 0xA0000;
|
||||
pub const SSUBTYP_DWMAC: u32 = 0xB0000;
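// A minimal illustrative sketch (not part of the original file), assuming the
// layout suggested by the constant values above: the STYP_* section type sits
// in the low 16 bits of `s_flags`, and DWARF sections OR one of the
// SSUBTYP_DW* values into the upper bits.
pub fn section_type_and_dwarf_subtype(s_flags: u32) -> (u16, u32) {
    ((s_flags & 0xFFFF) as u16, s_flags & 0xFFFF_0000)
}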
|
||||
|
||||
pub const SIZEOF_SYMBOL: usize = 18;
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct SymbolBytes(pub [u8; SIZEOF_SYMBOL]);
|
||||
|
||||
/// Symbol table entry.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct Symbol32 {
|
||||
/// Symbol name.
|
||||
///
|
||||
/// If first 4 bytes are 0, then second 4 bytes are offset into string table.
|
||||
pub n_name: [u8; 8],
|
||||
/// Symbol value; storage class-dependent.
|
||||
pub n_value: U32<BE>,
|
||||
/// Section number of symbol.
|
||||
pub n_scnum: I16<BE>,
|
||||
/// Basic and derived type specification.
|
||||
pub n_type: U16<BE>,
|
||||
/// Storage class of symbol.
|
||||
pub n_sclass: u8,
|
||||
/// Number of auxiliary entries.
|
||||
pub n_numaux: u8,
|
||||
}
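// A minimal illustrative sketch (not part of the original file): decoding
// `n_name`. If the first four bytes are zero, the next four are a big-endian
// offset into the string table; otherwise the name is stored inline and padded
// with NULs. `strtab` is assumed to be the whole string table, including its
// leading 4-byte length word, so stored offsets index into it directly; bounds
// checks are elided.
pub fn symbol32_name<'a>(n_name: &'a [u8; 8], strtab: &'a [u8]) -> &'a [u8] {
    let bytes: &'a [u8] = if n_name[..4] == [0; 4] {
        let offset = u32::from_be_bytes([n_name[4], n_name[5], n_name[6], n_name[7]]) as usize;
        &strtab[offset..]
    } else {
        &n_name[..]
    };
    // Trim at the first NUL, if any.
    match bytes.iter().position(|&b| b == 0) {
        Some(end) => &bytes[..end],
        None => bytes,
    }
}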
|
||||
|
||||
/// Symbol table entry.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct Symbol64 {
|
||||
/// Symbol value; storage class-dependent.
|
||||
pub n_value: U64<BE>,
|
||||
/// Offset of the name in string table or .debug section.
|
||||
pub n_offset: U32<BE>,
|
||||
/// Section number of symbol.
|
||||
pub n_scnum: I16<BE>,
|
||||
/// Basic and derived type specification.
|
||||
pub n_type: U16<BE>,
|
||||
/// Storage class of symbol.
|
||||
pub n_sclass: u8,
|
||||
/// Number of auxiliary entries.
|
||||
pub n_numaux: u8,
|
||||
}
|
||||
|
||||
// Values for `n_scnum`.
|
||||
//
|
||||
/// A special symbolic debugging symbol.
|
||||
pub const N_DEBUG: i16 = -2;
|
||||
/// An absolute symbol. The symbol has a value but is not relocatable.
|
||||
pub const N_ABS: i16 = -1;
|
||||
/// An undefined external symbol.
|
||||
pub const N_UNDEF: i16 = 0;
|
||||
|
||||
// Values for `n_type`.
|
||||
//
|
||||
/// Values for visibility as they would appear when encoded in the high 4 bits
|
||||
/// of the 16-bit unsigned n_type field of symbol table entries. Valid for
|
||||
/// 32-bit XCOFF only when the o_vstamp in the auxiliary header is greater than 1.
|
||||
pub const SYM_V_MASK: u16 = 0xF000;
|
||||
pub const SYM_V_INTERNAL: u16 = 0x1000;
|
||||
pub const SYM_V_HIDDEN: u16 = 0x2000;
|
||||
pub const SYM_V_PROTECTED: u16 = 0x3000;
|
||||
pub const SYM_V_EXPORTED: u16 = 0x4000;
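// A minimal illustrative sketch (not part of the original file): the
// visibility lives in the high bits of `n_type` and is extracted with
// `SYM_V_MASK`, e.g. `symbol_visibility(n_type) == SYM_V_HIDDEN`.
pub fn symbol_visibility(n_type: u16) -> u16 {
    n_type & SYM_V_MASK
}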
|
||||
|
||||
// Values for `n_sclass`.
|
||||
//
|
||||
// Storage classes used for symbolic debugging symbols.
|
||||
//
|
||||
/// Source file name and compiler information.
|
||||
pub const C_FILE: u8 = 103;
|
||||
/// Beginning of include file.
|
||||
pub const C_BINCL: u8 = 108;
|
||||
/// Ending of include file.
|
||||
pub const C_EINCL: u8 = 109;
|
||||
/// Global variable.
|
||||
pub const C_GSYM: u8 = 128;
|
||||
/// Statically allocated symbol.
|
||||
pub const C_STSYM: u8 = 133;
|
||||
/// Beginning of common block.
|
||||
pub const C_BCOMM: u8 = 135;
|
||||
/// End of common block.
|
||||
pub const C_ECOMM: u8 = 137;
|
||||
/// Alternate entry.
|
||||
pub const C_ENTRY: u8 = 141;
|
||||
/// Beginning of static block.
|
||||
pub const C_BSTAT: u8 = 143;
|
||||
/// End of static block.
|
||||
pub const C_ESTAT: u8 = 144;
|
||||
/// Global thread-local variable.
|
||||
pub const C_GTLS: u8 = 145;
|
||||
/// Static thread-local variable.
|
||||
pub const C_STTLS: u8 = 146;
|
||||
/// DWARF section symbol.
|
||||
pub const C_DWARF: u8 = 112;
|
||||
//
|
||||
// Storage classes used for absolute symbols.
|
||||
//
|
||||
/// Automatic variable allocated on stack.
|
||||
pub const C_LSYM: u8 = 129;
|
||||
/// Argument to subroutine allocated on stack.
|
||||
pub const C_PSYM: u8 = 130;
|
||||
/// Register variable.
|
||||
pub const C_RSYM: u8 = 131;
|
||||
/// Argument to function or procedure stored in register.
|
||||
pub const C_RPSYM: u8 = 132;
|
||||
/// Local member of common block.
|
||||
pub const C_ECOML: u8 = 136;
|
||||
/// Function or procedure.
|
||||
pub const C_FUN: u8 = 142;
|
||||
//
|
||||
// Storage classes used for undefined external symbols or symbols of general sections.
|
||||
//
|
||||
/// External symbol.
|
||||
pub const C_EXT: u8 = 2;
|
||||
/// Weak external symbol.
|
||||
pub const C_WEAKEXT: u8 = 111;
|
||||
//
|
||||
// Storage classes used for symbols of general sections.
|
||||
//
|
||||
/// Symbol table entry marked for deletion.
|
||||
pub const C_NULL: u8 = 0;
|
||||
/// Static.
|
||||
pub const C_STAT: u8 = 3;
|
||||
/// Beginning or end of inner block.
|
||||
pub const C_BLOCK: u8 = 100;
|
||||
/// Beginning or end of function.
|
||||
pub const C_FCN: u8 = 101;
|
||||
/// Un-named external symbol.
|
||||
pub const C_HIDEXT: u8 = 107;
|
||||
/// Comment string in .info section.
|
||||
pub const C_INFO: u8 = 110;
|
||||
/// Declaration of object (type).
|
||||
pub const C_DECL: u8 = 140;
|
||||
//
|
||||
// Storage classes - Obsolete/Undocumented.
|
||||
//
|
||||
/// Automatic variable.
|
||||
pub const C_AUTO: u8 = 1;
|
||||
/// Register variable.
|
||||
pub const C_REG: u8 = 4;
|
||||
/// External definition.
|
||||
pub const C_EXTDEF: u8 = 5;
|
||||
/// Label.
|
||||
pub const C_LABEL: u8 = 6;
|
||||
/// Undefined label.
|
||||
pub const C_ULABEL: u8 = 7;
|
||||
/// Member of structure.
|
||||
pub const C_MOS: u8 = 8;
|
||||
/// Function argument.
|
||||
pub const C_ARG: u8 = 9;
|
||||
/// Structure tag.
|
||||
pub const C_STRTAG: u8 = 10;
|
||||
/// Member of union.
|
||||
pub const C_MOU: u8 = 11;
|
||||
/// Union tag.
|
||||
pub const C_UNTAG: u8 = 12;
|
||||
/// Type definition.
|
||||
pub const C_TPDEF: u8 = 13;
|
||||
/// Undefined static.
|
||||
pub const C_USTATIC: u8 = 14;
|
||||
/// Enumeration tag.
|
||||
pub const C_ENTAG: u8 = 15;
|
||||
/// Member of enumeration.
|
||||
pub const C_MOE: u8 = 16;
|
||||
/// Register parameter.
|
||||
pub const C_REGPARM: u8 = 17;
|
||||
/// Bit field.
|
||||
pub const C_FIELD: u8 = 18;
|
||||
/// End of structure.
|
||||
pub const C_EOS: u8 = 102;
|
||||
/// Duplicate tag.
|
||||
pub const C_ALIAS: u8 = 105;
|
||||
/// Special storage class for external.
|
||||
pub const C_HIDDEN: u8 = 106;
|
||||
/// Physical end of function.
|
||||
pub const C_EFCN: u8 = 255;
|
||||
/// Reserved.
|
||||
pub const C_TCSYM: u8 = 134;
|
||||
|
||||
/// File Auxiliary Entry for C_FILE Symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FileAux32 {
|
||||
/// The source file name or compiler-related string.
|
||||
///
|
||||
/// If first 4 bytes are 0, then second 4 bytes are offset into string table.
|
||||
pub x_fname: [u8; 8],
|
||||
/// Pad size for file name.
|
||||
pub x_fpad: [u8; 6],
|
||||
/// The source-file string type.
|
||||
pub x_ftype: u8,
|
||||
/// Reserved.
|
||||
pub x_freserve: [u8; 3],
|
||||
}
|
||||
|
||||
/// File Auxiliary Entry for C_FILE Symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FileAux64 {
|
||||
/// The source file name or compiler-related string.
|
||||
///
|
||||
/// If first 4 bytes are 0, then second 4 bytes are offset into string table.
|
||||
pub x_fname: [u8; 8],
|
||||
/// Pad size for file name.
|
||||
pub x_fpad: [u8; 6],
|
||||
/// The source-file string type.
|
||||
pub x_ftype: u8,
|
||||
/// Reserved.
|
||||
pub x_freserve: [u8; 2],
|
||||
/// Specifies the type of auxiliary entry. Contains _AUX_FILE for this auxiliary entry.
|
||||
pub x_auxtype: u8,
|
||||
}
|
||||
|
||||
// Values for `x_ftype`.
|
||||
//
|
||||
/// Specifies the source-file name.
|
||||
pub const XFT_FN: u8 = 0;
|
||||
/// Specifies the compiler time stamp.
|
||||
pub const XFT_CT: u8 = 1;
|
||||
/// Specifies the compiler version number.
|
||||
pub const XFT_CV: u8 = 2;
|
||||
/// Specifies compiler-defined information.
|
||||
pub const XFT_CD: u8 = 128;
|
||||
|
||||
/// Csect auxiliary entry for C_EXT, C_WEAKEXT, and C_HIDEXT symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct CsectAux32 {
|
||||
/// Section length.
|
||||
pub x_scnlen: U32<BE>,
|
||||
/// Offset of parameter type-check hash in .typchk section.
|
||||
pub x_parmhash: U32<BE>,
|
||||
/// .typchk section number.
|
||||
pub x_snhash: U16<BE>,
|
||||
/// Symbol alignment and type.
|
||||
pub x_smtyp: u8,
|
||||
/// Storage mapping class.
|
||||
pub x_smclas: u8,
|
||||
/// Reserved.
|
||||
pub x_stab: U32<BE>,
|
||||
/// Reserved (`x_snstab`).
|
||||
pub x_snstab: U16<BE>,
|
||||
}
|
||||
|
||||
/// Csect auxiliary entry for C_EXT, C_WEAKEXT, and C_HIDEXT symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct CsectAux64 {
|
||||
/// Low 4 bytes of section length.
|
||||
pub x_scnlen_lo: U32<BE>,
|
||||
/// Offset of parameter type-check hash in .typchk section.
|
||||
pub x_parmhash: U32<BE>,
|
||||
/// .typchk section number.
|
||||
pub x_snhash: U16<BE>,
|
||||
/// Symbol alignment and type.
|
||||
pub x_smtyp: u8,
|
||||
/// Storage mapping class.
|
||||
pub x_smclas: u8,
|
||||
/// High 4 bytes of section length.
|
||||
pub x_scnlen_hi: U32<BE>,
|
||||
/// Reserved.
|
||||
pub pad: u8,
|
||||
/// Contains _AUX_CSECT; indicates type of auxiliary entry.
|
||||
pub x_auxtype: u8,
|
||||
}
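// A minimal illustrative sketch (not part of the original file): `CsectAux64`
// stores the csect length as two 32-bit halves; the writer earlier in this
// commit splits the value the same way.
pub fn csect_aux64_scnlen(aux: &CsectAux64) -> u64 {
    (u64::from(aux.x_scnlen_hi.get(BE)) << 32) | u64::from(aux.x_scnlen_lo.get(BE))
}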
|
||||
|
||||
// Values for `x_smtyp`.
|
||||
//
|
||||
/// External reference.
|
||||
pub const XTY_ER: u8 = 0;
|
||||
/// Csect definition for initialized storage.
|
||||
pub const XTY_SD: u8 = 1;
|
||||
/// Defines an entry point to an initialized csect.
|
||||
pub const XTY_LD: u8 = 2;
|
||||
/// Common csect definition. For uninitialized storage.
|
||||
pub const XTY_CM: u8 = 3;
|
||||
|
||||
// Values for `x_smclas`.
|
||||
//
|
||||
// READ ONLY CLASSES
|
||||
//
|
||||
/// Program Code
|
||||
pub const XMC_PR: u8 = 0;
|
||||
/// Read Only Constant
|
||||
pub const XMC_RO: u8 = 1;
|
||||
/// Debug Dictionary Table
|
||||
pub const XMC_DB: u8 = 2;
|
||||
/// Global Linkage (Interfile Interface Code)
|
||||
pub const XMC_GL: u8 = 6;
|
||||
/// Extended Operation (Pseudo Machine Instruction)
|
||||
pub const XMC_XO: u8 = 7;
|
||||
/// Supervisor Call (32-bit process only)
|
||||
pub const XMC_SV: u8 = 8;
|
||||
/// Supervisor Call for 64-bit process
|
||||
pub const XMC_SV64: u8 = 17;
|
||||
/// Supervisor Call for both 32- and 64-bit processes
|
||||
pub const XMC_SV3264: u8 = 18;
|
||||
/// Traceback Index csect
|
||||
pub const XMC_TI: u8 = 12;
|
||||
/// Traceback Table csect
|
||||
pub const XMC_TB: u8 = 13;
|
||||
//
|
||||
// READ WRITE CLASSES
|
||||
//
|
||||
/// Read Write Data
|
||||
pub const XMC_RW: u8 = 5;
|
||||
/// TOC Anchor for TOC Addressability
|
||||
pub const XMC_TC0: u8 = 15;
|
||||
/// General TOC item
|
||||
pub const XMC_TC: u8 = 3;
|
||||
/// Scalar data item in the TOC
|
||||
pub const XMC_TD: u8 = 16;
|
||||
/// Descriptor csect
|
||||
pub const XMC_DS: u8 = 10;
|
||||
/// Unclassified - Treated as Read Write
|
||||
pub const XMC_UA: u8 = 4;
|
||||
/// BSS class (uninitialized static internal)
|
||||
pub const XMC_BS: u8 = 9;
|
||||
/// Un-named Fortran Common
|
||||
pub const XMC_UC: u8 = 11;
|
||||
/// Initialized thread-local variable
|
||||
pub const XMC_TL: u8 = 20;
|
||||
/// Uninitialized thread-local variable
|
||||
pub const XMC_UL: u8 = 21;
|
||||
/// Symbol mapped at the end of TOC
|
||||
pub const XMC_TE: u8 = 22;
|
||||
|
||||
/// Function auxiliary entry.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FunAux32 {
|
||||
/// File offset to exception table entry.
|
||||
pub x_exptr: U32<BE>,
|
||||
/// Size of function in bytes.
|
||||
pub x_fsize: U32<BE>,
|
||||
/// File pointer to line number
|
||||
pub x_lnnoptr: U32<BE>,
|
||||
/// Symbol table index of next entry beyond this function.
|
||||
pub x_endndx: U32<BE>,
|
||||
/// Pad
|
||||
pub pad: U16<BE>,
|
||||
}
|
||||
|
||||
/// Function auxiliary entry.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct FunAux64 {
|
||||
/// File pointer to line number
|
||||
pub x_lnnoptr: U64<BE>,
|
||||
/// Size of function in bytes.
|
||||
pub x_fsize: U32<BE>,
|
||||
/// Symbol table index of next entry beyond this function.
|
||||
pub x_endndx: U32<BE>,
|
||||
/// Pad
|
||||
pub pad: u8,
|
||||
/// Contains _AUX_FCN; Type of auxiliary entry.
|
||||
pub x_auxtype: u8,
|
||||
}
|
||||
|
||||
/// Exception auxiliary entry. (XCOFF64 only)
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct ExpAux {
|
||||
/// File offset to exception table entry.
|
||||
pub x_exptr: U64<BE>,
|
||||
/// Size of function in bytes.
|
||||
pub x_fsize: U32<BE>,
|
||||
/// Symbol table index of next entry beyond this function.
|
||||
pub x_endndx: U32<BE>,
|
||||
/// Pad
|
||||
pub pad: u8,
|
||||
/// Contains _AUX_EXCEPT; Type of auxiliary entry
|
||||
pub x_auxtype: u8,
|
||||
}
|
||||
|
||||
/// Block auxiliary entry for the C_BLOCK and C_FCN Symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct BlockAux32 {
|
||||
/// Reserved.
|
||||
pub pad: [u8; 2],
|
||||
/// High-order 2 bytes of the source line number.
|
||||
pub x_lnnohi: U16<BE>,
|
||||
/// Low-order 2 bytes of the source line number.
|
||||
pub x_lnnolo: U16<BE>,
|
||||
/// Reserved.
|
||||
pub pad2: [u8; 12],
|
||||
}
|
||||
|
||||
/// Block auxiliary entry for the C_BLOCK and C_FCN Symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct BlockAux64 {
|
||||
/// Source line number.
|
||||
pub x_lnno: U32<BE>,
|
||||
/// Reserved.
|
||||
pub pad: [u8; 13],
|
||||
/// Contains _AUX_SYM; Type of auxiliary entry.
|
||||
pub x_auxtype: u8,
|
||||
}
|
||||
|
||||
/// Section auxiliary entry for the C_STAT Symbol. (XCOFF32 Only)
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct StatAux {
|
||||
/// Section length.
|
||||
pub x_scnlen: U32<BE>,
|
||||
/// Number of relocation entries.
|
||||
pub x_nreloc: U16<BE>,
|
||||
/// Number of line numbers.
|
||||
pub x_nlinno: U16<BE>,
|
||||
/// Reserved.
|
||||
pub pad: [u8; 10],
|
||||
}
|
||||
|
||||
/// Section auxiliary entry Format for C_DWARF symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct DwarfAux32 {
|
||||
/// Length of portion of section represented by symbol.
|
||||
pub x_scnlen: U32<BE>,
|
||||
/// Reserved.
|
||||
pub pad: [u8; 4],
|
||||
/// Number of relocation entries in section.
|
||||
pub x_nreloc: U32<BE>,
|
||||
/// Reserved.
|
||||
pub pad2: [u8; 6],
|
||||
}
|
||||
|
||||
/// Section auxiliary entry Format for C_DWARF symbols.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct DwarfAux64 {
|
||||
/// Length of portion of section represented by symbol.
|
||||
pub x_scnlen: U64<BE>,
|
||||
/// Number of relocation entries in section.
|
||||
pub x_nreloc: U64<BE>,
|
||||
/// Reserved.
|
||||
pub pad: u8,
|
||||
/// Contains _AUX_SECT; Type of Auxiliary entry.
|
||||
pub x_auxtype: u8,
|
||||
}
|
||||
|
||||
// Values for `x_auxtype`
|
||||
//
|
||||
/// Identifies an exception auxiliary entry.
|
||||
pub const AUX_EXCEPT: u8 = 255;
|
||||
/// Identifies a function auxiliary entry.
|
||||
pub const AUX_FCN: u8 = 254;
|
||||
/// Identifies a symbol auxiliary entry.
|
||||
pub const AUX_SYM: u8 = 253;
|
||||
/// Identifies a file auxiliary entry.
|
||||
pub const AUX_FILE: u8 = 252;
|
||||
/// Identifies a csect auxiliary entry.
|
||||
pub const AUX_CSECT: u8 = 251;
|
||||
/// Identifies a SECT auxiliary entry.
|
||||
pub const AUX_SECT: u8 = 250;
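// A minimal illustrative sketch (not part of the original file): XCOFF64
// auxiliary entries are distinguished by their trailing `x_auxtype` byte;
// XCOFF32 entries carry no such tag and are interpreted from the owning
// symbol's storage class instead.
pub fn aux_type_name(x_auxtype: u8) -> Option<&'static str> {
    match x_auxtype {
        AUX_EXCEPT => Some("exception"),
        AUX_FCN => Some("function"),
        AUX_SYM => Some("symbol"),
        AUX_FILE => Some("file"),
        AUX_CSECT => Some("csect"),
        AUX_SECT => Some("section"),
        _ => None,
    }
}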
|
||||
|
||||
/// Relocation table entry
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct Rel32 {
|
||||
/// Virtual address (position) in section to be relocated.
|
||||
pub r_vaddr: U32<BE>,
|
||||
/// Symbol table index of item that is referenced.
|
||||
pub r_symndx: U32<BE>,
|
||||
/// Relocation size and information.
|
||||
pub r_rsize: u8,
|
||||
/// Relocation type.
|
||||
pub r_rtype: u8,
|
||||
}
|
||||
|
||||
/// Relocation table entry
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[repr(C)]
|
||||
pub struct Rel64 {
|
||||
/// Virtual address (position) in section to be relocated.
|
||||
pub r_vaddr: U64<BE>,
|
||||
/// Symbol table index of item that is referenced.
|
||||
pub r_symndx: U32<BE>,
|
||||
/// Relocation size and information.
|
||||
pub r_rsize: u8,
|
||||
/// Relocation type.
|
||||
pub r_rtype: u8,
|
||||
}
|
||||
|
||||
// Values for `r_rtype`.
|
||||
//
|
||||
/// Positive relocation.
|
||||
pub const R_POS: u8 = 0x00;
|
||||
/// Positive indirect load relocation.
|
||||
pub const R_RL: u8 = 0x0c;
|
||||
/// Positive load address relocation. Modifiable instruction.
|
||||
pub const R_RLA: u8 = 0x0d;
|
||||
/// Negative relocation.
|
||||
pub const R_NEG: u8 = 0x01;
|
||||
/// Relative to self relocation.
|
||||
pub const R_REL: u8 = 0x02;
|
||||
/// Relative to the TOC relocation.
|
||||
pub const R_TOC: u8 = 0x03;
|
||||
/// TOC relative indirect load relocation.
|
||||
pub const R_TRL: u8 = 0x12;
|
||||
/// Relative to the TOC or to the thread-local storage base relocation.
|
||||
pub const R_TRLA: u8 = 0x13;
|
||||
/// Global linkage-external TOC address relocation.
|
||||
pub const R_GL: u8 = 0x05;
|
||||
/// Local object TOC address relocation.
|
||||
pub const R_TCL: u8 = 0x06;
|
||||
/// A non-relocating relocation.
|
||||
pub const R_REF: u8 = 0x0f;
|
||||
/// Branch absolute relocation. References a non-modifiable instruction.
|
||||
pub const R_BA: u8 = 0x08;
|
||||
/// Branch relative to self relocation. References a non-modifiable instruction.
|
||||
pub const R_BR: u8 = 0x0a;
|
||||
/// Branch absolute relocation. References a modifiable instruction.
|
||||
pub const R_RBA: u8 = 0x18;
|
||||
/// Branch relative to self relocation. References a modifiable instruction.
|
||||
pub const R_RBR: u8 = 0x1a;
|
||||
/// General-dynamic reference to TLS symbol.
|
||||
pub const R_TLS: u8 = 0x20;
|
||||
/// Initial-exec reference to TLS symbol.
|
||||
pub const R_TLS_IE: u8 = 0x21;
|
||||
/// Local-dynamic reference to TLS symbol.
|
||||
pub const R_TLS_LD: u8 = 0x22;
|
||||
/// Local-exec reference to TLS symbol.
|
||||
pub const R_TLS_LE: u8 = 0x23;
|
||||
/// Module reference to TLS.
|
||||
pub const R_TLSM: u8 = 0x24;
|
||||
/// Module reference to the local TLS storage.
|
||||
pub const R_TLSML: u8 = 0x25;
|
||||
/// Relative to TOC upper.
|
||||
pub const R_TOCU: u8 = 0x30;
|
||||
/// Relative to TOC lower.
|
||||
pub const R_TOCL: u8 = 0x31;
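// A minimal illustrative sketch (not part of the original file), assuming the
// reference length occupies the low six bits of `r_rsize` (stored as length
// minus one, as the writer earlier in this commit does) while the high bits
// carry flags such as signedness.
pub fn relocation_bit_length(r_rsize: u8) -> u8 {
    (r_rsize & 0x3F) + 1
}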
|
||||
|
||||
unsafe_impl_pod!(
|
||||
FileHeader32,
|
||||
FileHeader64,
|
||||
AuxHeader32,
|
||||
AuxHeader64,
|
||||
SectionHeader32,
|
||||
SectionHeader64,
|
||||
SymbolBytes,
|
||||
Symbol32,
|
||||
Symbol64,
|
||||
FileAux32,
|
||||
FileAux64,
|
||||
CsectAux32,
|
||||
CsectAux64,
|
||||
FunAux32,
|
||||
FunAux64,
|
||||
ExpAux,
|
||||
BlockAux32,
|
||||
BlockAux64,
|
||||
StatAux,
|
||||
DwarfAux32,
|
||||
DwarfAux64,
|
||||
Rel32,
|
||||
Rel64,
|
||||
);
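// A minimal illustrative sketch (not part of the original file): the `Pod`
// impls above allow these structures to be viewed directly over a byte buffer
// via the crate's `pod::from_bytes` helper; size and alignment errors are
// reduced to `None` here.
pub fn file_header64(data: &[u8]) -> Option<&FileHeader64> {
    let (header, _rest) = crate::pod::from_bytes::<FileHeader64>(data).ok()?;
    if header.f_magic.get(BE) == MAGIC_64 {
        Some(header)
    } else {
        None
    }
}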
|
||||
2
vendor/object/tests/integration.rs
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
mod read;
|
||||
mod round_trip;
|
||||
25
vendor/object/tests/parse_self.rs
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
#![cfg(feature = "read")]
|
||||
use object::{File, Object};
|
||||
use std::{env, fs};
|
||||
|
||||
#[test]
|
||||
fn parse_self() {
|
||||
let exe = env::current_exe().unwrap();
|
||||
let data = fs::read(exe).unwrap();
|
||||
let object = File::parse(&*data).unwrap();
|
||||
assert!(object.entry() != 0);
|
||||
assert!(object.sections().count() != 0);
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[test]
|
||||
fn parse_self_cache() {
|
||||
use object::read::{ReadCache, ReadRef};
|
||||
let exe = env::current_exe().unwrap();
|
||||
let file = fs::File::open(exe).unwrap();
|
||||
let cache = ReadCache::new(file);
|
||||
let data = cache.range(0, cache.len().unwrap());
|
||||
let object = File::parse(data).unwrap();
|
||||
assert!(object.entry() != 0);
|
||||
assert!(object.sections().count() != 0);
|
||||
}
|
||||
23
vendor/object/tests/read/coff.rs
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
use object::{pe, read, Object, ObjectSection};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(feature = "coff")]
|
||||
#[test]
|
||||
fn coff_extended_relocations() {
|
||||
let path_to_obj: PathBuf = ["testfiles", "coff", "relocs_overflow.o"].iter().collect();
|
||||
let contents = fs::read(&path_to_obj).expect("Could not read relocs_overflow.o");
|
||||
let file =
|
||||
read::coff::CoffFile::<_>::parse(&contents[..]).expect("Could not parse relocs_overflow.o");
|
||||
let code_section = file
|
||||
.section_by_name(".text")
|
||||
.expect("Could not find .text section in relocs_overflow.o");
|
||||
match code_section.flags() {
|
||||
object::SectionFlags::Coff { characteristics } => {
|
||||
assert!(characteristics & pe::IMAGE_SCN_LNK_NRELOC_OVFL != 0)
|
||||
}
|
||||
_ => panic!("Invalid section flags flavour."),
|
||||
};
|
||||
let relocations = code_section.relocations().collect::<Vec<_>>();
|
||||
assert_eq!(relocations.len(), 65536);
|
||||
}
|
||||
3
vendor/object/tests/read/mod.rs
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
#![cfg(feature = "read")]
|
||||
|
||||
mod coff;
|
||||
255
vendor/object/tests/round_trip/bss.rs
vendored
Normal file
@@ -0,0 +1,255 @@
|
||||
#![cfg(all(feature = "read", feature = "write"))]
|
||||
|
||||
use object::read::{Object, ObjectSection, ObjectSymbol};
|
||||
use object::{read, write};
|
||||
use object::{
|
||||
Architecture, BinaryFormat, Endianness, SectionKind, SymbolFlags, SymbolKind, SymbolScope,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn coff_x86_64_bss() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let section = object.section_id(write::StandardSection::UninitializedData);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v1".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 18, 4);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v2".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 34, 8);
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"bss.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Coff);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let bss = sections.next().unwrap();
|
||||
println!("{:?}", bss);
|
||||
let bss_index = bss.index();
|
||||
assert_eq!(bss.name(), Ok(".bss"));
|
||||
assert_eq!(bss.kind(), SectionKind::UninitializedData);
|
||||
assert_eq!(bss.size(), 58);
|
||||
assert_eq!(bss.data(), Ok(&[][..]));
|
||||
|
||||
let section = sections.next();
|
||||
assert!(section.is_none(), "unexpected section {:?}", section);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("v1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 0);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("v2"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 24);
|
||||
|
||||
let symbol = symbols.next();
|
||||
assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn elf_x86_64_bss() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let section = object.section_id(write::StandardSection::UninitializedData);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v1".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 18, 4);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v2".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 34, 8);
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"bss.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Elf);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
assert_eq!(section.name(), Ok(""));
|
||||
assert_eq!(section.kind(), SectionKind::Metadata);
|
||||
assert_eq!(section.address(), 0);
|
||||
assert_eq!(section.size(), 0);
|
||||
|
||||
let bss = sections.next().unwrap();
|
||||
println!("{:?}", bss);
|
||||
let bss_index = bss.index();
|
||||
assert_eq!(bss.name(), Ok(".bss"));
|
||||
assert_eq!(bss.kind(), SectionKind::UninitializedData);
|
||||
assert_eq!(bss.size(), 58);
|
||||
assert_eq!(bss.data(), Ok(&[][..]));
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok(""));
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("v1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 0);
|
||||
assert_eq!(symbol.size(), 18);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("v2"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 24);
|
||||
assert_eq!(symbol.size(), 34);
|
||||
|
||||
let symbol = symbols.next();
|
||||
assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn macho_x86_64_bss() {
|
||||
let mut object = write::Object::new(
|
||||
BinaryFormat::MachO,
|
||||
Architecture::X86_64,
|
||||
Endianness::Little,
|
||||
);
|
||||
|
||||
let section = object.section_id(write::StandardSection::UninitializedData);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v1".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 18, 4);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"v2".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 34, 8);
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"bss.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::MachO);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let bss = sections.next().unwrap();
|
||||
println!("{:?}", bss);
|
||||
let bss_index = bss.index();
|
||||
assert_eq!(bss.name(), Ok("__bss"));
|
||||
assert_eq!(bss.segment_name(), Ok(Some("__DATA")));
|
||||
assert_eq!(bss.kind(), SectionKind::UninitializedData);
|
||||
assert_eq!(bss.size(), 58);
|
||||
assert_eq!(bss.data(), Ok(&[][..]));
|
||||
|
||||
let section = sections.next();
|
||||
assert!(section.is_none(), "unexpected section {:?}", section);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("_v1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 0);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("_v2"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(bss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 24);
|
||||
|
||||
let symbol = symbols.next();
|
||||
assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
|
||||
}
|
||||
56
vendor/object/tests/round_trip/coff.rs
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
use object::read::{Object, ObjectSection};
|
||||
use object::{read, write};
|
||||
use object::{
|
||||
Architecture, BinaryFormat, Endianness, RelocationEncoding, RelocationKind, SymbolFlags,
|
||||
SymbolKind, SymbolScope,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn reloc_overflow() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);
|
||||
let text = object.section_id(write::StandardSection::Text);
|
||||
object.append_section_data(text, &[0; 4], 4);
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"f".to_vec(),
|
||||
value: 0,
|
||||
size: 4,
|
||||
kind: SymbolKind::Text,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Section(text),
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
for i in 0..0x10000 {
|
||||
object
|
||||
.add_relocation(
|
||||
text,
|
||||
write::Relocation {
|
||||
offset: i,
|
||||
size: 64,
|
||||
kind: RelocationKind::Absolute,
|
||||
encoding: RelocationEncoding::Generic,
|
||||
symbol,
|
||||
addend: 0,
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"reloc_overflow.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Coff);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let section = object.sections().next().unwrap();
|
||||
assert_eq!(section.name(), Ok(".text"));
|
||||
|
||||
let mut i = 0;
|
||||
for (offset, _relocation) in section.relocations() {
|
||||
assert_eq!(offset, i);
|
||||
i += 1;
|
||||
}
|
||||
assert_eq!(i, 0x10000);
|
||||
}
|
||||
225
vendor/object/tests/round_trip/comdat.rs
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
#![cfg(all(feature = "read", feature = "write"))]
|
||||
|
||||
use object::pe;
|
||||
use object::read::{Object, ObjectComdat, ObjectSection, ObjectSymbol};
|
||||
use object::{read, write};
|
||||
use object::{
|
||||
Architecture, BinaryFormat, ComdatKind, Endianness, SectionKind, SymbolFlags, SymbolKind,
|
||||
SymbolScope,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn coff_x86_64_comdat() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let (section1, offset) =
|
||||
object.add_subsection(write::StandardSection::Text, b"s1", &[0, 1, 2, 3], 4);
|
||||
object.section_symbol(section1);
|
||||
let (section2, _) =
|
||||
object.add_subsection(write::StandardSection::Data, b"s1", &[0, 1, 2, 3], 4);
|
||||
object.section_symbol(section2);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"s1".to_vec(),
|
||||
value: offset,
|
||||
size: 4,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Section(section1),
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
|
||||
object.add_comdat(write::Comdat {
|
||||
kind: ComdatKind::NoDuplicates,
|
||||
symbol,
|
||||
sections: vec![section1, section2],
|
||||
});
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"comdat.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Coff);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let section1 = sections.next().unwrap();
|
||||
println!("{:?}", section1);
|
||||
let section1_index = section1.index();
|
||||
assert_eq!(section1.name(), Ok(".text$s1"));
|
||||
assert_eq!(section1.kind(), SectionKind::Text);
|
||||
assert_eq!(section1.address(), 0);
|
||||
assert_eq!(section1.size(), 4);
|
||||
|
||||
let section2 = sections.next().unwrap();
|
||||
println!("{:?}", section2);
|
||||
let section2_index = section2.index();
|
||||
assert_eq!(section2.name(), Ok(".data$s1"));
|
||||
assert_eq!(section2.kind(), SectionKind::Data);
|
||||
assert_eq!(section2.address(), 0);
|
||||
assert_eq!(section2.size(), 4);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok(".text$s1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Section);
|
||||
assert_eq!(
|
||||
symbol.section(),
|
||||
read::SymbolSection::Section(section1.index())
|
||||
);
|
||||
assert_eq!(
|
||||
symbol.flags(),
|
||||
SymbolFlags::CoffSection {
|
||||
selection: pe::IMAGE_COMDAT_SELECT_NODUPLICATES,
|
||||
associative_section: None
|
||||
}
|
||||
);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok(".data$s1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Section);
|
||||
assert_eq!(
|
||||
symbol.section(),
|
||||
read::SymbolSection::Section(section2.index())
|
||||
);
|
||||
assert_eq!(
|
||||
symbol.flags(),
|
||||
SymbolFlags::CoffSection {
|
||||
selection: pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE,
|
||||
associative_section: Some(section1_index)
|
||||
}
|
||||
);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
let symbol_index = symbol.index();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("s1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(
|
||||
symbol.section(),
|
||||
read::SymbolSection::Section(section1.index())
|
||||
);
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 0);
|
||||
|
||||
let symbol = symbols.next();
|
||||
assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
|
||||
|
||||
let mut comdats = object.comdats();
|
||||
|
||||
let comdat = comdats.next().unwrap();
|
||||
println!("{:?}", comdat);
|
||||
assert_eq!(comdat.kind(), ComdatKind::NoDuplicates);
|
||||
assert_eq!(comdat.symbol(), symbol_index);
|
||||
|
||||
let mut comdat_sections = comdat.sections();
|
||||
assert_eq!(comdat_sections.next(), Some(section1_index));
|
||||
assert_eq!(comdat_sections.next(), Some(section2_index));
|
||||
assert_eq!(comdat_sections.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn elf_x86_64_comdat() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let (section1, offset) =
|
||||
object.add_subsection(write::StandardSection::Text, b"s1", &[0, 1, 2, 3], 4);
|
||||
let (section2, _) =
|
||||
object.add_subsection(write::StandardSection::Data, b"s1", &[0, 1, 2, 3], 4);
|
||||
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"s1".to_vec(),
|
||||
value: offset,
|
||||
size: 4,
|
||||
kind: SymbolKind::Data,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Section(section1),
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
|
||||
object.add_comdat(write::Comdat {
|
||||
kind: ComdatKind::Any,
|
||||
symbol,
|
||||
sections: vec![section1, section2],
|
||||
});
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"comdat.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Elf);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
assert_eq!(section.name(), Ok(""));
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
assert_eq!(section.name(), Ok(".group"));
|
||||
|
||||
let section1 = sections.next().unwrap();
|
||||
println!("{:?}", section1);
|
||||
let section1_index = section1.index();
|
||||
assert_eq!(section1.name(), Ok(".text.s1"));
|
||||
assert_eq!(section1.kind(), SectionKind::Text);
|
||||
assert_eq!(section1.address(), 0);
|
||||
assert_eq!(section1.size(), 4);
|
||||
|
||||
let section2 = sections.next().unwrap();
|
||||
println!("{:?}", section2);
|
||||
let section2_index = section2.index();
|
||||
assert_eq!(section2.name(), Ok(".data.s1"));
|
||||
assert_eq!(section2.kind(), SectionKind::Data);
|
||||
assert_eq!(section2.address(), 0);
|
||||
assert_eq!(section2.size(), 4);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok(""));
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
let symbol_index = symbol.index();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("s1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(
|
||||
symbol.section(),
|
||||
read::SymbolSection::Section(section1.index())
|
||||
);
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.address(), 0);
|
||||
|
||||
let symbol = symbols.next();
|
||||
assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
|
||||
|
||||
let mut comdats = object.comdats();
|
||||
|
||||
let comdat = comdats.next().unwrap();
|
||||
println!("{:?}", comdat);
|
||||
assert_eq!(comdat.kind(), ComdatKind::Any);
|
||||
assert_eq!(comdat.symbol(), symbol_index);
|
||||
|
||||
let mut comdat_sections = comdat.sections();
|
||||
assert_eq!(comdat_sections.next(), Some(section1_index));
|
||||
assert_eq!(comdat_sections.next(), Some(section2_index));
|
||||
assert_eq!(comdat_sections.next(), None);
|
||||
}
|
||||
245
vendor/object/tests/round_trip/common.rs
vendored
Normal file
@@ -0,0 +1,245 @@
#![cfg(all(feature = "read", feature = "write"))]

use object::read::{Object, ObjectSection, ObjectSymbol};
use object::{read, write};
use object::{
    Architecture, BinaryFormat, Endianness, SectionKind, SymbolFlags, SymbolKind, SymbolScope,
};

#[test]
fn coff_x86_64_common() {
    let mut object =
        write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);

    let symbol = write::Symbol {
        name: b"v1".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 4, 4);

    let symbol = write::Symbol {
        name: b"v2".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 8, 8);

    // Also check undefined symbols, which are very similar.
    let symbol = write::Symbol {
        name: b"v3".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_symbol(symbol);

    let bytes = object.write().unwrap();

    //std::fs::write(&"common.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Coff);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("v1"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section(), read::SymbolSection::Common);
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.size(), 4);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("v2"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section(), read::SymbolSection::Common);
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.size(), 8);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("v3"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section(), read::SymbolSection::Undefined);
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), true);
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.size(), 0);

    let symbol = symbols.next();
    assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
}

#[test]
fn elf_x86_64_common() {
    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);

    let symbol = write::Symbol {
        name: b"v1".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 4, 4);

    let symbol = write::Symbol {
        name: b"v2".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 8, 8);

    let bytes = object.write().unwrap();

    //std::fs::write(&"common.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok(""));

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("v1"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section(), read::SymbolSection::Common);
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.size(), 4);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("v2"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section(), read::SymbolSection::Common);
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.size(), 8);

    let symbol = symbols.next();
    assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
}

#[test]
fn macho_x86_64_common() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    let symbol = write::Symbol {
        name: b"v1".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 4, 4);

    let symbol = write::Symbol {
        name: b"v2".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Data,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    };
    object.add_common_symbol(symbol, 8, 8);

    let bytes = object.write().unwrap();

    //std::fs::write(&"common.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::MachO);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut sections = object.sections();

    let common = sections.next().unwrap();
    println!("{:?}", common);
    let common_index = common.index();
    assert_eq!(common.name(), Ok("__common"));
    assert_eq!(common.segment_name(), Ok(Some("__DATA")));
    assert_eq!(common.kind(), SectionKind::Common);
    assert_eq!(common.size(), 16);
    assert_eq!(common.data(), Ok(&[][..]));

    let section = sections.next();
    assert!(section.is_none(), "unexpected section {:?}", section);

    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("_v1"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section_index(), Some(common_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 0);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("_v2"));
    assert_eq!(symbol.kind(), SymbolKind::Data);
    assert_eq!(symbol.section_index(), Some(common_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);
    assert_eq!(symbol.address(), 8);

    let symbol = symbols.next();
    assert!(symbol.is_none(), "unexpected symbol {:?}", symbol);
}
289
vendor/object/tests/round_trip/elf.rs
vendored
Normal file
@@ -0,0 +1,289 @@
use object::read::elf::{FileHeader, SectionHeader};
use object::read::{Object, ObjectSection, ObjectSymbol};
use object::{
    elf, read, write, Architecture, BinaryFormat, Endianness, LittleEndian, SectionIndex,
    SectionKind, SymbolFlags, SymbolKind, SymbolScope, SymbolSection, U32,
};
use std::io::Write;

#[test]
fn symtab_shndx() {
    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);

    for i in 0..0x10000 {
        let name = format!("func{}", i).into_bytes();
        let (section, offset) =
            object.add_subsection(write::StandardSection::Text, &name, &[0xcc], 1);
        object.add_symbol(write::Symbol {
            name,
            value: offset,
            size: 1,
            kind: SymbolKind::Text,
            scope: SymbolScope::Linkage,
            weak: false,
            section: write::SymbolSection::Section(section),
            flags: SymbolFlags::None,
        });
    }
    let bytes = object.write().unwrap();

    //std::fs::write(&"symtab_shndx.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    for symbol in object.symbols().skip(1) {
        assert_eq!(
            symbol.section(),
            SymbolSection::Section(SectionIndex(symbol.index().0))
        );
    }
}

#[test]
fn aligned_sections() {
    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);

    let text_section_id = object.add_section(vec![], b".text".to_vec(), SectionKind::Text);
    let text_section = object.section_mut(text_section_id);
    text_section.set_data(&[][..], 4096);

    let data_section_id = object.add_section(vec![], b".data".to_vec(), SectionKind::Data);
    let data_section = object.section_mut(data_section_id);
    data_section.set_data(&b"1234"[..], 16);

    let bytes = object.write().unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut sections = object.sections();
    let _ = sections.next().unwrap();

    let section = sections.next().unwrap();
    assert_eq!(section.name(), Ok(".text"));
    assert_eq!(section.file_range(), Some((4096, 0)));

    let section = sections.next().unwrap();
    assert_eq!(section.name(), Ok(".data"));
    assert_eq!(section.file_range(), Some((4096, 4)));
}

#[cfg(feature = "compression")]
#[test]
fn compression_zlib() {
    use object::read::ObjectSection;
    use object::LittleEndian as LE;

    let data = b"test data data data";
    let len = data.len() as u64;

    let mut ch = object::elf::CompressionHeader64::<LE>::default();
    ch.ch_type.set(LE, object::elf::ELFCOMPRESS_ZLIB);
    ch.ch_size.set(LE, len);
    ch.ch_addralign.set(LE, 1);

    let mut buf = Vec::new();
    buf.write(object::bytes_of(&ch)).unwrap();
    let mut encoder = flate2::write::ZlibEncoder::new(buf, flate2::Compression::default());
    encoder.write_all(data).unwrap();
    let compressed = encoder.finish().unwrap();

    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
    let section = object.add_section(
        Vec::new(),
        b".debug_info".to_vec(),
        object::SectionKind::Other,
    );
    object.section_mut(section).set_data(compressed, 1);
    object.section_mut(section).flags = object::SectionFlags::Elf {
        sh_flags: object::elf::SHF_COMPRESSED.into(),
    };
    let bytes = object.write().unwrap();

    //std::fs::write(&"compression.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let section = object.section_by_name(".debug_info").unwrap();
    let uncompressed = section.uncompressed_data().unwrap();
    assert_eq!(data, &*uncompressed);
}

#[cfg(feature = "compression")]
#[test]
fn compression_gnu() {
    use object::read::ObjectSection;
    use std::io::Write;

    let data = b"test data data data";
    let len = data.len() as u32;

    let mut buf = Vec::new();
    buf.write_all(b"ZLIB\0\0\0\0").unwrap();
    buf.write_all(&len.to_be_bytes()).unwrap();
    let mut encoder = flate2::write::ZlibEncoder::new(buf, flate2::Compression::default());
    encoder.write_all(data).unwrap();
    let compressed = encoder.finish().unwrap();

    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
    let section = object.add_section(
        Vec::new(),
        b".zdebug_info".to_vec(),
        object::SectionKind::Other,
    );
    object.section_mut(section).set_data(compressed, 1);
    let bytes = object.write().unwrap();

    //std::fs::write(&"compression.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let section = object.section_by_name(".zdebug_info").unwrap();
    let uncompressed = section.uncompressed_data().unwrap();
    assert_eq!(data, &*uncompressed);
}

#[test]
fn note() {
    let endian = Endianness::Little;
    let mut object = write::Object::new(BinaryFormat::Elf, Architecture::X86_64, endian);

    // Add note section with align = 4.
    let mut buffer = Vec::new();

    buffer
        .write(object::bytes_of(&elf::NoteHeader32 {
            n_namesz: U32::new(endian, 6),
            n_descsz: U32::new(endian, 11),
            n_type: U32::new(endian, 1),
        }))
        .unwrap();
    buffer.write(b"name1\0\0\0").unwrap();
    buffer.write(b"descriptor\0\0").unwrap();

    buffer
        .write(object::bytes_of(&elf::NoteHeader32 {
            n_namesz: U32::new(endian, 6),
            n_descsz: U32::new(endian, 11),
            n_type: U32::new(endian, 2),
        }))
        .unwrap();
    buffer.write(b"name2\0\0\0").unwrap();
    buffer.write(b"descriptor\0\0").unwrap();

    let section = object.add_section(Vec::new(), b".note4".to_vec(), SectionKind::Note);
    object.section_mut(section).set_data(buffer, 4);

    // Add note section with align = 8.
    let mut buffer = Vec::new();

    buffer
        .write(object::bytes_of(&elf::NoteHeader32 {
            n_namesz: U32::new(endian, 6),
            n_descsz: U32::new(endian, 11),
            n_type: U32::new(endian, 1),
        }))
        .unwrap();
    buffer.write(b"name1\0\0\0\0\0\0\0").unwrap();
    buffer.write(b"descriptor\0\0\0\0\0\0").unwrap();

    buffer
        .write(object::bytes_of(&elf::NoteHeader32 {
            n_namesz: U32::new(endian, 4),
            n_descsz: U32::new(endian, 11),
            n_type: U32::new(endian, 2),
        }))
        .unwrap();
    buffer.write(b"abc\0").unwrap();
    buffer.write(b"descriptor\0\0\0\0\0\0").unwrap();

    let section = object.add_section(Vec::new(), b".note8".to_vec(), SectionKind::Note);
    object.section_mut(section).set_data(buffer, 8);

    let bytes = &*object.write().unwrap();

    //std::fs::write(&"note.o", &bytes).unwrap();

    let header = elf::FileHeader64::parse(bytes).unwrap();
    let endian: LittleEndian = header.endian().unwrap();
    let sections = header.sections(endian, bytes).unwrap();

    let section = sections.section(SectionIndex(1)).unwrap();
    assert_eq!(sections.section_name(endian, section).unwrap(), b".note4");
    assert_eq!(section.sh_addralign(endian), 4);
    let mut notes = section.notes(endian, bytes).unwrap().unwrap();
    let note = notes.next().unwrap().unwrap();
    assert_eq!(note.name(), b"name1");
    assert_eq!(note.desc(), b"descriptor\0");
    assert_eq!(note.n_type(endian), 1);
    let note = notes.next().unwrap().unwrap();
    assert_eq!(note.name(), b"name2");
    assert_eq!(note.desc(), b"descriptor\0");
    assert_eq!(note.n_type(endian), 2);
    assert!(notes.next().unwrap().is_none());

    let section = sections.section(SectionIndex(2)).unwrap();
    assert_eq!(sections.section_name(endian, section).unwrap(), b".note8");
    assert_eq!(section.sh_addralign(endian), 8);
    let mut notes = section.notes(endian, bytes).unwrap().unwrap();
    let note = notes.next().unwrap().unwrap();
    assert_eq!(note.name(), b"name1");
    assert_eq!(note.desc(), b"descriptor\0");
    assert_eq!(note.n_type(endian), 1);
    let note = notes.next().unwrap().unwrap();
    assert_eq!(note.name(), b"abc");
    assert_eq!(note.desc(), b"descriptor\0");
    assert_eq!(note.n_type(endian), 2);
    assert!(notes.next().unwrap().is_none());
}

#[test]
fn gnu_property() {
    gnu_property_inner::<elf::FileHeader32<Endianness>>(Architecture::I386);
    gnu_property_inner::<elf::FileHeader64<Endianness>>(Architecture::X86_64);
}

fn gnu_property_inner<Elf: FileHeader<Endian = Endianness>>(architecture: Architecture) {
    let endian = Endianness::Little;
    let mut object = write::Object::new(BinaryFormat::Elf, architecture, endian);
    object.add_elf_gnu_property_u32(
        elf::GNU_PROPERTY_X86_FEATURE_1_AND,
        elf::GNU_PROPERTY_X86_FEATURE_1_IBT | elf::GNU_PROPERTY_X86_FEATURE_1_SHSTK,
    );

    let bytes = &*object.write().unwrap();

    //std::fs::write(&"note.o", &bytes).unwrap();

    let header = Elf::parse(bytes).unwrap();
    assert_eq!(header.endian().unwrap(), endian);
    let sections = header.sections(endian, bytes).unwrap();
    let section = sections.section(SectionIndex(1)).unwrap();
    assert_eq!(
        sections.section_name(endian, section).unwrap(),
        b".note.gnu.property"
    );
    assert_eq!(section.sh_flags(endian).into(), u64::from(elf::SHF_ALLOC));
    let mut notes = section.notes(endian, bytes).unwrap().unwrap();
    let note = notes.next().unwrap().unwrap();
    let mut props = note.gnu_properties(endian).unwrap();
    let prop = props.next().unwrap().unwrap();
    assert_eq!(prop.pr_type(), elf::GNU_PROPERTY_X86_FEATURE_1_AND);
    assert_eq!(
        prop.data_u32(endian).unwrap(),
        elf::GNU_PROPERTY_X86_FEATURE_1_IBT | elf::GNU_PROPERTY_X86_FEATURE_1_SHSTK
    );
    assert!(props.next().unwrap().is_none());
    assert!(notes.next().unwrap().is_none());
}
64
vendor/object/tests/round_trip/macho.rs
vendored
Normal file
@@ -0,0 +1,64 @@
use object::read::macho::MachHeader;
use object::read::{Object, ObjectSection};
use object::{macho, read, write, Architecture, BinaryFormat, Endianness};

// Test that segment size is valid when the first section needs alignment.
#[test]
fn issue_286_segment_file_size() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    let text = object.section_id(write::StandardSection::Text);
    object.append_section_data(text, &[1; 30], 0x1000);

    let bytes = &*object.write().unwrap();
    let header = macho::MachHeader64::parse(bytes, 0).unwrap();
    let endian: Endianness = header.endian().unwrap();
    let mut commands = header.load_commands(endian, bytes, 0).unwrap();
    let command = commands.next().unwrap().unwrap();
    let (segment, _) = command.segment_64().unwrap().unwrap();
    assert_eq!(segment.vmsize.get(endian), 30);
    assert_eq!(segment.filesize.get(endian), 30);
}

// We were emitting section file alignment padding that didn't match the address alignment padding.
#[test]
fn issue_552_section_file_alignment() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    // The starting file offset is not a multiple of 32 (checked later).
    // Length of 32 ensures that the file offset of the end of this section is still not a
    // multiple of 32.
    let section = object.add_section(vec![], vec![], object::SectionKind::ReadOnlyDataWithRel);
    object.append_section_data(section, &vec![0u8; 32], 1);

    // Address is already aligned correctly, so there must not be any padding,
    // even though file offset is not aligned.
    let section = object.add_section(vec![], vec![], object::SectionKind::ReadOnlyData);
    object.append_section_data(section, &vec![0u8; 1], 32);

    let bytes = &*object.write().unwrap();
    //std::fs::write(&"align.o", &bytes).unwrap();
    let object = read::File::parse(bytes).unwrap();
    let mut sections = object.sections();

    let section = sections.next().unwrap();
    let offset = section.file_range().unwrap().0;
    // Check file offset is not aligned to 32.
    assert_ne!(offset % 32, 0);
    assert_eq!(section.address(), 0);
    assert_eq!(section.size(), 32);

    let section = sections.next().unwrap();
    // Check there is no padding.
    assert_eq!(section.file_range(), Some((offset + 32, 1)));
    assert_eq!(section.address(), 32);
    assert_eq!(section.size(), 1);
}
686
vendor/object/tests/round_trip/mod.rs
vendored
Normal file
@@ -0,0 +1,686 @@
#![cfg(all(feature = "read", feature = "write"))]

use object::read::{Object, ObjectSection, ObjectSymbol};
use object::{read, write, SectionIndex, SubArchitecture};
use object::{
    Architecture, BinaryFormat, Endianness, RelocationEncoding, RelocationKind, SectionKind,
    SymbolFlags, SymbolKind, SymbolScope, SymbolSection,
};

mod bss;
mod coff;
mod comdat;
mod common;
mod elf;
mod macho;
mod section_flags;
mod tls;

#[test]
fn coff_any() {
    for (arch, sub_arch) in [
        (Architecture::Aarch64, None),
        (Architecture::Aarch64, Some(SubArchitecture::Arm64EC)),
        (Architecture::Arm, None),
        (Architecture::I386, None),
        (Architecture::X86_64, None),
    ]
    .iter()
    .copied()
    {
        let mut object = write::Object::new(BinaryFormat::Coff, arch, Endianness::Little);
        object.set_sub_architecture(sub_arch);

        object.add_file_symbol(b"file.c".to_vec());

        let text = object.section_id(write::StandardSection::Text);
        object.append_section_data(text, &[1; 30], 4);

        let func1_offset = object.append_section_data(text, &[1; 30], 4);
        assert_eq!(func1_offset, 32);
        let func1_symbol = object.add_symbol(write::Symbol {
            name: b"func1".to_vec(),
            value: func1_offset,
            size: 32,
            kind: SymbolKind::Text,
            scope: SymbolScope::Linkage,
            weak: false,
            section: write::SymbolSection::Section(text),
            flags: SymbolFlags::None,
        });
        let func2_offset = object.append_section_data(text, &[1; 30], 4);
        assert_eq!(func2_offset, 64);
        object.add_symbol(write::Symbol {
            name: b"func2_long".to_vec(),
            value: func2_offset,
            size: 32,
            kind: SymbolKind::Text,
            scope: SymbolScope::Linkage,
            weak: false,
            section: write::SymbolSection::Section(text),
            flags: SymbolFlags::None,
        });
        object
            .add_relocation(
                text,
                write::Relocation {
                    offset: 8,
                    size: arch.address_size().unwrap().bytes() * 8,
                    kind: RelocationKind::Absolute,
                    encoding: RelocationEncoding::Generic,
                    symbol: func1_symbol,
                    addend: 0,
                },
            )
            .unwrap();

        let bytes = object.write().unwrap();
        let object = read::File::parse(&*bytes).unwrap();
        assert_eq!(object.format(), BinaryFormat::Coff);
        assert_eq!(object.architecture(), arch);
        assert_eq!(object.sub_architecture(), sub_arch);
        assert_eq!(object.endianness(), Endianness::Little);

        let mut sections = object.sections();

        let text = sections.next().unwrap();
        println!("{:?}", text);
        let text_index = text.index();
        assert_eq!(text.name(), Ok(".text"));
        assert_eq!(text.kind(), SectionKind::Text);
        assert_eq!(text.address(), 0);
        assert_eq!(text.size(), 94);
        assert_eq!(&text.data().unwrap()[..30], &[1; 30]);
        assert_eq!(&text.data().unwrap()[32..62], &[1; 30]);

        let mut symbols = object.symbols();

        let symbol = symbols.next().unwrap();
        println!("{:?}", symbol);
        assert_eq!(symbol.name(), Ok("file.c"));
        assert_eq!(symbol.address(), 0);
        assert_eq!(symbol.kind(), SymbolKind::File);
        assert_eq!(symbol.section(), SymbolSection::None);
        assert_eq!(symbol.scope(), SymbolScope::Compilation);
        assert_eq!(symbol.is_weak(), false);

        let decorated_name = |name: &str| {
            if arch == Architecture::I386 {
                format!("_{name}")
            } else {
                name.to_owned()
            }
        };

        let symbol = symbols.next().unwrap();
        println!("{:?}", symbol);
        let func1_symbol = symbol.index();
        assert_eq!(symbol.name(), Ok(decorated_name("func1").as_str()));
        assert_eq!(symbol.address(), func1_offset);
        assert_eq!(symbol.kind(), SymbolKind::Text);
        assert_eq!(symbol.section_index(), Some(text_index));
        assert_eq!(symbol.scope(), SymbolScope::Linkage);
        assert_eq!(symbol.is_weak(), false);
        assert_eq!(symbol.is_undefined(), false);

        let symbol = symbols.next().unwrap();
        println!("{:?}", symbol);
        assert_eq!(symbol.name(), Ok(decorated_name("func2_long").as_str()));
        assert_eq!(symbol.address(), func2_offset);
        assert_eq!(symbol.kind(), SymbolKind::Text);
        assert_eq!(symbol.section_index(), Some(text_index));
        assert_eq!(symbol.scope(), SymbolScope::Linkage);
        assert_eq!(symbol.is_weak(), false);
        assert_eq!(symbol.is_undefined(), false);

        let mut relocations = text.relocations();

        let (offset, relocation) = relocations.next().unwrap();
        println!("{:?}", relocation);
        assert_eq!(offset, 8);
        assert_eq!(relocation.kind(), RelocationKind::Absolute);
        assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
        assert_eq!(relocation.size(), arch.address_size().unwrap().bytes() * 8);
        assert_eq!(
            relocation.target(),
            read::RelocationTarget::Symbol(func1_symbol)
        );
        assert_eq!(relocation.addend(), 0);

        let map = object.symbol_map();
        let symbol = map.get(func1_offset + 1).unwrap();
        assert_eq!(symbol.address(), func1_offset);
        assert_eq!(symbol.name(), decorated_name("func1"));
        assert_eq!(map.get(func1_offset - 1), None);
    }
}

#[test]
fn elf_x86_64() {
    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);

    object.add_file_symbol(b"file.c".to_vec());

    let text = object.section_id(write::StandardSection::Text);
    object.append_section_data(text, &[1; 30], 4);

    let func1_offset = object.append_section_data(text, &[1; 30], 4);
    assert_eq!(func1_offset, 32);
    let func1_symbol = object.add_symbol(write::Symbol {
        name: b"func1".to_vec(),
        value: func1_offset,
        size: 32,
        kind: SymbolKind::Text,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Section(text),
        flags: SymbolFlags::None,
    });
    object
        .add_relocation(
            text,
            write::Relocation {
                offset: 8,
                size: 64,
                kind: RelocationKind::Absolute,
                encoding: RelocationEncoding::Generic,
                symbol: func1_symbol,
                addend: 0,
            },
        )
        .unwrap();

    let bytes = object.write().unwrap();
    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);
    assert_eq!(object.endianness(), Endianness::Little);

    let mut sections = object.sections();

    let section = sections.next().unwrap();
    println!("{:?}", section);
    assert_eq!(section.name(), Ok(""));
    assert_eq!(section.kind(), SectionKind::Metadata);
    assert_eq!(section.address(), 0);
    assert_eq!(section.size(), 0);

    let text = sections.next().unwrap();
    println!("{:?}", text);
    let text_index = text.index();
    assert_eq!(text.name(), Ok(".text"));
    assert_eq!(text.kind(), SectionKind::Text);
    assert_eq!(text.address(), 0);
    assert_eq!(text.size(), 62);
    assert_eq!(&text.data().unwrap()[..30], &[1; 30]);
    assert_eq!(&text.data().unwrap()[32..62], &[1; 30]);

    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok(""));
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.kind(), SymbolKind::Null);
    assert_eq!(symbol.section_index(), None);
    assert_eq!(symbol.scope(), SymbolScope::Unknown);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), true);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("file.c"));
    assert_eq!(symbol.address(), 0);
    assert_eq!(symbol.kind(), SymbolKind::File);
    assert_eq!(symbol.section(), SymbolSection::None);
    assert_eq!(symbol.scope(), SymbolScope::Compilation);
    assert_eq!(symbol.is_weak(), false);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    let func1_symbol = symbol.index();
    assert_eq!(symbol.name(), Ok("func1"));
    assert_eq!(symbol.address(), func1_offset);
    assert_eq!(symbol.kind(), SymbolKind::Text);
    assert_eq!(symbol.section_index(), Some(text_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let mut relocations = text.relocations();

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 8);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(func1_symbol)
    );
    assert_eq!(relocation.addend(), 0);

    let map = object.symbol_map();
    let symbol = map.get(func1_offset + 1).unwrap();
    assert_eq!(symbol.address(), func1_offset);
    assert_eq!(symbol.name(), "func1");
    assert_eq!(map.get(func1_offset - 1), None);
}

#[test]
fn elf_any() {
    for (arch, endian) in [
        (Architecture::Aarch64, Endianness::Little),
        (Architecture::Aarch64_Ilp32, Endianness::Little),
        (Architecture::Arm, Endianness::Little),
        (Architecture::Avr, Endianness::Little),
        (Architecture::Bpf, Endianness::Little),
        (Architecture::Csky, Endianness::Little),
        (Architecture::I386, Endianness::Little),
        (Architecture::X86_64, Endianness::Little),
        (Architecture::X86_64_X32, Endianness::Little),
        (Architecture::Hexagon, Endianness::Little),
        (Architecture::LoongArch64, Endianness::Little),
        (Architecture::Mips, Endianness::Little),
        (Architecture::Mips64, Endianness::Little),
        (Architecture::Msp430, Endianness::Little),
        (Architecture::PowerPc, Endianness::Big),
        (Architecture::PowerPc64, Endianness::Big),
        (Architecture::Riscv32, Endianness::Little),
        (Architecture::Riscv64, Endianness::Little),
        (Architecture::S390x, Endianness::Big),
        (Architecture::Sbf, Endianness::Little),
        (Architecture::Sparc64, Endianness::Big),
        (Architecture::Xtensa, Endianness::Little),
    ]
    .iter()
    .copied()
    {
        let mut object = write::Object::new(BinaryFormat::Elf, arch, endian);

        let section = object.section_id(write::StandardSection::Data);
        object.append_section_data(section, &[1; 30], 4);
        let symbol = object.section_symbol(section);

        object
            .add_relocation(
                section,
                write::Relocation {
                    offset: 8,
                    size: 32,
                    kind: RelocationKind::Absolute,
                    encoding: RelocationEncoding::Generic,
                    symbol,
                    addend: 0,
                },
            )
            .unwrap();
        if arch.address_size().unwrap().bytes() >= 8 {
            object
                .add_relocation(
                    section,
                    write::Relocation {
                        offset: 16,
                        size: 64,
                        kind: RelocationKind::Absolute,
                        encoding: RelocationEncoding::Generic,
                        symbol,
                        addend: 0,
                    },
                )
                .unwrap();
        }

        let bytes = object.write().unwrap();
        let object = read::File::parse(&*bytes).unwrap();
        println!("{:?}", object.architecture());
        assert_eq!(object.format(), BinaryFormat::Elf);
        assert_eq!(object.architecture(), arch);
        assert_eq!(object.endianness(), endian);

        let mut sections = object.sections();

        let section = sections.next().unwrap();
        println!("{:?}", section);
        assert_eq!(section.name(), Ok(""));
        assert_eq!(section.kind(), SectionKind::Metadata);
        assert_eq!(section.address(), 0);
        assert_eq!(section.size(), 0);

        let data = sections.next().unwrap();
        println!("{:?}", data);
        assert_eq!(data.name(), Ok(".data"));
        assert_eq!(data.kind(), SectionKind::Data);

        let mut relocations = data.relocations();

        let (offset, relocation) = relocations.next().unwrap();
        println!("{:?}", relocation);
        assert_eq!(offset, 8);
        assert_eq!(relocation.kind(), RelocationKind::Absolute);
        assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
        assert_eq!(relocation.size(), 32);
        assert_eq!(relocation.addend(), 0);

        if arch.address_size().unwrap().bytes() >= 8 {
            let (offset, relocation) = relocations.next().unwrap();
            println!("{:?}", relocation);
            assert_eq!(offset, 16);
            assert_eq!(relocation.kind(), RelocationKind::Absolute);
            assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
            assert_eq!(relocation.size(), 64);
            assert_eq!(relocation.addend(), 0);
        }
    }
}

#[test]
fn macho_x86_64() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    object.add_file_symbol(b"file.c".to_vec());

    let text = object.section_id(write::StandardSection::Text);
    object.append_section_data(text, &[1; 30], 4);

    let func1_offset = object.append_section_data(text, &[1; 30], 4);
    assert_eq!(func1_offset, 32);
    let func1_symbol = object.add_symbol(write::Symbol {
        name: b"func1".to_vec(),
        value: func1_offset,
        size: 32,
        kind: SymbolKind::Text,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Section(text),
        flags: SymbolFlags::None,
    });
    object
        .add_relocation(
            text,
            write::Relocation {
                offset: 8,
                size: 64,
                kind: RelocationKind::Absolute,
                encoding: RelocationEncoding::Generic,
                symbol: func1_symbol,
                addend: 0,
            },
        )
        .unwrap();
    object
        .add_relocation(
            text,
            write::Relocation {
                offset: 16,
                size: 32,
                kind: RelocationKind::Relative,
                encoding: RelocationEncoding::Generic,
                symbol: func1_symbol,
                addend: -4,
            },
        )
        .unwrap();

    let bytes = object.write().unwrap();
    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::MachO);
    assert_eq!(object.architecture(), Architecture::X86_64);
    assert_eq!(object.endianness(), Endianness::Little);

    let mut sections = object.sections();

    let text = sections.next().unwrap();
    println!("{:?}", text);
    let text_index = text.index();
    assert_eq!(text.name(), Ok("__text"));
    assert_eq!(text.segment_name(), Ok(Some("__TEXT")));
    assert_eq!(text.kind(), SectionKind::Text);
    assert_eq!(text.address(), 0);
    assert_eq!(text.size(), 62);
    assert_eq!(&text.data().unwrap()[..30], &[1; 30]);
    assert_eq!(&text.data().unwrap()[32..62], &[1; 30]);

    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    let func1_symbol = symbol.index();
    assert_eq!(symbol.name(), Ok("_func1"));
    assert_eq!(symbol.address(), func1_offset);
    assert_eq!(symbol.kind(), SymbolKind::Text);
    assert_eq!(symbol.section_index(), Some(text_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let mut relocations = text.relocations();

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 8);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(func1_symbol)
    );
    assert_eq!(relocation.addend(), 0);

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 16);
    assert_eq!(relocation.kind(), RelocationKind::Relative);
    assert_eq!(relocation.encoding(), RelocationEncoding::X86RipRelative);
    assert_eq!(relocation.size(), 32);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(func1_symbol)
    );
    assert_eq!(relocation.addend(), -4);

    let map = object.symbol_map();
    let symbol = map.get(func1_offset + 1).unwrap();
    assert_eq!(symbol.address(), func1_offset);
    assert_eq!(symbol.name(), "_func1");
    assert_eq!(map.get(func1_offset - 1), None);
}

#[test]
fn macho_any() {
    for (arch, subarch, endian) in [
        (Architecture::Aarch64, None, Endianness::Little),
        (
            Architecture::Aarch64,
            Some(SubArchitecture::Arm64E),
            Endianness::Little,
        ),
        (Architecture::Aarch64_Ilp32, None, Endianness::Little),
        /* TODO:
        (Architecture::Arm, None, Endianness::Little),
        */
        (Architecture::I386, None, Endianness::Little),
        (Architecture::X86_64, None, Endianness::Little),
        /* TODO:
        (Architecture::PowerPc, None, Endianness::Big),
        (Architecture::PowerPc64, None, Endianness::Big),
        */
    ]
    .iter()
    .copied()
    {
        let mut object = write::Object::new(BinaryFormat::MachO, arch, endian);
        object.set_sub_architecture(subarch);

        let section = object.section_id(write::StandardSection::Data);
        object.append_section_data(section, &[1; 30], 4);
        let symbol = object.section_symbol(section);

        object
            .add_relocation(
                section,
                write::Relocation {
                    offset: 8,
                    size: 32,
                    kind: RelocationKind::Absolute,
                    encoding: RelocationEncoding::Generic,
                    symbol,
                    addend: 0,
                },
            )
            .unwrap();
        if arch.address_size().unwrap().bytes() >= 8 {
            object
                .add_relocation(
                    section,
                    write::Relocation {
                        offset: 16,
                        size: 64,
                        kind: RelocationKind::Absolute,
                        encoding: RelocationEncoding::Generic,
                        symbol,
                        addend: 0,
                    },
                )
                .unwrap();
        }

        let bytes = object.write().unwrap();
        let object = read::File::parse(&*bytes).unwrap();
        println!("{:?}", object.architecture());
        assert_eq!(object.format(), BinaryFormat::MachO);
        assert_eq!(object.architecture(), arch);
        assert_eq!(object.sub_architecture(), subarch);
        assert_eq!(object.endianness(), endian);

        let mut sections = object.sections();

        let data = sections.next().unwrap();
        println!("{:?}", data);
        assert_eq!(data.segment_name(), Ok(Some("__DATA")));
        assert_eq!(data.name(), Ok("__data"));
        assert_eq!(data.kind(), SectionKind::Data);

        let mut relocations = data.relocations();

        let (offset, relocation) = relocations.next().unwrap();
        println!("{:?}", relocation);
        assert_eq!(offset, 8);
        assert_eq!(relocation.kind(), RelocationKind::Absolute);
        assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
        assert_eq!(relocation.size(), 32);
        assert_eq!(relocation.addend(), 0);

        if arch.address_size().unwrap().bytes() >= 8 {
            let (offset, relocation) = relocations.next().unwrap();
            println!("{:?}", relocation);
            assert_eq!(offset, 16);
            assert_eq!(relocation.kind(), RelocationKind::Absolute);
            assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
            assert_eq!(relocation.size(), 64);
            assert_eq!(relocation.addend(), 0);
        }
    }
}

#[cfg(feature = "xcoff")]
#[test]
fn xcoff_powerpc() {
    for arch in [Architecture::PowerPc, Architecture::PowerPc64] {
        let mut object = write::Object::new(BinaryFormat::Xcoff, arch, Endianness::Big);

        object.add_file_symbol(b"file.c".to_vec());

        let text = object.section_id(write::StandardSection::Text);
        object.append_section_data(text, &[1; 30], 4);

        let func1_offset = object.append_section_data(text, &[1; 30], 4);
        assert_eq!(func1_offset, 32);
        let func1_symbol = object.add_symbol(write::Symbol {
            name: b"func1".to_vec(),
            value: func1_offset,
            size: 32,
            kind: SymbolKind::Text,
            scope: SymbolScope::Linkage,
            weak: false,
            section: write::SymbolSection::Section(text),
            flags: SymbolFlags::None,
        });

        object
            .add_relocation(
                text,
                write::Relocation {
                    offset: 8,
                    size: 64,
                    kind: RelocationKind::Absolute,
                    encoding: RelocationEncoding::Generic,
                    symbol: func1_symbol,
                    addend: 0,
                },
            )
            .unwrap();

        let bytes = object.write().unwrap();
        let object = read::File::parse(&*bytes).unwrap();
        assert_eq!(object.format(), BinaryFormat::Xcoff);
        assert_eq!(object.architecture(), arch);
        assert_eq!(object.endianness(), Endianness::Big);

        let mut sections = object.sections();

        let text = sections.next().unwrap();
        println!("{:?}", text);
        let text_index = text.index().0;
        assert_eq!(text.name(), Ok(".text"));
        assert_eq!(text.kind(), SectionKind::Text);
        assert_eq!(text.address(), 0);
        assert_eq!(text.size(), 62);
        assert_eq!(&text.data().unwrap()[..30], &[1; 30]);
        assert_eq!(&text.data().unwrap()[32..62], &[1; 30]);

        let mut symbols = object.symbols();

        let mut symbol = symbols.next().unwrap();
        println!("{:?}", symbol);
        assert_eq!(symbol.name(), Ok("file.c"));
        assert_eq!(symbol.address(), 0);
        assert_eq!(symbol.kind(), SymbolKind::File);
        assert_eq!(symbol.section_index(), None);
        assert_eq!(symbol.scope(), SymbolScope::Compilation);
        assert_eq!(symbol.is_weak(), false);
        assert_eq!(symbol.is_undefined(), false);

        symbol = symbols.next().unwrap();
        println!("{:?}", symbol);
        let func1_symbol = symbol.index();
        assert_eq!(symbol.name(), Ok("func1"));
        assert_eq!(symbol.address(), func1_offset);
        assert_eq!(symbol.kind(), SymbolKind::Text);
        assert_eq!(symbol.section_index(), Some(SectionIndex(text_index)));
        assert_eq!(symbol.scope(), SymbolScope::Linkage);
        assert_eq!(symbol.is_weak(), false);
        assert_eq!(symbol.is_undefined(), false);

        let mut relocations = text.relocations();

        let (offset, relocation) = relocations.next().unwrap();
        println!("{:?}", relocation);
        assert_eq!(offset, 8);
        assert_eq!(relocation.kind(), RelocationKind::Absolute);
        assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
        assert_eq!(relocation.size(), 64);
        assert_eq!(
            relocation.target(),
            read::RelocationTarget::Symbol(func1_symbol)
        );
        assert_eq!(relocation.addend(), 0);
    }
}
90
vendor/object/tests/round_trip/section_flags.rs
vendored
Normal file
@@ -0,0 +1,90 @@
#![cfg(all(feature = "read", feature = "write"))]

use object::read::{Object, ObjectSection};
use object::{read, write};
use object::{Architecture, BinaryFormat, Endianness, SectionFlags, SectionKind};

#[test]
fn coff_x86_64_section_flags() {
    let mut object =
        write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);

    let section = object.add_section(Vec::new(), b".text".to_vec(), SectionKind::Text);
    object.section_mut(section).flags = SectionFlags::Coff {
        characteristics: object::pe::IMAGE_SCN_MEM_WRITE,
    };

    let bytes = object.write().unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Coff);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut sections = object.sections();
    let section = sections.next().unwrap();
    assert_eq!(section.name(), Ok(".text"));
    assert_eq!(
        section.flags(),
        SectionFlags::Coff {
            characteristics: object::pe::IMAGE_SCN_MEM_WRITE | object::pe::IMAGE_SCN_ALIGN_1BYTES,
        }
    );
}

#[test]
fn elf_x86_64_section_flags() {
    let mut object =
        write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);

    let section = object.add_section(Vec::new(), b".text".to_vec(), SectionKind::Text);
    object.section_mut(section).flags = SectionFlags::Elf {
        sh_flags: object::elf::SHF_WRITE.into(),
    };

    let bytes = object.write().unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::Elf);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut sections = object.sections();
    sections.next().unwrap();
    let section = sections.next().unwrap();
    assert_eq!(section.name(), Ok(".text"));
    assert_eq!(
        section.flags(),
        SectionFlags::Elf {
            sh_flags: object::elf::SHF_WRITE.into(),
        }
    );
}

#[test]
fn macho_x86_64_section_flags() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    let section = object.add_section(Vec::new(), b".text".to_vec(), SectionKind::Text);
    object.section_mut(section).flags = SectionFlags::MachO {
        flags: object::macho::S_ATTR_SELF_MODIFYING_CODE,
    };

    let bytes = object.write().unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::MachO);
    assert_eq!(object.architecture(), Architecture::X86_64);

    let mut sections = object.sections();
    let section = sections.next().unwrap();
    assert_eq!(section.name(), Ok(".text"));
    assert_eq!(
        section.flags(),
        SectionFlags::MachO {
            flags: object::macho::S_ATTR_SELF_MODIFYING_CODE,
        }
    );
}
316
vendor/object/tests/round_trip/tls.rs
vendored
Normal file
@@ -0,0 +1,316 @@
|
||||
#![cfg(all(feature = "read", feature = "write"))]
|
||||
|
||||
use object::read::{Object, ObjectSection, ObjectSymbol};
|
||||
use object::{read, write};
|
||||
use object::{
|
||||
Architecture, BinaryFormat, Endianness, RelocationEncoding, RelocationKind, SectionKind,
|
||||
SymbolFlags, SymbolKind, SymbolScope,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn coff_x86_64_tls() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let section = object.section_id(write::StandardSection::Tls);
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"tls1".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Tls,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_data(symbol, section, &[1; 30], 4);
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"tls.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Coff);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
let tls_index = section.index();
|
||||
assert_eq!(section.name(), Ok(".tls$"));
|
||||
assert_eq!(section.kind(), SectionKind::Data);
|
||||
assert_eq!(section.size(), 30);
|
||||
assert_eq!(§ion.data().unwrap()[..], &[1; 30]);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("tls1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Data);
|
||||
assert_eq!(symbol.section_index(), Some(tls_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn elf_x86_64_tls() {
|
||||
let mut object =
|
||||
write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
|
||||
|
||||
let section = object.section_id(write::StandardSection::Tls);
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"tls1".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Tls,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_data(symbol, section, &[1; 30], 4);
|
||||
|
||||
let section = object.section_id(write::StandardSection::UninitializedTls);
|
||||
let symbol = object.add_symbol(write::Symbol {
|
||||
name: b"tls2".to_vec(),
|
||||
value: 0,
|
||||
size: 0,
|
||||
kind: SymbolKind::Tls,
|
||||
scope: SymbolScope::Linkage,
|
||||
weak: false,
|
||||
section: write::SymbolSection::Undefined,
|
||||
flags: SymbolFlags::None,
|
||||
});
|
||||
object.add_symbol_bss(symbol, section, 31, 4);
|
||||
|
||||
let bytes = object.write().unwrap();
|
||||
|
||||
//std::fs::write(&"tls.o", &bytes).unwrap();
|
||||
|
||||
let object = read::File::parse(&*bytes).unwrap();
|
||||
assert_eq!(object.format(), BinaryFormat::Elf);
|
||||
assert_eq!(object.architecture(), Architecture::X86_64);
|
||||
|
||||
let mut sections = object.sections();
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
assert_eq!(section.name(), Ok(""));
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
let tdata_index = section.index();
|
||||
assert_eq!(section.name(), Ok(".tdata"));
|
||||
assert_eq!(section.kind(), SectionKind::Tls);
|
||||
assert_eq!(section.size(), 30);
|
||||
assert_eq!(§ion.data().unwrap()[..], &[1; 30]);
|
||||
|
||||
let section = sections.next().unwrap();
|
||||
println!("{:?}", section);
|
||||
let tbss_index = section.index();
|
||||
assert_eq!(section.name(), Ok(".tbss"));
|
||||
assert_eq!(section.kind(), SectionKind::UninitializedTls);
|
||||
assert_eq!(section.size(), 31);
|
||||
assert_eq!(§ion.data().unwrap()[..], &[]);
|
||||
|
||||
let mut symbols = object.symbols();
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok(""));
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("tls1"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Tls);
|
||||
assert_eq!(symbol.section_index(), Some(tdata_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.size(), 30);
|
||||
|
||||
let symbol = symbols.next().unwrap();
|
||||
println!("{:?}", symbol);
|
||||
assert_eq!(symbol.name(), Ok("tls2"));
|
||||
assert_eq!(symbol.kind(), SymbolKind::Tls);
|
||||
assert_eq!(symbol.section_index(), Some(tbss_index));
|
||||
assert_eq!(symbol.scope(), SymbolScope::Linkage);
|
||||
assert_eq!(symbol.is_weak(), false);
|
||||
assert_eq!(symbol.is_undefined(), false);
|
||||
assert_eq!(symbol.size(), 31);
|
||||
}
|
||||
|
||||
#[test]
fn macho_x86_64_tls() {
    let mut object = write::Object::new(
        BinaryFormat::MachO,
        Architecture::X86_64,
        Endianness::Little,
    );

    let section = object.section_id(write::StandardSection::Tls);
    let symbol = object.add_symbol(write::Symbol {
        name: b"tls1".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Tls,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    });
    object.add_symbol_data(symbol, section, &[1; 30], 4);

    let section = object.section_id(write::StandardSection::UninitializedTls);
    let symbol = object.add_symbol(write::Symbol {
        name: b"tls2".to_vec(),
        value: 0,
        size: 0,
        kind: SymbolKind::Tls,
        scope: SymbolScope::Linkage,
        weak: false,
        section: write::SymbolSection::Undefined,
        flags: SymbolFlags::None,
    });
    object.add_symbol_bss(symbol, section, 31, 4);

    let bytes = object.write().unwrap();

    //std::fs::write(&"tls.o", &bytes).unwrap();

    let object = read::File::parse(&*bytes).unwrap();
    assert_eq!(object.format(), BinaryFormat::MachO);
    assert_eq!(object.architecture(), Architecture::X86_64);

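    // The writer emits three TLS-related sections in the `__DATA` segment:
    // `__thread_data` (initialized data), `__thread_vars` (TLV descriptors),
    // and `__thread_bss` (zero-initialized data).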
    let mut sections = object.sections();

    let thread_data = sections.next().unwrap();
    println!("{:?}", thread_data);
    let thread_data_index = thread_data.index();
    assert_eq!(thread_data.name(), Ok("__thread_data"));
    assert_eq!(thread_data.segment_name(), Ok(Some("__DATA")));
    assert_eq!(thread_data.kind(), SectionKind::Tls);
    assert_eq!(thread_data.size(), 30);
    assert_eq!(&thread_data.data().unwrap()[..], &[1; 30]);

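    // `__thread_vars` holds one TLV descriptor per variable. Each descriptor is
    // three 8-byte words (thunk pointer, key, offset), so two variables give
    // 2 * 3 * 8 = 48 bytes, all zero until the relocations below are applied.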
    let thread_vars = sections.next().unwrap();
    println!("{:?}", thread_vars);
    let thread_vars_index = thread_vars.index();
    assert_eq!(thread_vars.name(), Ok("__thread_vars"));
    assert_eq!(thread_vars.segment_name(), Ok(Some("__DATA")));
    assert_eq!(thread_vars.kind(), SectionKind::TlsVariables);
    assert_eq!(thread_vars.size(), 2 * 3 * 8);
    assert_eq!(&thread_vars.data().unwrap()[..], &[0; 48][..]);

    let thread_bss = sections.next().unwrap();
    println!("{:?}", thread_bss);
    let thread_bss_index = thread_bss.index();
    assert_eq!(thread_bss.name(), Ok("__thread_bss"));
    assert_eq!(thread_bss.segment_name(), Ok(Some("__DATA")));
    assert_eq!(thread_bss.kind(), SectionKind::UninitializedTls);
    assert_eq!(thread_bss.size(), 31);
    assert_eq!(thread_bss.data(), Ok(&[][..]));

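    // The symbol table contains a compilation-unit-scoped `$tlv$init` symbol for
    // each variable's initializer, the public `_tls1`/`_tls2` symbols in
    // `__thread_vars`, and an undefined reference to `__tlv_bootstrap`.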
    let mut symbols = object.symbols();

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    let tls1_init_symbol = symbol.index();
    assert_eq!(symbol.name(), Ok("_tls1$tlv$init"));
    assert_eq!(symbol.kind(), SymbolKind::Tls);
    assert_eq!(symbol.section_index(), Some(thread_data_index));
    assert_eq!(symbol.scope(), SymbolScope::Compilation);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    let tls2_init_symbol = symbol.index();
    assert_eq!(symbol.name(), Ok("_tls2$tlv$init"));
    assert_eq!(symbol.kind(), SymbolKind::Tls);
    assert_eq!(symbol.section_index(), Some(thread_bss_index));
    assert_eq!(symbol.scope(), SymbolScope::Compilation);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("_tls1"));
    assert_eq!(symbol.kind(), SymbolKind::Tls);
    assert_eq!(symbol.section_index(), Some(thread_vars_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    assert_eq!(symbol.name(), Ok("_tls2"));
    assert_eq!(symbol.kind(), SymbolKind::Tls);
    assert_eq!(symbol.section_index(), Some(thread_vars_index));
    assert_eq!(symbol.scope(), SymbolScope::Linkage);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), false);

    let symbol = symbols.next().unwrap();
    println!("{:?}", symbol);
    let tlv_bootstrap_symbol = symbol.index();
    assert_eq!(symbol.name(), Ok("__tlv_bootstrap"));
    assert_eq!(symbol.kind(), SymbolKind::Unknown);
    assert_eq!(symbol.section_index(), None);
    assert_eq!(symbol.scope(), SymbolScope::Unknown);
    assert_eq!(symbol.is_weak(), false);
    assert_eq!(symbol.is_undefined(), true);

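    // Each descriptor carries two relocations: its thunk field (offsets 0 and 24)
    // is relocated against `__tlv_bootstrap`, and its offset field (offsets 16
    // and 40) against the variable's `$tlv$init` symbol.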
    let mut relocations = thread_vars.relocations();

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 0);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(tlv_bootstrap_symbol)
    );
    assert_eq!(relocation.addend(), 0);

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 16);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(tls1_init_symbol)
    );
    assert_eq!(relocation.addend(), 0);

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 24);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(tlv_bootstrap_symbol)
    );
    assert_eq!(relocation.addend(), 0);

    let (offset, relocation) = relocations.next().unwrap();
    println!("{:?}", relocation);
    assert_eq!(offset, 40);
    assert_eq!(relocation.kind(), RelocationKind::Absolute);
    assert_eq!(relocation.encoding(), RelocationEncoding::Generic);
    assert_eq!(relocation.size(), 64);
    assert_eq!(
        relocation.target(),
        read::RelocationTarget::Symbol(tls2_init_symbol)
    );
    assert_eq!(relocation.addend(), 0);
}