Mirror of https://github.com/flipperdevices/flipperzero-firmware.git (synced 2025-12-12 20:59:50 +04:00)
Add new map analyser (#3648)
* Add new map analyser
* Fix typos

Co-authored-by: hedger <hedger@users.noreply.github.com>
Co-authored-by: あく <alleteam@gmail.com>
.github/workflows/build.yml (vendored): 29 changed lines
@@ -19,7 +19,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        target: [f7, f18]
    steps:
      - name: 'Wipe workspace'
        run: find ./ -mount -maxdepth 1 -exec rm -rf {} \;
@@ -103,29 +103,12 @@ jobs:
         run: |
           cp build/${DEFAULT_TARGET}-firmware-*/firmware.elf.map map_analyser_files/firmware.elf.map
           cp build/${DEFAULT_TARGET}-firmware-*/firmware.elf map_analyser_files/firmware.elf
-          cp ${{ github.event_path }} map_analyser_files/event.json
           source scripts/toolchain/fbtenv.sh
-          get_size()
-          {
-            SECTION="$1";
-            arm-none-eabi-size \
-              -A map_analyser_files/firmware.elf \
-              | grep "^$SECTION" | awk '{print $2}'
-          }
-          export BSS_SIZE="$(get_size ".bss")"
-          export TEXT_SIZE="$(get_size ".text")"
-          export RODATA_SIZE="$(get_size ".rodata")"
-          export DATA_SIZE="$(get_size ".data")"
-          export FREE_FLASH_SIZE="$(get_size ".free_flash")"
-          python3 -m pip install mariadb==1.1.6 cxxfilt==0.3.0
-          python3 scripts/map_parser.py map_analyser_files/firmware.elf.map map_analyser_files/firmware.elf.map.all
-          python3 scripts/map_mariadb_insert.py \
-            ${{ secrets.AMAP_MARIADB_USER }} \
-            ${{ secrets.AMAP_MARIADB_PASSWORD }} \
-            ${{ secrets.AMAP_MARIADB_HOST }} \
-            ${{ secrets.AMAP_MARIADB_PORT }} \
-            ${{ secrets.AMAP_MARIADB_DATABASE }} \
-            map_analyser_files/firmware.elf.map.all
+          python3 scripts/map_analyse_upload.py \
+            "--elf_file=map_analyser_files/firmware.elf" \
+            "--map_file=map_analyser_files/firmware.elf.map" \
+            "--analyser_url=${{ secrets.ANALYSER_URL }}" \
+            "--analyser_token=${{ secrets.ANALYSER_TOKEN }}";

       - name: 'Find previous comment'
         if: ${{ !github.event.pull_request.head.repo.fork && matrix.target == env.DEFAULT_TARGET && github.event.pull_request }}
scripts/map_analyse_upload.py (new executable file): 86 lines
@@ -0,0 +1,86 @@
#!/usr/bin/env python3

import os
import requests
import argparse
import subprocess

# usage:
# COMMIT_HASH, COMMIT_MSG, BRANCH_NAME,
# PULL_ID(optional), PULL_NAME(optional) must be set as envs
# maybe from scripts/get_env.py
# other args must be set via command line args


class AnalyseRequest:
    def __init__(self):
        self.commit_hash = os.environ["COMMIT_HASH"]
        self.commit_msg = os.environ["COMMIT_MSG"]
        self.branch_name = os.environ["BRANCH_NAME"]
        self.pull_id = os.getenv("PULL_ID", default=None)
        self.pull_name = os.getenv("PULL_NAME", default=None)

    def get_payload(self):
        return vars(self)


class AnalyseUploader:
    def __init__(self):
        self.args = self.parse_args()

    @staticmethod
    def get_sections_size(elf_file) -> dict:
        ret = dict()
        all_sizes = subprocess.check_output(
            ["arm-none-eabi-size", "-A", elf_file], shell=False
        )
        all_sizes = all_sizes.splitlines()

        sections_to_keep = (".text", ".rodata", ".data", ".bss", ".free_flash")
        for line in all_sizes:
            line = line.decode("utf-8")
            parts = line.split()
            if len(parts) != 3:
                continue
            section, size, _ = parts
            if section not in sections_to_keep:
                continue
            section_size_payload_name = (
                section[1:] if section.startswith(".") else section
            )
            section_size_payload_name += "_size"
            ret[section_size_payload_name] = size
        return ret

    @staticmethod
    def parse_args():
        parser = argparse.ArgumentParser()
        parser.add_argument("--elf_file", help="Firmware ELF file", required=True)
        parser.add_argument("--map_file", help="Firmware MAP file", required=True)
        parser.add_argument(
            "--analyser_token", help="Analyser auth token", required=True
        )
        parser.add_argument(
            "--analyser_url", help="Analyser analyse url", required=True
        )
        args = parser.parse_args()
        return args

    def upload_analyse_request(self):
        payload = AnalyseRequest().get_payload() | self.get_sections_size(
            self.args.elf_file
        )
        headers = {"Authorization": f"Bearer {self.args.analyser_token}"}
        file = {"map_file": open(self.args.map_file, "rb")}
        response = requests.post(
            self.args.analyser_url, data=payload, files=file, headers=headers
        )
        if not response.ok:
            raise Exception(
                f"Failed to upload map file, code: {response.status_code}, reason: {response.text}"
            )


if __name__ == "__main__":
    analyzer = AnalyseUploader()
    analyzer.upload_analyse_request()
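For local testing outside CI, a minimal sketch of how the new uploader might be invoked (all values below are hypothetical placeholders; in the workflow the URL and token come from the ANALYSER_URL and ANALYSER_TOKEN secrets, and the commit variables are expected in the environment, e.g. from scripts/get_env.py):

#!/usr/bin/env python3
# Sketch only: drives scripts/map_analyse_upload.py with placeholder values.
import os
import subprocess

env = dict(os.environ)
env.update(
    {
        "COMMIT_HASH": "0000000000000000000000000000000000000000",  # placeholder
        "COMMIT_MSG": "Add new map analyser",
        "BRANCH_NAME": "dev",
    }
)

subprocess.check_call(
    [
        "python3",
        "scripts/map_analyse_upload.py",
        "--elf_file=map_analyser_files/firmware.elf",
        "--map_file=map_analyser_files/firmware.elf.map",
        "--analyser_url=https://analyser.example.invalid/analyse",  # placeholder URL
        "--analyser_token=placeholder-token",  # placeholder token
    ],
    env=env,
)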
scripts/map_mariadb_insert.py (deleted file, 139 lines; file name inferred from the workflow step removed above)
@@ -1,139 +0,0 @@
#!/usr/bin/env python3

# Requirements:
# mariadb==1.1.6

from datetime import datetime
import argparse
import mariadb
import sys
import os


def parseArgs():
    parser = argparse.ArgumentParser()
    parser.add_argument("db_user", help="MariaDB user")
    parser.add_argument("db_pass", help="MariaDB password")
    parser.add_argument("db_host", help="MariaDB hostname")
    parser.add_argument("db_port", type=int, help="MariaDB port")
    parser.add_argument("db_name", help="MariaDB database")
    parser.add_argument("report_file", help="Report file(.map.all)")
    args = parser.parse_args()
    return args


def mariadbConnect(args):
    try:
        conn = mariadb.connect(
            user=args.db_user,
            password=args.db_pass,
            host=args.db_host,
            port=args.db_port,
            database=args.db_name,
        )
    except mariadb.Error as e:
        print(f"Error connecting to MariaDB: {e}")
        sys.exit(1)
    return conn


def parseEnv():
    outArr = []
    outArr.append(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    outArr.append(os.getenv("COMMIT_HASH", default=None))
    outArr.append(os.getenv("COMMIT_MSG", default=None))
    outArr.append(os.getenv("BRANCH_NAME", default=None))
    outArr.append(os.getenv("BSS_SIZE", default=None))
    outArr.append(os.getenv("TEXT_SIZE", default=None))
    outArr.append(os.getenv("RODATA_SIZE", default=None))
    outArr.append(os.getenv("DATA_SIZE", default=None))
    outArr.append(os.getenv("FREE_FLASH_SIZE", default=None))
    outArr.append(os.getenv("PULL_ID", default=None))
    outArr.append(os.getenv("PULL_NAME", default=None))
    return outArr


def createTables(cur, conn):
    headerTable = "CREATE TABLE IF NOT EXISTS `header` ( \
        `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
        `datetime` datetime NOT NULL, \
        `commit` varchar(40) NOT NULL, \
        `commit_msg` text NOT NULL, \
        `branch_name` text NOT NULL, \
        `bss_size` int(10) unsigned NOT NULL, \
        `text_size` int(10) unsigned NOT NULL, \
        `rodata_size` int(10) unsigned NOT NULL, \
        `data_size` int(10) unsigned NOT NULL, \
        `free_flash_size` int(10) unsigned NOT NULL, \
        `pullrequest_id` int(10) unsigned DEFAULT NULL, \
        `pullrequest_name` text DEFAULT NULL, \
        PRIMARY KEY (`id`), \
        KEY `header_id_index` (`id`) )"
    dataTable = "CREATE TABLE IF NOT EXISTS `data` ( \
        `header_id` int(10) unsigned NOT NULL, \
        `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
        `section` text NOT NULL, \
        `address` text NOT NULL, \
        `size` int(10) unsigned NOT NULL, \
        `name` text NOT NULL, \
        `lib` text NOT NULL, \
        `obj_name` text NOT NULL, \
        PRIMARY KEY (`id`), \
        KEY `data_id_index` (`id`), \
        KEY `data_header_id_index` (`header_id`), \
        CONSTRAINT `data_header_id_foreign` FOREIGN KEY (`header_id`) REFERENCES `header` (`id`) )"
    cur.execute(headerTable)
    cur.execute(dataTable)
    conn.commit()


def insertHeader(data, cur, conn):
    query = "INSERT INTO `header` ( \
        datetime, commit, commit_msg, branch_name, bss_size, text_size, \
        rodata_size, data_size, free_flash_size, pullrequest_id, pullrequest_name) \
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
    cur.execute(query, data)
    conn.commit()
    return cur.lastrowid


def parseFile(fileObj, headerID):
    arr = []
    fileLines = fileObj.readlines()
    for line in fileLines:
        lineArr = []
        tempLineArr = line.split("\t")
        lineArr.append(headerID)
        lineArr.append(tempLineArr[0])  # section
        lineArr.append(int(tempLineArr[2], 16))  # address hex
        lineArr.append(int(tempLineArr[3]))  # size
        lineArr.append(tempLineArr[4])  # name
        lineArr.append(tempLineArr[5])  # lib
        lineArr.append(tempLineArr[6])  # obj_name
        arr.append(tuple(lineArr))
    return arr


def insertData(data, cur, conn):
    query = "INSERT INTO `data` ( \
        header_id, section, address, size, \
        name, lib, obj_name) \
        VALUES (?, ?, ?, ?, ?, ?, ?)"
    cur.executemany(query, data)
    conn.commit()


def main():
    args = parseArgs()
    dbConn = mariadbConnect(args)
    reportFile = open(args.report_file)
    dbCurs = dbConn.cursor()
    createTables(dbCurs, dbConn)
    headerID = insertHeader(parseEnv(), dbCurs, dbConn)
    insertData(parseFile(reportFile, headerID), dbCurs, dbConn)
    reportFile.close()
    dbCurs.close()


if __name__ == "__main__":
    main()
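As context for the schema being dropped, a minimal sketch of reading back the size history this script used to store (connection values are placeholders; column names follow the `header` table created above):

# Sketch only: placeholder connection values; reads the `header` table defined above.
import mariadb

conn = mariadb.connect(
    user="amap", password="secret", host="localhost", port=3306, database="amap"
)
cur = conn.cursor()
cur.execute(
    "SELECT `datetime`, `commit`, text_size, data_size, bss_size, free_flash_size "
    "FROM `header` ORDER BY `datetime` DESC LIMIT 10"
)
for row in cur.fetchall():
    print(row)  # one firmware size snapshot per analysed commit
cur.close()
conn.close()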
scripts/map_parser.py (deleted file, 274 lines; file name inferred from the workflow step removed above)
@@ -1,274 +0,0 @@
#!/usr/bin/env python3

# Requirements:
# cxxfilt==0.3.0

# Most of this code was written by Lars-Dominik Braun <lars@6xq.net> https://github.com/PromyLOPh/linkermapviz
# and is distributed under the MIT licence

# Copyright (c) 2017 Lars-Dominik Braun <lars@6xq.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import sys
import re
import os
from typing import TextIO
from cxxfilt import demangle


class Objectfile:
    def __init__(self, section: str, offset: int, size: int, comment: str):
        self.section = section.strip()
        self.offset = offset
        self.size = size
        self.path = (None, None)
        self.basepath = None

        if comment:
            self.path = re.match(r"^(.+?)(?:\(([^\)]+)\))?$", comment).groups()
            self.basepath = os.path.basename(self.path[0])

        self.children = []

    def __repr__(self) -> str:
        return f"<Objectfile {self.section} {self.offset:x} {self.size:x} {self.path} {repr(self.children)}>"


def update_children_size(children: list[list], subsection_size: int) -> list:
    # set subsection size to an only child
    if len(children) == 1:
        children[0][1] = subsection_size
        return children

    rest_size = subsection_size

    for index in range(1, len(children)):
        if rest_size > 0:
            # current size = current address - previous child address
            child_size = children[index][0] - children[index - 1][0]
            rest_size -= child_size
            children[index - 1][1] = child_size

    # if there is rest size, set it to the last child element
    if rest_size > 0:
        children[-1][1] = rest_size

    return children


def parse_sections(file_name: str) -> list:
    """
    Quick&Dirty parsing for GNU ld’s linker map output, needs LANG=C, because
    some messages are localized.
    """

    sections = []
    with open(file_name, "r") as file:
        # skip until memory map is found
        found = False

        while True:
            line = file.readline()
            if not line:
                break
            if line.strip() == "Memory Configuration":
                found = True
                break

        if not found:
            raise Exception(f"Memory configuration is not found in the {input_file}")

        # long section names result in a linebreak afterwards
        sectionre = re.compile(
            "(?P<section>.+?|.{14,}\n)[ ]+0x(?P<offset>[0-9a-f]+)[ ]+0x(?P<size>[0-9a-f]+)(?:[ ]+(?P<comment>.+))?\n+",
            re.I,
        )
        subsectionre = re.compile(
            "[ ]{16}0x(?P<offset>[0-9a-f]+)[ ]+(?P<function>.+)\n+", re.I
        )
        s = file.read()
        pos = 0

        while True:
            m = sectionre.match(s, pos)
            if not m:
                # skip that line
                try:
                    nextpos = s.index("\n", pos) + 1
                    pos = nextpos
                    continue
                except ValueError:
                    break

            pos = m.end()
            section = m.group("section")
            v = m.group("offset")
            offset = int(v, 16) if v is not None else None
            v = m.group("size")
            size = int(v, 16) if v is not None else None
            comment = m.group("comment")

            if section != "*default*" and size > 0:
                of = Objectfile(section, offset, size, comment)

                if section.startswith(" "):
                    children = []
                    sections[-1].children.append(of)

                    while True:
                        m = subsectionre.match(s, pos)
                        if not m:
                            break
                        pos = m.end()
                        offset, function = m.groups()
                        offset = int(offset, 16)
                        if sections and sections[-1].children:
                            children.append([offset, 0, function])

                    if children:
                        children = update_children_size(
                            children=children, subsection_size=of.size
                        )

                        sections[-1].children[-1].children.extend(children)

                else:
                    sections.append(of)

    return sections


def get_subsection_name(section_name: str, subsection: Objectfile) -> str:
    subsection_split_names = subsection.section.split(".")
    if subsection.section.startswith("."):
        subsection_split_names = subsection_split_names[1:]

    return (
        f".{subsection_split_names[1]}"
        if len(subsection_split_names) > 2
        else section_name
    )


def write_subsection(
    section_name: str,
    subsection_name: str,
    address: str,
    size: int,
    demangled_name: str,
    module_name: str,
    file_name: str,
    mangled_name: str,
    write_file_object: TextIO,
) -> None:
    write_file_object.write(
        f"{section_name}\t"
        f"{subsection_name}\t"
        f"{address}\t"
        f"{size}\t"
        f"{demangled_name}\t"
        f"{module_name}\t"
        f"{file_name}\t"
        f"{mangled_name}\n"
    )


def save_subsection(
    section_name: str, subsection: Objectfile, write_file_object: TextIO
) -> None:
    subsection_name = get_subsection_name(section_name, subsection)
    module_name = subsection.path[0]
    file_name = subsection.path[1]

    if not file_name:
        file_name, module_name = module_name, ""

    if not subsection.children:
        address = f"{subsection.offset:x}"
        size = subsection.size
        mangled_name = (
            ""
            if subsection.section == section_name
            else subsection.section.split(".")[-1]
        )
        demangled_name = demangle(mangled_name) if mangled_name else mangled_name

        write_subsection(
            section_name=section_name,
            subsection_name=subsection_name,
            address=address,
            size=size,
            demangled_name=demangled_name,
            module_name=module_name,
            file_name=file_name,
            mangled_name=mangled_name,
            write_file_object=write_file_object,
        )
        return

    for subsection_child in subsection.children:
        address = f"{subsection_child[0]:x}"
        size = subsection_child[1]
        mangled_name = subsection_child[2]
        demangled_name = demangle(mangled_name)

        write_subsection(
            section_name=section_name,
            subsection_name=subsection_name,
            address=address,
            size=size,
            demangled_name=demangled_name,
            module_name=module_name,
            file_name=file_name,
            mangled_name=mangled_name,
            write_file_object=write_file_object,
        )


def save_section(section: Objectfile, write_file_object: TextIO) -> None:
    section_name = section.section
    for subsection in section.children:
        save_subsection(
            section_name=section_name,
            subsection=subsection,
            write_file_object=write_file_object,
        )


def save_parsed_data(parsed_data: list[Objectfile], output_file_name: str) -> None:
    with open(output_file_name, "w") as write_file_object:
        for section in parsed_data:
            if section.children:
                save_section(section=section, write_file_object=write_file_object)


if __name__ == "__main__":
    if len(sys.argv) < 3:
        raise Exception(f"Usage: {sys.argv[0]} <input file> <output file>")

    input_file = sys.argv[1]
    output_file = sys.argv[2]

    parsed_sections = parse_sections(input_file)

    if parsed_sections is None:
        raise Exception(f"Memory configuration is not found in {input_file}")

    save_parsed_data(parsed_sections, output_file)
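For reference, the two removed scripts shared a simple tab-separated record format: map_parser.py wrote one line per symbol (section, subsection, address, size, demangled name, module, file, mangled name), and map_mariadb_insert.py read fields 0, 2, 3, 4, 5 and 6 of each line back into the `data` table. A minimal sketch with a hypothetical line:

# Sketch only: the sample line and symbol names below are hypothetical.
sample = ".text\t.text\t8008000\t124\tfuri_thread_start\tlibfuri.a\tthread.o\tfuri_thread_start\n"

fields = sample.split("\t")
record = (
    1,                   # header_id (hypothetical)
    fields[0],           # section
    int(fields[2], 16),  # address, written as hex by the parser
    int(fields[3]),      # size in bytes
    fields[4],           # demangled name
    fields[5],           # lib (module/archive)
    fields[6],           # obj_name (object file)
)
print(record)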