From 3d3db9f5b09aa3c23d66cb4b839edbd09c3f1439 Mon Sep 17 00:00:00 2001 From: Max Andreev Date: Wed, 15 May 2024 14:25:02 +0300 Subject: [PATCH] Add new map analyser (#3648) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add new map analyser * Fix typos Co-authored-by: hedger Co-authored-by: あく --- .github/workflows/build.yml | 29 +--- scripts/map_analyse_upload.py | 86 +++++++++++ scripts/map_mariadb_insert.py | 139 ----------------- scripts/map_parser.py | 274 ---------------------------------- 4 files changed, 92 insertions(+), 436 deletions(-) create mode 100755 scripts/map_analyse_upload.py delete mode 100755 scripts/map_mariadb_insert.py delete mode 100755 scripts/map_parser.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 252310af6d..2bc2178aea 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -19,7 +19,7 @@ jobs: strategy: fail-fast: false matrix: - target: [f7, f18] + target: [f7, f18] steps: - name: 'Wipe workspace' run: find ./ -mount -maxdepth 1 -exec rm -rf {} \; @@ -103,29 +103,12 @@ jobs: run: | cp build/${DEFAULT_TARGET}-firmware-*/firmware.elf.map map_analyser_files/firmware.elf.map cp build/${DEFAULT_TARGET}-firmware-*/firmware.elf map_analyser_files/firmware.elf - cp ${{ github.event_path }} map_analyser_files/event.json source scripts/toolchain/fbtenv.sh - get_size() - { - SECTION="$1"; - arm-none-eabi-size \ - -A map_analyser_files/firmware.elf \ - | grep "^$SECTION" | awk '{print $2}' - } - export BSS_SIZE="$(get_size ".bss")" - export TEXT_SIZE="$(get_size ".text")" - export RODATA_SIZE="$(get_size ".rodata")" - export DATA_SIZE="$(get_size ".data")" - export FREE_FLASH_SIZE="$(get_size ".free_flash")" - python3 -m pip install mariadb==1.1.6 cxxfilt==0.3.0 - python3 scripts/map_parser.py map_analyser_files/firmware.elf.map map_analyser_files/firmware.elf.map.all - python3 scripts/map_mariadb_insert.py \ - ${{ secrets.AMAP_MARIADB_USER }} \ - ${{ secrets.AMAP_MARIADB_PASSWORD }} \ - ${{ secrets.AMAP_MARIADB_HOST }} \ - ${{ secrets.AMAP_MARIADB_PORT }} \ - ${{ secrets.AMAP_MARIADB_DATABASE }} \ - map_analyser_files/firmware.elf.map.all + python3 scripts/map_analyse_upload.py \ + "--elf_file=map_analyser_files/firmware.elf" \ + "--map_file=map_analyser_files/firmware.elf.map" \ + "--analyser_url=${{ secrets.ANALYSER_URL }}" \ + "--analyser_token=${{ secrets.ANALYSER_TOKEN }}"; - name: 'Find previous comment' if: ${{ !github.event.pull_request.head.repo.fork && matrix.target == env.DEFAULT_TARGET && github.event.pull_request }} diff --git a/scripts/map_analyse_upload.py b/scripts/map_analyse_upload.py new file mode 100755 index 0000000000..38d9618796 --- /dev/null +++ b/scripts/map_analyse_upload.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +import os +import requests +import argparse +import subprocess + +# usage: +# COMMIT_HASH, COMMIT_MSG, BRANCH_NAME, +# PULL_ID(optional), PULL_NAME(optional) must be set as envs +# maybe from sctipts/get_env.py +# other args must be set via command line args + + +class AnalyseRequest: + def __init__(self): + self.commit_hash = os.environ["COMMIT_HASH"] + self.commit_msg = os.environ["COMMIT_MSG"] + self.branch_name = os.environ["BRANCH_NAME"] + self.pull_id = os.getenv("PULL_ID", default=None) + self.pull_name = os.getenv("PULL_NAME", default=None) + + def get_payload(self): + return vars(self) + + +class AnalyseUploader: + def __init__(self): + self.args = self.parse_args() + + @staticmethod + def get_sections_size(elf_file) -> 
dict: + ret = dict() + all_sizes = subprocess.check_output( + ["arm-none-eabi-size", "-A", elf_file], shell=False + ) + all_sizes = all_sizes.splitlines() + + sections_to_keep = (".text", ".rodata", ".data", ".bss", ".free_flash") + for line in all_sizes: + line = line.decode("utf-8") + parts = line.split() + if len(parts) != 3: + continue + section, size, _ = parts + if section not in sections_to_keep: + continue + section_size_payload_name = ( + section[1:] if section.startswith(".") else section + ) + section_size_payload_name += "_size" + ret[section_size_payload_name] = size + return ret + + @staticmethod + def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--elf_file", help="Firmware ELF file", required=True) + parser.add_argument("--map_file", help="Firmware MAP file", required=True) + parser.add_argument( + "--analyser_token", help="Analyser auth token", required=True + ) + parser.add_argument( + "--analyser_url", help="Analyser analyse url", required=True + ) + args = parser.parse_args() + return args + + def upload_analyse_request(self): + payload = AnalyseRequest().get_payload() | self.get_sections_size( + self.args.elf_file + ) + headers = {"Authorization": f"Bearer {self.args.analyser_token}"} + file = {"map_file": open(self.args.map_file, "rb")} + response = requests.post( + self.args.analyser_url, data=payload, files=file, headers=headers + ) + if not response.ok: + raise Exception( + f"Failed to upload map file, code: {response.status_code}, reason: {response.text}" + ) + + +if __name__ == "__main__": + analyzer = AnalyseUploader() + analyzer.upload_analyse_request() diff --git a/scripts/map_mariadb_insert.py b/scripts/map_mariadb_insert.py deleted file mode 100755 index a4c9ed5c78..0000000000 --- a/scripts/map_mariadb_insert.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 - -# Requiremets: -# mariadb==1.1.6 - -from datetime import datetime -import argparse -import mariadb -import sys -import os - - -def parseArgs(): - parser = argparse.ArgumentParser() - parser.add_argument("db_user", help="MariaDB user") - parser.add_argument("db_pass", help="MariaDB password") - parser.add_argument("db_host", help="MariaDB hostname") - parser.add_argument("db_port", type=int, help="MariaDB port") - parser.add_argument("db_name", help="MariaDB database") - parser.add_argument("report_file", help="Report file(.map.all)") - args = parser.parse_args() - return args - - -def mariadbConnect(args): - try: - conn = mariadb.connect( - user=args.db_user, - password=args.db_pass, - host=args.db_host, - port=args.db_port, - database=args.db_name, - ) - except mariadb.Error as e: - print(f"Error connecting to MariaDB: {e}") - sys.exit(1) - return conn - - -def parseEnv(): - outArr = [] - outArr.append(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) - outArr.append(os.getenv("COMMIT_HASH", default=None)) - outArr.append(os.getenv("COMMIT_MSG", default=None)) - outArr.append(os.getenv("BRANCH_NAME", default=None)) - outArr.append(os.getenv("BSS_SIZE", default=None)) - outArr.append(os.getenv("TEXT_SIZE", default=None)) - outArr.append(os.getenv("RODATA_SIZE", default=None)) - outArr.append(os.getenv("DATA_SIZE", default=None)) - outArr.append(os.getenv("FREE_FLASH_SIZE", default=None)) - outArr.append(os.getenv("PULL_ID", default=None)) - outArr.append(os.getenv("PULL_NAME", default=None)) - return outArr - - -def createTables(cur, conn): - headerTable = "CREATE TABLE IF NOT EXISTS `header` ( \ - `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \ - `datetime` datetime 
NOT NULL, \ - `commit` varchar(40) NOT NULL, \ - `commit_msg` text NOT NULL, \ - `branch_name` text NOT NULL, \ - `bss_size` int(10) unsigned NOT NULL, \ - `text_size` int(10) unsigned NOT NULL, \ - `rodata_size` int(10) unsigned NOT NULL, \ - `data_size` int(10) unsigned NOT NULL, \ - `free_flash_size` int(10) unsigned NOT NULL, \ - `pullrequest_id` int(10) unsigned DEFAULT NULL, \ - `pullrequest_name` text DEFAULT NULL, \ - PRIMARY KEY (`id`), \ - KEY `header_id_index` (`id`) )" - dataTable = "CREATE TABLE IF NOT EXISTS `data` ( \ - `header_id` int(10) unsigned NOT NULL, \ - `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \ - `section` text NOT NULL, \ - `address` text NOT NULL, \ - `size` int(10) unsigned NOT NULL, \ - `name` text NOT NULL, \ - `lib` text NOT NULL, \ - `obj_name` text NOT NULL, \ - PRIMARY KEY (`id`), \ - KEY `data_id_index` (`id`), \ - KEY `data_header_id_index` (`header_id`), \ - CONSTRAINT `data_header_id_foreign` FOREIGN KEY (`header_id`) REFERENCES `header` (`id`) )" - cur.execute(headerTable) - cur.execute(dataTable) - conn.commit() - - -def insertHeader(data, cur, conn): - query = "INSERT INTO `header` ( \ - datetime, commit, commit_msg, branch_name, bss_size, text_size, \ - rodata_size, data_size, free_flash_size, pullrequest_id, pullrequest_name) \ - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" - cur.execute(query, data) - conn.commit() - return cur.lastrowid - - -def parseFile(fileObj, headerID): - arr = [] - fileLines = fileObj.readlines() - for line in fileLines: - lineArr = [] - tempLineArr = line.split("\t") - lineArr.append(headerID) - lineArr.append(tempLineArr[0]) # section - lineArr.append(int(tempLineArr[2], 16)) # address hex - lineArr.append(int(tempLineArr[3])) # size - lineArr.append(tempLineArr[4]) # name - lineArr.append(tempLineArr[5]) # lib - lineArr.append(tempLineArr[6]) # obj_name - arr.append(tuple(lineArr)) - return arr - - -def insertData(data, cur, conn): - query = "INSERT INTO `data` ( \ - header_id, section, address, size, \ - name, lib, obj_name) \ - VALUES (?, ?, ?, ?, ? ,?, ?)" - cur.executemany(query, data) - conn.commit() - - -def main(): - args = parseArgs() - dbConn = mariadbConnect(args) - reportFile = open(args.report_file) - dbCurs = dbConn.cursor() - createTables(dbCurs, dbConn) - headerID = insertHeader(parseEnv(), dbCurs, dbConn) - insertData(parseFile(reportFile, headerID), dbCurs, dbConn) - reportFile.close() - dbCurs.close() - - -if __name__ == "__main__": - main() diff --git a/scripts/map_parser.py b/scripts/map_parser.py deleted file mode 100755 index 1efc4fe82f..0000000000 --- a/scripts/map_parser.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/env python3 - -# Requiremets: -# cxxfilt==0.3.0 - -# Most part of this code written by Lars-Dominik Braun https://github.com/PromyLOPh/linkermapviz -# and distributes under MIT licence - -# Copyright (c) 2017 Lars-Dominik Braun -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. 
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-
-import sys
-import re
-import os
-from typing import TextIO
-from cxxfilt import demangle
-
-
-class Objectfile:
-    def __init__(self, section: str, offset: int, size: int, comment: str):
-        self.section = section.strip()
-        self.offset = offset
-        self.size = size
-        self.path = (None, None)
-        self.basepath = None
-
-        if comment:
-            self.path = re.match(r"^(.+?)(?:\(([^\)]+)\))?$", comment).groups()
-            self.basepath = os.path.basename(self.path[0])
-
-        self.children = []
-
-    def __repr__(self) -> str:
-        return f"<Objectfile {self.section} {self.offset:x} {self.size:x} {self.path} {repr(self.children)}>"
-
-
-def update_children_size(children: list[list], subsection_size: int) -> list:
-    # set subsection size to an only child
-    if len(children) == 1:
-        children[0][1] = subsection_size
-        return children
-
-    rest_size = subsection_size
-
-    for index in range(1, len(children)):
-        if rest_size > 0:
-            # current size = current address - previous child address
-            child_size = children[index][0] - children[index - 1][0]
-            rest_size -= child_size
-            children[index - 1][1] = child_size
-
-    # if there is rest size, set it to the last child element
-    if rest_size > 0:
-        children[-1][1] = rest_size
-
-    return children
-
-
-def parse_sections(file_name: str) -> list:
-    """
-    Quick&Dirty parsing for GNU ld’s linker map output, needs LANG=C, because
-    some messages are localized.
-    """
-
-    sections = []
-    with open(file_name, "r") as file:
-        # skip until memory map is found
-        found = False
-
-        while True:
-            line = file.readline()
-            if not line:
-                break
-            if line.strip() == "Memory Configuration":
-                found = True
-                break
-
-        if not found:
-            raise Exception(f"Memory configuration is not found in the {input_file}")
-
-        # long section names result in a linebreak afterwards
-        sectionre = re.compile(
-            "(?P<section>.+?|.{14,}\n)[ ]+0x(?P<offset>[0-9a-f]+)[ ]+0x(?P<size>[0-9a-f]+)(?:[ ]+(?P<comment>.+))?\n+",
-            re.I,
-        )
-        subsectionre = re.compile(
-            "[ ]{16}0x(?P<offset>[0-9a-f]+)[ ]+(?P<function>.+)\n+", re.I
-        )
-        s = file.read()
-        pos = 0
-
-        while True:
-            m = sectionre.match(s, pos)
-            if not m:
-                # skip that line
-                try:
-                    nextpos = s.index("\n", pos) + 1
-                    pos = nextpos
-                    continue
-                except ValueError:
-                    break
-
-            pos = m.end()
-            section = m.group("section")
-            v = m.group("offset")
-            offset = int(v, 16) if v is not None else None
-            v = m.group("size")
-            size = int(v, 16) if v is not None else None
-            comment = m.group("comment")
-
-            if section != "*default*" and size > 0:
-                of = Objectfile(section, offset, size, comment)
-
-                if section.startswith(" "):
-                    children = []
-                    sections[-1].children.append(of)
-
-                    while True:
-                        m = subsectionre.match(s, pos)
-                        if not m:
-                            break
-                        pos = m.end()
-                        offset, function = m.groups()
-                        offset = int(offset, 16)
-                        if sections and sections[-1].children:
-                            children.append([offset, 0, function])
-
-                    if children:
-                        children = update_children_size(
-                            children=children, subsection_size=of.size
-                        )
-
-                    sections[-1].children[-1].children.extend(children)
-
-                else:
-                    sections.append(of)
-
-    return sections
-
-
-def get_subsection_name(section_name: str, subsection: Objectfile) -> str:
-    subsection_split_names = subsection.section.split(".")
-    if subsection.section.startswith("."):
-        subsection_split_names = subsection_split_names[1:]
-
-    return (
-        f".{subsection_split_names[1]}"
-        if len(subsection_split_names) > 2
-        else section_name
-    )
-
-
-def write_subsection(
-    section_name: str,
-    subsection_name: str,
-    address: str,
-    size: int,
-    demangled_name: str,
-    module_name: str,
-    file_name: str,
-    mangled_name: str,
-    write_file_object: TextIO,
-) -> None:
-    write_file_object.write(
-        f"{section_name}\t"
-        f"{subsection_name}\t"
-        f"{address}\t"
-        f"{size}\t"
-        f"{demangled_name}\t"
-        f"{module_name}\t"
-        f"{file_name}\t"
-        f"{mangled_name}\n"
-    )
-
-
-def save_subsection(
-    section_name: str, subsection: Objectfile, write_file_object: TextIO
-) -> None:
-    subsection_name = get_subsection_name(section_name, subsection)
-    module_name = subsection.path[0]
-    file_name = subsection.path[1]
-
-    if not file_name:
-        file_name, module_name = module_name, ""
-
-    if not subsection.children:
-        address = f"{subsection.offset:x}"
-        size = subsection.size
-        mangled_name = (
-            ""
-            if subsection.section == section_name
-            else subsection.section.split(".")[-1]
-        )
-        demangled_name = demangle(mangled_name) if mangled_name else mangled_name
-
-        write_subsection(
-            section_name=section_name,
-            subsection_name=subsection_name,
-            address=address,
-            size=size,
-            demangled_name=demangled_name,
-            module_name=module_name,
-            file_name=file_name,
-            mangled_name=mangled_name,
-            write_file_object=write_file_object,
-        )
-        return
-
-    for subsection_child in subsection.children:
-        address = f"{subsection_child[0]:x}"
-        size = subsection_child[1]
-        mangled_name = subsection_child[2]
-        demangled_name = demangle(mangled_name)
-
-        write_subsection(
-            section_name=section_name,
-            subsection_name=subsection_name,
-            address=address,
-            size=size,
-            demangled_name=demangled_name,
-            module_name=module_name,
-            file_name=file_name,
-            mangled_name=mangled_name,
-            write_file_object=write_file_object,
-        )
-
-
-def save_section(section: Objectfile, write_file_object: TextIO) -> None:
-    section_name = section.section
-    for subsection in section.children:
-        save_subsection(
-            section_name=section_name,
-            subsection=subsection,
-            write_file_object=write_file_object,
-        )
-
-
-def save_parsed_data(parsed_data: list[Objectfile], output_file_name: str) -> None:
-    with open(output_file_name, "w") as write_file_object:
-        for section in parsed_data:
-            if section.children:
-                save_section(section=section, write_file_object=write_file_object)
-
-
-if __name__ == "__main__":
-    if len(sys.argv) < 3:
-        raise Exception(f"Usage: {sys.argv[0]} <map_file> <report_file>")
-
-    input_file = sys.argv[1]
-    output_file = sys.argv[2]
-
-    parsed_sections = parse_sections(input_file)
-
-    if parsed_sections is None:
-        raise Exception(f"Memory configuration is not found in {input_file}")
-
-    save_parsed_data(parsed_sections, output_file)
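Note on local testing: the new uploader reads the commit metadata from environment
variables (COMMIT_HASH, COMMIT_MSG, BRANCH_NAME, and optionally PULL_ID/PULL_NAME) and
takes everything else as command-line flags, exactly as wired up in the workflow step
above. A minimal sketch of a manual run outside CI is shown below; the build path,
analyser URL, and token are placeholder values, and scripts/get_env.py (referenced in
the script's header comment) can be used in CI instead of the git commands to populate
the same variables.

    # assumes a firmware build already exists, the 'requests' package is installed,
    # and fbtenv.sh puts arm-none-eabi-size on PATH (as in the workflow step)
    source scripts/toolchain/fbtenv.sh
    export COMMIT_HASH="$(git rev-parse HEAD)"
    export COMMIT_MSG="$(git log -1 --pretty=%s)"
    export BRANCH_NAME="$(git rev-parse --abbrev-ref HEAD)"
    # PULL_ID and PULL_NAME are optional and may be left unset
    python3 scripts/map_analyse_upload.py \
        --elf_file=build/f7-firmware-D/firmware.elf \
        --map_file=build/f7-firmware-D/firmware.elf.map \
        --analyser_url="https://analyser.example.com/analyse" \
        --analyser_token="example-token"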