1
mirror of https://github.com/DarkFlippers/unleashed-firmware.git synced 2025-12-12 04:34:43 +04:00

Merge branch 'ofw_dev' into nfcrefactoring

This commit is contained in:
MX
2023-11-01 21:07:33 +03:00
462 changed files with 2679 additions and 2077 deletions

View File

@@ -52,10 +52,10 @@ ob.py set
# Assets delivery
Run in the root folder of the repo:
Build the firmware and run in the root folder of the repo:
```bash
python scripts/storage.py -p <flipper_cli_port> send assets/resources /ext
python scripts/storage.py -p <flipper_cli_port> send build/latest/resources /ext
```

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import os
import shutil
from flipper.app import App
from flipper.assets.icon import file2image
@@ -220,6 +221,7 @@ class Main(App):
if not os.path.isdir(directory_path):
self.logger.error(f'"{directory_path}" is not a directory')
exit(255)
manifest_file = os.path.join(directory_path, "Manifest")
old_manifest = Manifest()
if os.path.exists(manifest_file):
@@ -234,13 +236,15 @@ class Main(App):
self.logger.info("Comparing new manifest with existing")
only_in_old, changed, only_in_new = Manifest.compare(old_manifest, new_manifest)
for record in only_in_old:
self.logger.info(f"Only in old: {record}")
self.logger.debug(f"Only in old: {record}")
for record in changed:
self.logger.info(f"Changed: {record}")
for record in only_in_new:
self.logger.info(f"Only in new: {record}")
self.logger.debug(f"Only in new: {record}")
if any((only_in_old, changed, only_in_new)):
self.logger.warning("Manifests are different, updating")
self.logger.info(
f"Manifest updated ({len(only_in_new)} new, {len(only_in_old)} removed, {len(changed)} changed)"
)
new_manifest.save(manifest_file)
else:
self.logger.info("Manifest is up-to-date!")

View File

@@ -64,6 +64,7 @@ class FlipperApplication:
order: int = 0
sdk_headers: List[str] = field(default_factory=list)
targets: List[str] = field(default_factory=lambda: ["all"])
resources: Optional[str] = None
# .fap-specific
sources: List[str] = field(default_factory=lambda: ["*.c*"])
@@ -272,11 +273,15 @@ class AppBuildset:
self._check_unsatisfied() # unneeded?
self._check_target_match()
self._group_plugins()
self.apps = sorted(
self._apps = sorted(
list(map(self.appmgr.get, self.appnames)),
key=lambda app: app.appid,
)
@property
def apps(self):
return list(self._apps)
def _is_missing_dep(self, dep_name: str):
return dep_name not in self.appnames
@@ -385,13 +390,13 @@ class AppBuildset:
def get_apps_cdefs(self):
cdefs = set()
for app in self.apps:
for app in self._apps:
cdefs.update(app.cdefines)
return sorted(list(cdefs))
def get_sdk_headers(self):
sdk_headers = []
for app in self.apps:
for app in self._apps:
sdk_headers.extend(
[
src._appdir.File(header)
@@ -405,14 +410,14 @@ class AppBuildset:
return sorted(
filter(
lambda app: app.apptype == apptype,
self.appmgr.known_apps.values() if all_known else self.apps,
self.appmgr.known_apps.values() if all_known else self._apps,
),
key=lambda app: app.order,
)
def get_builtin_apps(self):
return list(
filter(lambda app: app.apptype in self.BUILTIN_APP_TYPES, self.apps)
filter(lambda app: app.apptype in self.BUILTIN_APP_TYPES, self._apps)
)
def get_builtin_app_folders(self):

View File

@@ -5,9 +5,10 @@ from ansi.color import fg
from SCons.Action import Action
from SCons.Builder import Builder
from SCons.Errors import StopError
from SCons.Node.FS import File
def icons_emitter(target, source, env):
def _icons_emitter(target, source, env):
icons_src = env.GlobRecursive("*.png", env["ICON_SRC_DIR"])
icons_src += env.GlobRecursive("**/frame_rate", env["ICON_SRC_DIR"])
@@ -18,7 +19,7 @@ def icons_emitter(target, source, env):
return target, icons_src
def proto_emitter(target, source, env):
def _proto_emitter(target, source, env):
target = []
for src in source:
basename = os.path.splitext(src.name)[0]
@@ -27,25 +28,26 @@ def proto_emitter(target, source, env):
return target, source
def dolphin_emitter(target, source, env):
def _dolphin_emitter(target, source, env):
res_root_dir = source[0].Dir(env["DOLPHIN_RES_TYPE"])
source = [res_root_dir]
source = list()
source.extend(env.GlobRecursive("*.*", res_root_dir.srcnode()))
target_base_dir = target[0]
env.Replace(_DOLPHIN_OUT_DIR=target[0])
env.Replace(_DOLPHIN_SRC_DIR=res_root_dir)
if env["DOLPHIN_RES_TYPE"] == "external":
target = [target_base_dir.File("manifest.txt")]
## A detailed list of files to be generated
## works better if we just leave target the folder
# target = []
# Not used ATM, because it inflates the internal dependency graph too much
# Preserve original paths, do .png -> .bm conversion
# target.extend(
# map(
# lambda node: target_base_dir.File(
# res_root_dir.rel_path(node).replace(".png", ".bm")
# ),
# filter(lambda node: isinstance(node, SCons.Node.FS.File), source),
# filter(lambda node: isinstance(node, File), source),
# )
# )
else:
@@ -55,7 +57,7 @@ def dolphin_emitter(target, source, env):
target_base_dir.File(asset_basename + ".h"),
]
# Debug output
## Debug output
# print(
# f"Dolphin res type: {env['DOLPHIN_RES_TYPE']},\ntarget files:",
# list(f.path for f in target),
@@ -65,7 +67,7 @@ def dolphin_emitter(target, source, env):
return target, source
def _invoke_git(args, source_dir):
def __invoke_git(args, source_dir):
cmd = ["git"]
cmd.extend(args)
return (
@@ -75,11 +77,11 @@ def _invoke_git(args, source_dir):
)
def proto_ver_generator(target, source, env):
def _proto_ver_generator(target, source, env):
target_file = target[0]
src_dir = source[0].dir.abspath
try:
_invoke_git(
__invoke_git(
["fetch", "--tags"],
source_dir=src_dir,
)
@@ -88,7 +90,7 @@ def proto_ver_generator(target, source, env):
print(fg.boldred("Git: fetch failed"))
try:
git_describe = _invoke_git(
git_describe = __invoke_git(
["describe", "--tags", "--abbrev=0"],
source_dir=src_dir,
)
@@ -127,7 +129,6 @@ def generate(env):
ICONSCOMSTR="\tICONS\t${TARGET}",
PROTOCOMSTR="\tPROTO\t${SOURCE}",
DOLPHINCOMSTR="\tDOLPHIN\t${DOLPHIN_RES_TYPE}",
RESMANIFESTCOMSTR="\tMANIFEST\t${TARGET}",
PBVERCOMSTR="\tPBVER\t${TARGET}",
)
@@ -135,37 +136,74 @@ def generate(env):
BUILDERS={
"IconBuilder": Builder(
action=Action(
'${PYTHON3} ${ASSETS_COMPILER} icons ${ICON_SRC_DIR} ${TARGET.dir} --filename "${ICON_FILE_NAME}"',
[
[
"${PYTHON3}",
"${ASSETS_COMPILER}",
"icons",
"${ICON_SRC_DIR}",
"${TARGET.dir}",
"--filename",
"${ICON_FILE_NAME}",
],
],
"${ICONSCOMSTR}",
),
emitter=icons_emitter,
emitter=_icons_emitter,
),
"ProtoBuilder": Builder(
action=Action(
"${PYTHON3} ${NANOPB_COMPILER} -q -I${SOURCE.dir.posix} -D${TARGET.dir.posix} ${SOURCES.posix}",
[
[
"${PYTHON3}",
"${NANOPB_COMPILER}",
"-q",
"-I${SOURCE.dir.posix}",
"-D${TARGET.dir.posix}",
"${SOURCES.posix}",
],
],
"${PROTOCOMSTR}",
),
emitter=proto_emitter,
emitter=_proto_emitter,
suffix=".pb.c",
src_suffix=".proto",
),
"DolphinSymBuilder": Builder(
action=Action(
"${PYTHON3} ${ASSETS_COMPILER} dolphin -s dolphin_${DOLPHIN_RES_TYPE} ${SOURCE} ${_DOLPHIN_OUT_DIR}",
[
[
"${PYTHON3}",
"${ASSETS_COMPILER}",
"dolphin",
"-s",
"dolphin_${DOLPHIN_RES_TYPE}",
"${_DOLPHIN_SRC_DIR}",
"${_DOLPHIN_OUT_DIR}",
],
],
"${DOLPHINCOMSTR}",
),
emitter=dolphin_emitter,
emitter=_dolphin_emitter,
),
"DolphinExtBuilder": Builder(
action=Action(
"${PYTHON3} ${ASSETS_COMPILER} dolphin ${SOURCE} ${_DOLPHIN_OUT_DIR}",
[
[
"${PYTHON3}",
"${ASSETS_COMPILER}",
"dolphin",
"${_DOLPHIN_SRC_DIR}",
"${_DOLPHIN_OUT_DIR}",
],
],
"${DOLPHINCOMSTR}",
),
emitter=dolphin_emitter,
emitter=_dolphin_emitter,
),
"ProtoVerBuilder": Builder(
action=Action(
proto_ver_generator,
_proto_ver_generator,
"${PBVERCOMSTR}",
),
),

View File

@@ -96,7 +96,21 @@ def DistCommand(env, name, source, **kw):
command = env.Command(
target,
source,
'@${PYTHON3} "${DIST_SCRIPT}" copy -p ${DIST_PROJECTS} -s "${DIST_SUFFIX}" ${DIST_EXTRA}',
action=Action(
[
[
"${PYTHON3}",
"${DIST_SCRIPT}",
"copy",
"-p",
"${DIST_PROJECTS}",
"-s",
"${DIST_SUFFIX}",
"${DIST_EXTRA}",
]
],
"${DISTCOMSTR}",
),
**kw,
)
env.Pseudo(target)
@@ -106,7 +120,10 @@ def DistCommand(env, name, source, **kw):
def generate(env):
if not env["VERBOSE"]:
env.SetDefault(COPROCOMSTR="\tCOPRO\t${TARGET}")
env.SetDefault(
COPROCOMSTR="\tCOPRO\t${TARGET}",
DISTCOMSTR="\tDIST\t${TARGET}",
)
env.AddMethod(AddFwProject)
env.AddMethod(DistCommand)
env.AddMethod(AddFwFlashTarget)

View File

@@ -1,7 +1,5 @@
import itertools
import os
import pathlib
import shutil
from dataclasses import dataclass, field
from typing import Dict, List, Optional
@@ -149,16 +147,10 @@ class AppBuilder:
CPPPATH=[self.app_work_dir, self.app._appdir],
)
app_sources = list(
itertools.chain.from_iterable(
self.app_env.GlobRecursive(
source_type,
self.app_work_dir,
exclude="lib",
)
for source_type in self.app.sources
)
app_sources = self.app_env.GatherSources(
[self.app.sources, "!lib"], self.app_work_dir
)
if not app_sources:
raise UserError(f"No source files found for {self.app.appid}")
@@ -290,7 +282,7 @@ def prepare_app_metadata(target, source, env):
)
def validate_app_imports(target, source, env):
def _validate_app_imports(target, source, env):
sdk_cache = SdkCache(env["SDK_DEFINITION"].path, load_version_only=False)
app_syms = set()
with open(target[0].path, "rt") as f:
@@ -342,35 +334,7 @@ def GetExtAppByIdOrPath(env, app_dir):
return app_artifacts
def resources_fap_dist_emitter(target, source, env):
# Initially we have a single target - target dir
# Here we inject pairs of (target, source) for each file
resources_root = target[0]
target = []
for app_artifacts in env["EXT_APPS"].values():
for _, dist_path in filter(
lambda dist_entry: dist_entry[0], app_artifacts.dist_entries
):
source.append(app_artifacts.compact)
target.append(resources_root.File(dist_path))
assert len(target) == len(source)
return (target, source)
def resources_fap_dist_action(target, source, env):
# FIXME: find a proper way to remove stale files
target_dir = env.Dir("${RESOURCES_ROOT}/apps")
shutil.rmtree(target_dir.path, ignore_errors=True)
# Iterate over pairs generated in emitter
for src, target in zip(source, target):
os.makedirs(os.path.dirname(target.path), exist_ok=True)
shutil.copy(src.path, target.path)
def embed_app_metadata_emitter(target, source, env):
def _embed_app_metadata_emitter(target, source, env):
app = env["APP"]
# Hack: change extension for fap libs
@@ -407,33 +371,52 @@ def generate_embed_app_metadata_actions(source, target, env, for_signature):
Action(prepare_app_metadata, "$APPMETA_COMSTR"),
]
objcopy_str = (
"${OBJCOPY} "
"--remove-section .ARM.attributes "
"--add-section ${_FAP_META_SECTION}=${APP._section_fapmeta} "
)
objcopy_args = [
"${OBJCOPY}",
"--remove-section",
".ARM.attributes",
"--add-section",
"${_FAP_META_SECTION}=${APP._section_fapmeta}",
"--set-section-flags",
"${_FAP_META_SECTION}=contents,noload,readonly,data",
]
if app._section_fapfileassets:
actions.append(Action(prepare_app_file_assets, "$APPFILE_COMSTR"))
objcopy_str += (
"--add-section ${_FAP_FILEASSETS_SECTION}=${APP._section_fapfileassets} "
objcopy_args.extend(
(
"--add-section",
"${_FAP_FILEASSETS_SECTION}=${APP._section_fapfileassets}",
"--set-section-flags",
"${_FAP_FILEASSETS_SECTION}=contents,noload,readonly,data",
)
)
objcopy_str += (
"--set-section-flags ${_FAP_META_SECTION}=contents,noload,readonly,data "
"--strip-debug --strip-unneeded "
"--add-gnu-debuglink=${SOURCE} "
"${SOURCES} ${TARGET}"
objcopy_args.extend(
(
"--strip-debug",
"--strip-unneeded",
"--add-gnu-debuglink=${SOURCE}",
"${SOURCES}",
"${TARGET}",
)
)
actions.extend(
(
Action(
objcopy_str,
[objcopy_args],
"$APPMETAEMBED_COMSTR",
),
Action(
"${PYTHON3} ${FBT_SCRIPT_DIR}/fastfap.py ${TARGET} ${OBJCOPY}",
[
[
"${PYTHON3}",
"${FBT_SCRIPT_DIR}/fastfap.py",
"${TARGET}",
"${OBJCOPY}",
]
],
"$FASTFAP_COMSTR",
),
)
@@ -511,7 +494,6 @@ def generate(env, **kw):
)
if not env["VERBOSE"]:
env.SetDefault(
FAPDISTCOMSTR="\tFAPDIST\t${TARGET}",
APPMETA_COMSTR="\tAPPMETA\t${TARGET}",
APPFILE_COMSTR="\tAPPFILE\t${TARGET}",
APPMETAEMBED_COMSTR="\tFAP\t${TARGET}",
@@ -534,18 +516,11 @@ def generate(env, **kw):
env.Append(
BUILDERS={
"FapDist": Builder(
action=Action(
resources_fap_dist_action,
"$FAPDISTCOMSTR",
),
emitter=resources_fap_dist_emitter,
),
"EmbedAppMetadata": Builder(
generator=generate_embed_app_metadata_actions,
suffix=".fap",
src_suffix=".elf",
emitter=embed_app_metadata_emitter,
emitter=_embed_app_metadata_emitter,
),
"ValidateAppImports": Builder(
action=[
@@ -554,7 +529,7 @@ def generate(env, **kw):
None, # "$APPDUMP_COMSTR",
),
Action(
validate_app_imports,
_validate_app_imports,
"$APPCHECK_COMSTR",
),
],

View File

@@ -2,9 +2,9 @@ import json
class HardwareTargetLoader:
def __init__(self, env, target_scons_dir, target_id):
def __init__(self, env, root_target_scons_dir, target_id):
self.env = env
self.target_scons_dir = target_scons_dir
self.all_targets_root_dir = root_target_scons_dir
self.target_dir = self._getTargetDir(target_id)
# self.target_id = target_id
self.layered_target_dirs = []
@@ -23,7 +23,7 @@ class HardwareTargetLoader:
self._processTargetDefinitions(target_id)
def _getTargetDir(self, target_id):
return self.target_scons_dir.Dir(f"f{target_id}")
return self.all_targets_root_dir.Dir(f"f{target_id}")
def _loadDescription(self, target_id):
target_json_file = self._getTargetDir(target_id).File("target.json")
@@ -34,14 +34,14 @@ class HardwareTargetLoader:
return vals
def _processTargetDefinitions(self, target_id):
self.layered_target_dirs.append(f"targets/f{target_id}")
target_dir = self._getTargetDir(target_id)
self.layered_target_dirs.append(target_dir)
config = self._loadDescription(target_id)
for path_list in ("include_paths", "sdk_header_paths"):
getattr(self, path_list).extend(
f"#/firmware/targets/f{target_id}/{p}"
for p in config.get(path_list, [])
target_dir.Dir(p) for p in config.get(path_list, [])
)
self.excluded_sources.extend(config.get("excluded_sources", []))
@@ -50,7 +50,7 @@ class HardwareTargetLoader:
file_attrs = (
# (name, use_src_node)
("startup_script", False),
("startup_script", True),
("linker_script_flash", True),
("linker_script_ram", True),
("linker_script_app", True),
@@ -59,9 +59,10 @@ class HardwareTargetLoader:
for attr_name, use_src_node in file_attrs:
if (val := config.get(attr_name)) and not getattr(self, attr_name):
node = self.env.File(f"firmware/targets/f{target_id}/{val}")
node = target_dir.File(val)
if use_src_node:
node = node.srcnode()
# print(f"Got node {node}, {node.path} for {attr_name}")
setattr(self, attr_name, node)
for attr_name in ("linker_dependencies",):
@@ -84,8 +85,8 @@ class HardwareTargetLoader:
)
seen_filenames.update(f.name for f in accepted_sources)
sources.extend(accepted_sources)
# print(f"Found {len(sources)} sources: {list(f.name for f in sources)}")
return sources
# print(f"Found {len(sources)} sources: {list(f.path for f in sources)}")
return list(f.get_path(self.all_targets_root_dir) for f in sources)
def gatherSdkHeaders(self):
sdk_headers = []
@@ -101,7 +102,7 @@ class HardwareTargetLoader:
def ConfigureForTarget(env, target_id):
target_loader = HardwareTargetLoader(env, env.Dir("#/firmware/targets"), target_id)
target_loader = HardwareTargetLoader(env, env["TARGETS_ROOT"], target_id)
env.Replace(
TARGET_CFG=target_loader,
SDK_DEFINITION=target_loader.sdk_symbols,

View File

@@ -0,0 +1,117 @@
import os
import shutil
from SCons.Action import Action
from SCons.Builder import Builder
from SCons.Errors import StopError
from SCons.Node.FS import Dir, File
def __generate_resources_dist_entries(env):
src_target_entries = []
resources_root = env.Dir(env["RESOURCES_ROOT"])
for app_artifacts in env["FW_EXTAPPS"].application_map.values():
for _, dist_path in filter(
lambda dist_entry: dist_entry[0], app_artifacts.dist_entries
):
src_target_entries.append(
(
app_artifacts.compact,
resources_root.File(dist_path),
)
)
# Deploy apps' resources too
for app in env["APPBUILD"].apps:
if not app.resources:
continue
apps_resource_dir = app._appdir.Dir(app.resources)
for res_file in env.GlobRecursive("*", apps_resource_dir):
if not isinstance(res_file, File):
continue
src_target_entries.append(
(
res_file,
resources_root.File(
res_file.get_path(apps_resource_dir),
),
)
)
# Deploy other stuff from _EXTRA_DIST
for extra_dist in env["_EXTRA_DIST"]:
if isinstance(extra_dist, Dir):
src_target_entries.append(
(
extra_dist,
resources_root.Dir(extra_dist.name),
)
)
else:
raise StopError(f"Unsupported extra dist type: {type(extra_dist)}")
return src_target_entries
def _resources_dist_emitter(target, source, env):
src_target_entries = __generate_resources_dist_entries(env)
source = list(map(lambda entry: entry[0], src_target_entries))
return (target, source)
def _resources_dist_action(target, source, env):
dist_entries = __generate_resources_dist_entries(env)
assert len(dist_entries) == len(source)
shutil.rmtree(env.Dir(env["RESOURCES_ROOT"]).abspath, ignore_errors=True)
for src, target in dist_entries:
if isinstance(src, File):
os.makedirs(os.path.dirname(target.path), exist_ok=True)
shutil.copy(src.path, target.path)
elif isinstance(src, Dir):
shutil.copytree(src.path, target.path)
else:
raise StopError(f"Unsupported dist entry type: {type(src)}")
def generate(env, **kw):
env.SetDefault(
ASSETS_COMPILER="${FBT_SCRIPT_DIR}/assets.py",
)
if not env["VERBOSE"]:
env.SetDefault(
RESOURCEDISTCOMSTR="\tRESDIST\t${RESOURCES_ROOT}",
RESMANIFESTCOMSTR="\tMANIFST\t${TARGET}",
)
env.Append(
BUILDERS={
"ManifestBuilder": Builder(
action=[
Action(
_resources_dist_action,
"${RESOURCEDISTCOMSTR}",
),
Action(
[
[
"${PYTHON3}",
"${ASSETS_COMPILER}",
"manifest",
"${TARGET.dir.posix}",
"--timestamp=${GIT_UNIX_TIMESTAMP}",
]
],
"${RESMANIFESTCOMSTR}",
),
],
emitter=_resources_dist_emitter,
),
}
)
def exists(env):
return True

View File

@@ -37,13 +37,13 @@ def ProcessSdkDepends(env, filename):
return depends
def api_amalgam_emitter(target, source, env):
def _api_amalgam_emitter(target, source, env):
target.append(env.ChangeFileExtension(target[0], ".d"))
target.append(env.ChangeFileExtension(target[0], ".i.c"))
return target, source
def api_amalgam_gen_origin_header(target, source, env):
def _api_amalgam_gen_origin_header(target, source, env):
mega_file = env.subst("${TARGET}.c", target=target[0])
with open(mega_file, "wt") as sdk_c:
sdk_c.write(
@@ -183,12 +183,12 @@ class SdkTreeBuilder:
self._generate_sdk_meta()
def deploy_sdk_header_tree_action(target, source, env):
def _deploy_sdk_header_tree_action(target, source, env):
sdk_tree = SdkTreeBuilder(env, target, source)
return sdk_tree.deploy_action()
def deploy_sdk_header_tree_emitter(target, source, env):
def _deploy_sdk_header_tree_emitter(target, source, env):
sdk_tree = SdkTreeBuilder(env, target, source)
return sdk_tree.emitter(target, source, env)
@@ -227,7 +227,7 @@ def _check_sdk_is_up2date(sdk_cache: SdkCache):
)
def validate_api_cache(source, target, env):
def _validate_api_cache(source, target, env):
# print(f"Generating SDK for {source[0]} to {target[0]}")
current_sdk = SdkCollector()
current_sdk.process_source_file_for_sdk(source[0].path)
@@ -240,7 +240,7 @@ def validate_api_cache(source, target, env):
_check_sdk_is_up2date(sdk_cache)
def generate_api_table(source, target, env):
def _generate_api_table(source, target, env):
sdk_cache = SdkCache(source[0].path)
_check_sdk_is_up2date(sdk_cache)
@@ -278,10 +278,10 @@ def generate(env, **kw):
env.Append(
BUILDERS={
"ApiAmalgamator": Builder(
emitter=api_amalgam_emitter,
emitter=_api_amalgam_emitter,
action=[
Action(
api_amalgam_gen_origin_header,
_api_amalgam_gen_origin_header,
"$SDK_AMALGAMATE_HEADER_COMSTR",
),
Action(
@@ -293,15 +293,15 @@ def generate(env, **kw):
),
"SDKHeaderTreeExtractor": Builder(
action=Action(
deploy_sdk_header_tree_action,
_deploy_sdk_header_tree_action,
"$SDKTREE_COMSTR",
),
emitter=deploy_sdk_header_tree_emitter,
emitter=_deploy_sdk_header_tree_emitter,
src_suffix=".d",
),
"ApiTableValidator": Builder(
action=Action(
validate_api_cache,
_validate_api_cache,
"$SDKSYM_UPDATER_COMSTR",
),
suffix=".csv",
@@ -309,7 +309,7 @@ def generate(env, **kw):
),
"ApiSymbolTable": Builder(
action=Action(
generate_api_table,
_generate_api_table,
"$APITABLE_GENERATOR_COMSTR",
),
suffix=".h",

View File

@@ -2,7 +2,7 @@ from SCons.Action import Action
from SCons.Builder import Builder
def version_emitter(target, source, env):
def _version_emitter(target, source, env):
target_dir = target[0]
target = [
target_dir.File("version.inc.h"),
@@ -24,7 +24,7 @@ def generate(env):
'-o ${TARGET.dir.posix} --dir "${ROOT_DIR}"',
"${VERSIONCOMSTR}",
),
emitter=version_emitter,
emitter=_version_emitter,
),
}
)

View File

@@ -17,7 +17,7 @@ def _set_browser_action(target, source, env):
__no_browser = True
def emit_pvsreport(target, source, env):
def _emit_pvsreport(target, source, env):
target_dir = env["REPORT_DIR"]
if env["PLATFORM"] == "win32":
# Report generator on Windows emits to a subfolder of given output folder
@@ -96,7 +96,7 @@ def generate(env):
],
"${PVSCONVCOMSTR}",
),
emitter=emit_pvsreport,
emitter=_emit_pvsreport,
src_suffix=".log",
),
}

View File

@@ -1,7 +1,9 @@
import itertools
import SCons
from fbt.util import GLOB_FILE_EXCLUSION
from SCons.Script import Flatten
from SCons.Node.FS import has_glob_magic
from SCons.Script import Flatten
def GlobRecursive(env, pattern, node=".", exclude=[]):
@@ -23,12 +25,35 @@ def GlobRecursive(env, pattern, node=".", exclude=[]):
# Otherwise, just assume that file at path exists
else:
results.append(node.File(pattern))
## Debug
# print(f"Glob result for {pattern} from {node}: {results}")
return results
def GatherSources(env, sources_list, node="."):
sources_list = list(set(Flatten(sources_list)))
include_sources = list(filter(lambda x: not x.startswith("!"), sources_list))
exclude_sources = list(x[1:] for x in sources_list if x.startswith("!"))
gathered_sources = list(
itertools.chain.from_iterable(
env.GlobRecursive(
source_type,
node,
exclude=exclude_sources,
)
for source_type in include_sources
)
)
## Debug
# print(
# f"Gathered sources for {sources_list} from {node}: {list(f.path for f in gathered_sources)}"
# )
return gathered_sources
def generate(env):
env.AddMethod(GlobRecursive)
env.AddMethod(GatherSources)
def exists(env):

View File

@@ -55,7 +55,7 @@ class DolphinBubbleAnimation:
if not os.path.isfile(meta_filename):
raise Exception(f"Animation meta file doesn't exist: { meta_filename }")
self.logger.info(f"Loading meta from {meta_filename}")
self.logger.debug(f"Loading meta from {meta_filename}")
file = FlipperFormatFile()
file.load(meta_filename)

View File

@@ -1,60 +0,0 @@
#!/usr/bin/env python3
import json
from flipper.app import App
class Main(App):
def init(self):
self.subparsers = self.parser.add_subparsers(help="sub-command help")
# generate
self.parser_generate = self.subparsers.add_parser(
"generate", help="Generate JSON meta file"
)
self.parser_generate.add_argument("-p", dest="project", required=True)
self.parser_generate.add_argument(
"-DBUILD_DATE", dest="build_date", required=True
)
self.parser_generate.add_argument("-DGIT_COMMIT", dest="commit", required=True)
self.parser_generate.add_argument("-DGIT_BRANCH", dest="branch", required=True)
self.parser_generate.add_argument(
"-DTARGET", dest="target", type=int, required=True
)
self.parser_generate.set_defaults(func=self.generate)
# merge
self.parser_merge = self.subparsers.add_parser(
"merge", help="Merge JSON meta files"
)
self.parser_merge.add_argument(
"-i", dest="input", action="append", nargs="+", required=True
)
self.parser_merge.set_defaults(func=self.merge)
def generate(self):
meta = {}
for k, v in vars(self.args).items():
if k in ["project", "func", "debug"]:
continue
if isinstance(v, str):
v = v.strip('"')
meta[self.args.project + "_" + k] = v
print(json.dumps(meta, indent=4))
return 0
def merge(self):
full = {}
for path in self.args.input[0]:
with open(path, mode="r") as file:
dict = json.loads(file.read())
full.update(dict)
print(json.dumps(full, indent=4))
return 0
if __name__ == "__main__":
Main()()

View File

@@ -27,9 +27,9 @@ jobs:
name: 'ufbt: Build for ${{ matrix.name }}'
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Build with ufbt
uses: flipperdevices/flipperzero-ufbt-action@v0.1.1
uses: flipperdevices/flipperzero-ufbt-action@v0.1
id: build-app
with:
sdk-channel: ${{ matrix.sdk-channel }}