[FL-2832] fbt: more fixes & improvements (#1854)

* github: bundling debug folder with scripts; docs: fixes & updates; fbt: added FAP_EXAMPLES variable to enable building example apps. Disabled by default. fbt: added TERM to list of proxied environment variables
* fbt: better help output; disabled implicit_deps_unchanged; added color to import validator reports
* fbt: moved debug configuration to separate tool
* fbt: proper dependency tracker for SDK source file; renamed linker script for external apps
* fbt: fixed debug elf path
* fbt: packaging sdk archive
* scripts: fixed sconsdist.py
* fbt: reworked sdk packing; docs: updates
* docs: info on cli target; linter fixes
* fbt: moved main code to scripts folder
* scripts: packing update into .tgz
* fbt, scripts: reworked copro_dist to build .tgz
* scripts: fixed naming for archived updater package
* Scripts: fix ぐるぐる回る

Co-authored-by: Aleksandr Kutuzov <alleteam@gmail.com>
This commit is contained in:
hedger
2022-10-12 20:12:05 +04:00
committed by GitHub
parent afff1adf8f
commit eb4ff3c0fd
41 changed files with 413 additions and 272 deletions

View File

@@ -81,46 +81,41 @@ vars.AddVariables(
help="Enable debug tools to be built",
default=False,
),
)
vars.Add(
"DIST_SUFFIX",
help="Suffix for binaries in build output for dist targets",
default="local",
)
vars.Add(
"UPDATE_VERSION_STRING",
help="Version string for updater package",
default="${DIST_SUFFIX}",
)
vars.Add(
"COPRO_CUBE_VERSION",
help="Cube version",
default="",
)
vars.Add(
"COPRO_STACK_ADDR",
help="Core2 Firmware address",
default="0",
)
vars.Add(
"COPRO_STACK_BIN",
help="Core2 Firmware file name",
default="",
)
vars.Add(
"COPRO_DISCLAIMER",
help="Value to pass to bundling script to confirm dangerous operations",
default="",
)
vars.AddVariables(
BoolVariable(
"FAP_EXAMPLES",
help="Enable example applications to be built",
default=False,
),
(
"DIST_SUFFIX",
"Suffix for binaries in build output for dist targets",
"local",
),
(
"UPDATE_VERSION_STRING",
"Version string for updater package",
"${DIST_SUFFIX}",
),
(
"COPRO_CUBE_VERSION",
"Cube version",
"",
),
(
"COPRO_STACK_ADDR",
"Core2 Firmware address",
"0",
),
(
"COPRO_STACK_BIN",
"Core2 Firmware file name",
"",
),
(
"COPRO_DISCLAIMER",
"Value to pass to bundling script to confirm dangerous operations",
"",
),
PathVariable(
"COPRO_OB_DATA",
help="Path to OB reference data",
@@ -161,86 +156,75 @@ vars.AddVariables(
validator=PathVariable.PathAccept,
default="",
),
)
vars.Add(
"FBT_TOOLCHAIN_VERSIONS",
help="Whitelisted toolchain versions (leave empty for no check)",
default=tuple(),
)
vars.Add(
"OPENOCD_OPTS",
help="Options to pass to OpenOCD",
default="",
)
vars.Add(
"BLACKMAGIC",
help="Blackmagic probe location",
default="auto",
)
vars.Add(
"UPDATE_SPLASH",
help="Directory name with slideshow frames to render after installing update package",
default="update_default",
)
vars.Add(
"LOADER_AUTOSTART",
help="Application name to automatically run on Flipper boot",
default="",
)
vars.Add(
"FIRMWARE_APPS",
help="Map of (configuration_name->application_list)",
default={
"default": (
# Svc
"basic_services",
# Apps
"main_apps",
"system_apps",
# Settings
"settings_apps",
# Plugins
# "basic_plugins",
# Debug
# "debug_apps",
)
},
)
vars.Add(
"FIRMWARE_APP_SET",
help="Application set to use from FIRMWARE_APPS",
default="default",
)
vars.Add(
"APPSRC",
help="Application source directory for app to build & upload",
default="",
)
# List of tuples (directory, add_to_global_include_path)
vars.Add(
"APPDIRS",
help="Directories to search for firmware components & external apps",
default=[
("applications", False),
("applications/services", True),
("applications/main", True),
("applications/settings", False),
("applications/system", False),
("applications/debug", False),
("applications/plugins", False),
("applications/examples", False),
("applications_user", False),
],
(
"FBT_TOOLCHAIN_VERSIONS",
"Whitelisted toolchain versions (leave empty for no check)",
tuple(),
),
(
"OPENOCD_OPTS",
"Options to pass to OpenOCD",
"",
),
(
"BLACKMAGIC",
"Blackmagic probe location",
"auto",
),
(
"UPDATE_SPLASH",
"Directory name with slideshow frames to render after installing update package",
"update_default",
),
(
"LOADER_AUTOSTART",
"Application name to automatically run on Flipper boot",
"",
),
(
"FIRMWARE_APPS",
"Map of (configuration_name->application_list)",
{
"default": (
# Svc
"basic_services",
# Apps
"main_apps",
"system_apps",
# Settings
"settings_apps",
# Plugins
# "basic_plugins",
# Debug
# "debug_apps",
)
},
),
(
"FIRMWARE_APP_SET",
"Application set to use from FIRMWARE_APPS",
"default",
),
(
"APPSRC",
"Application source directory for app to build & upload",
"",
),
# List of tuples (directory, add_to_global_include_path)
(
"APPDIRS",
"Directories to search for firmware components & external apps",
[
("applications", False),
("applications/services", True),
("applications/main", True),
("applications/settings", False),
("applications/system", False),
("applications/debug", False),
("applications/plugins", False),
("applications_user", False),
],
),
)
Return("vars")

View File

@@ -1,6 +1,5 @@
import SCons
from SCons.Platform import TempFileMunge
from fbt import util
from fbt.util import tempfile_arg_esc_func, single_quote, wrap_tempfile
import os
import multiprocessing
@@ -13,14 +12,18 @@ forward_os_env = {
}
# Proxying CI environment to child processes & scripts
variables_to_forward = [
# CI/CD variables
"WORKFLOW_BRANCH_OR_TAG",
"DIST_SUFFIX",
# Python & other tools
"HOME",
"APPDATA",
"PYTHONHOME",
"PYTHONNOUSERSITE",
"TMP",
"TEMP",
# Colors for tools
"TERM",
]
if proxy_env := GetOption("proxy_env"):
variables_to_forward.extend(proxy_env.split(","))
@@ -79,7 +82,7 @@ if not coreenv["VERBOSE"]:
SetOption("num_jobs", multiprocessing.cpu_count())
# Avoiding re-scan of all sources on every startup
SetOption("implicit_cache", True)
SetOption("implicit_deps_unchanged", True)
# SetOption("implicit_deps_unchanged", True)
# More aggressive caching
SetOption("max_drift", 1)
# Random task queue - to discover issues with build logic faster
@@ -87,10 +90,10 @@ SetOption("max_drift", 1)
# Setting up temp file parameters - to overcome command line length limits
coreenv["TEMPFILEARGESCFUNC"] = util.tempfile_arg_esc_func
util.wrap_tempfile(coreenv, "LINKCOM")
util.wrap_tempfile(coreenv, "ARCOM")
coreenv["TEMPFILEARGESCFUNC"] = tempfile_arg_esc_func
wrap_tempfile(coreenv, "LINKCOM")
wrap_tempfile(coreenv, "ARCOM")
coreenv["SINGLEQUOTEFUNC"] = util.single_quote
coreenv["SINGLEQUOTEFUNC"] = single_quote
Return("coreenv")

View File

@@ -21,7 +21,7 @@ appenv = ENV.Clone(
)
appenv.Replace(
LINKER_SCRIPT="application-ext",
LINKER_SCRIPT="application_ext",
)
appenv.AppendUnique(
@@ -106,6 +106,7 @@ appenv.PhonyTarget("firmware_extapps", appenv.Action(legacy_app_build_stub, None
Alias("faps", extapps["compact"].values())
Alias("faps", extapps["validators"].values())
if appsrc := appenv.subst("$APPSRC"):
app_manifest, fap_file, app_validator = appenv.GetExtAppFromPath(appsrc)

View File

@@ -1,316 +0,0 @@
from dataclasses import dataclass, field
from typing import List, Optional, Tuple
from enum import Enum
import os
class FlipperManifestException(Exception):
    """Raised when an application manifest is missing, malformed, or fails to parse."""

    pass
class FlipperAppType(Enum):
    """Kinds of Flipper applications recognized by the build system.

    The string values are the names used in application.fam manifests.
    """

    SERVICE = "Service"
    SYSTEM = "System"
    APP = "App"
    PLUGIN = "Plugin"
    DEBUG = "Debug"
    ARCHIVE = "Archive"
    SETTINGS = "Settings"
    STARTUP = "StartupHook"
    EXTERNAL = "External"
    METAPACKAGE = "Package"
@dataclass
class FlipperApplication:
    """Declarative description of one application, as declared by an App() call
    in an application.fam manifest."""

    @dataclass
    class ExternallyBuiltFile:
        # File produced by an external command during the build.
        path: str
        command: str

    @dataclass
    class Library:
        # Private static library linked into a .fap.
        name: str
        fap_include_paths: List[str] = field(default_factory=lambda: ["."])
        sources: List[str] = field(default_factory=lambda: ["*.c*"])
        cflags: List[str] = field(default_factory=list)
        cdefines: List[str] = field(default_factory=list)
        cincludes: List[str] = field(default_factory=list)

    # Fields whose names start with this prefix are internal to fbt.
    # (Deliberately unannotated: annotating it would turn it into a dataclass field.)
    PRIVATE_FIELD_PREFIX = "_"

    appid: str
    apptype: FlipperAppType
    name: Optional[str] = ""
    entry_point: Optional[str] = None
    flags: List[str] = field(default_factory=lambda: ["Default"])
    cdefines: List[str] = field(default_factory=list)
    requires: List[str] = field(default_factory=list)
    conflicts: List[str] = field(default_factory=list)
    provides: List[str] = field(default_factory=list)
    stack_size: int = 2048
    icon: Optional[str] = None
    order: int = 0
    sdk_headers: List[str] = field(default_factory=list)
    # .fap-specific
    sources: List[str] = field(default_factory=lambda: ["*.c*"])
    fap_version: Tuple[int] = field(default_factory=lambda: (0, 1))
    fap_icon: Optional[str] = None
    fap_libs: List[str] = field(default_factory=list)
    fap_category: str = ""
    fap_description: str = ""
    fap_author: str = ""
    fap_weburl: str = ""
    fap_icon_assets: Optional[str] = None
    fap_extbuild: List[ExternallyBuiltFile] = field(default_factory=list)
    fap_private_libs: List[Library] = field(default_factory=list)
    # Internally used by fbt
    _appdir: Optional[object] = None
    _apppath: Optional[str] = None
class AppManager:
    """Registry of all known application manifests, keyed by appid."""

    def __init__(self):
        self.known_apps = {}

    def get(self, appname: str):
        """Look up an app by id; raise FlipperManifestException when unknown."""
        try:
            return self.known_apps[appname]
        except KeyError as _:
            raise FlipperManifestException(
                f"Missing application manifest for '{appname}'"
            )

    def find_by_appdir(self, appdir: str):
        """Return the first app whose source directory name matches, or None."""
        for app in self.known_apps.values():
            if app._appdir.name == appdir:
                return app
        return None

    def load_manifest(self, app_manifest_path: str, app_dir_node: object):
        """Execute a manifest file and register every App() it declares.

        NOTE(review): the manifest is run via exec() with full build
        privileges — only trusted manifests must be loaded.
        """
        if not os.path.exists(app_manifest_path):
            raise FlipperManifestException(
                f"App manifest not found at path {app_manifest_path}"
            )
        # print("Loading", app_manifest_path)
        app_manifests = []

        # Manifest DSL entry points; visible to the exec()'d code below by name.
        def App(*args, **kw):
            nonlocal app_manifests
            app_manifests.append(
                FlipperApplication(
                    *args,
                    **kw,
                    _appdir=app_dir_node,
                    _apppath=os.path.dirname(app_manifest_path),
                ),
            )

        def ExtFile(*args, **kw):
            return FlipperApplication.ExternallyBuiltFile(*args, **kw)

        def Lib(*args, **kw):
            return FlipperApplication.Library(*args, **kw)

        try:
            with open(app_manifest_path, "rt") as manifest_file:
                exec(manifest_file.read())
        except Exception as e:
            raise FlipperManifestException(
                f"Failed parsing manifest '{app_manifest_path}' : {e}"
            )
        if len(app_manifests) == 0:
            raise FlipperManifestException(
                f"App manifest '{app_manifest_path}' is malformed"
            )
        # print("Built", app_manifests)
        for app in app_manifests:
            self._add_known_app(app)

    def _add_known_app(self, app: FlipperApplication):
        # appids must be globally unique across all loaded manifests.
        if self.known_apps.get(app.appid, None):
            raise FlipperManifestException(f"Duplicate app declaration: {app.appid}")
        self.known_apps[app.appid] = app

    def filter_apps(self, applist: List[str]):
        """Resolve a list of app names (plus dependencies) into an AppBuildset."""
        return AppBuildset(self, applist)
class AppBuilderException(Exception):
    """Raised when an application set cannot be built (conflicts or missing deps)."""

    pass
class AppBuildset:
    """Dependency-closed, conflict-checked set of applications selected for a build."""

    # App types that are compiled into the main firmware binary.
    BUILTIN_APP_TYPES = (
        FlipperAppType.SERVICE,
        FlipperAppType.SYSTEM,
        FlipperAppType.APP,
        FlipperAppType.PLUGIN,
        FlipperAppType.DEBUG,
        FlipperAppType.ARCHIVE,
        FlipperAppType.SETTINGS,
        FlipperAppType.STARTUP,
    )

    def __init__(self, appmgr: AppManager, appnames: List[str]):
        self.appmgr = appmgr
        self.appnames = set(appnames)
        self._orig_appnames = appnames
        self._process_deps()
        self._check_conflicts()
        self._check_unsatisfied()  # unneeded?
        # Sort by appid for a stable, reproducible build order.
        self.apps = sorted(
            list(map(self.appmgr.get, self.appnames)),
            key=lambda app: app.appid,
        )

    def _is_missing_dep(self, dep_name: str):
        return dep_name not in self.appnames

    def _process_deps(self):
        """Expand self.appnames with 'provides'/'requires' entries until a fixpoint."""
        while True:
            provided = []
            for app in self.appnames:
                # print(app)
                provided.extend(
                    filter(
                        self._is_missing_dep,
                        self.appmgr.get(app).provides + self.appmgr.get(app).requires,
                    )
                )
            # print("provides round", provided)
            if len(provided) == 0:
                break
            self.appnames.update(provided)

    def _check_conflicts(self):
        """Raise AppBuilderException if any selected app conflicts with another."""
        conflicts = []
        for app in self.appnames:
            # print(app)
            if conflict_app_name := list(
                filter(
                    lambda dep_name: dep_name in self.appnames,
                    self.appmgr.get(app).conflicts,
                )
            ):
                conflicts.append((app, conflict_app_name))
        if len(conflicts):
            raise AppBuilderException(
                f"App conflicts for {', '.join(f'{conflict_dep[0]}: {conflict_dep[1]}' for conflict_dep in conflicts)}"
            )

    def _check_unsatisfied(self):
        """Raise AppBuilderException if a required dependency is not in the set."""
        unsatisfied = []
        for app in self.appnames:
            if missing_dep := list(
                filter(self._is_missing_dep, self.appmgr.get(app).requires)
            ):
                unsatisfied.append((app, missing_dep))
        if len(unsatisfied):
            raise AppBuilderException(
                f"Unsatisfied dependencies for {', '.join(f'{missing_dep[0]}: {missing_dep[1]}' for missing_dep in unsatisfied)}"
            )

    def get_apps_cdefs(self):
        """Sorted union of C defines contributed by all selected apps."""
        cdefs = set()
        for app in self.apps:
            cdefs.update(app.cdefines)
        return sorted(list(cdefs))

    def get_sdk_headers(self):
        """SDK headers exported by selected apps, as file nodes in each app's dir."""
        sdk_headers = []
        for app in self.apps:
            sdk_headers.extend([app._appdir.File(header) for header in app.sdk_headers])
        return sdk_headers

    def get_apps_of_type(self, apptype: FlipperAppType, all_known: bool = False):
        """Apps of the given type, sorted by their 'order' field."""
        return sorted(
            filter(
                lambda app: app.apptype == apptype,
                self.appmgr.known_apps.values() if all_known else self.apps,
            ),
            key=lambda app: app.order,
        )

    def get_builtin_apps(self):
        """Selected apps that are linked into the main firmware binary."""
        return list(
            filter(lambda app: app.apptype in self.BUILTIN_APP_TYPES, self.apps)
        )

    def get_builtin_app_folders(self):
        """Unique (app dir node, source glob) pairs for all firmware-builtin apps."""
        return sorted(
            set(
                (app._appdir, source_type)
                for app in self.get_builtin_apps()
                for source_type in app.sources
            )
        )
class ApplicationsCGenerator:
    """Generates the applications.c source listing all built-in applications."""

    # apptype -> (C descriptor type, C array name)
    APP_TYPE_MAP = {
        FlipperAppType.SERVICE: ("FlipperApplication", "FLIPPER_SERVICES"),
        FlipperAppType.SYSTEM: ("FlipperApplication", "FLIPPER_SYSTEM_APPS"),
        FlipperAppType.APP: ("FlipperApplication", "FLIPPER_APPS"),
        FlipperAppType.PLUGIN: ("FlipperApplication", "FLIPPER_PLUGINS"),
        FlipperAppType.DEBUG: ("FlipperApplication", "FLIPPER_DEBUG_APPS"),
        FlipperAppType.SETTINGS: ("FlipperApplication", "FLIPPER_SETTINGS_APPS"),
        FlipperAppType.STARTUP: ("FlipperOnStartHook", "FLIPPER_ON_SYSTEM_START"),
    }

    def __init__(self, buildset: AppBuildset, autorun_app: str = ""):
        self.buildset = buildset
        self.autorun = autorun_app

    def get_app_ep_forward(self, app: FlipperApplication):
        """C forward declaration for the app's entry point."""
        if app.apptype == FlipperAppType.STARTUP:
            return f"extern void {app.entry_point}();"
        return f"extern int32_t {app.entry_point}(void* p);"

    def get_app_descr(self, app: FlipperApplication):
        """C initializer for one application descriptor (bare entry point for hooks)."""
        if app.apptype == FlipperAppType.STARTUP:
            return app.entry_point
        return f"""
    {{.app = {app.entry_point},
    .name = "{app.name}",
    .stack_size = {app.stack_size},
    .icon = {f"&{app.icon}" if app.icon else "NULL"},
    .flags = {'|'.join(f"FlipperApplicationFlag{flag}" for flag in app.flags)} }}"""

    def generate(self):
        """Assemble the complete applications.c contents as a single string."""
        contents = [
            '#include "applications.h"',
            "#include <assets_icons.h>",
            f'const char* FLIPPER_AUTORUN_APP_NAME = "{self.autorun}";',
        ]
        for apptype in self.APP_TYPE_MAP:
            contents.extend(
                map(self.get_app_ep_forward, self.buildset.get_apps_of_type(apptype))
            )
            entry_type, entry_block = self.APP_TYPE_MAP[apptype]
            contents.append(f"const {entry_type} {entry_block}[] = {{")
            contents.append(
                ",\n".join(
                    map(self.get_app_descr, self.buildset.get_apps_of_type(apptype))
                )
            )
            contents.append("};")
            contents.append(
                f"const size_t {entry_block}_COUNT = COUNT_OF({entry_block});"
            )
        # The archive app is special-cased: a single standalone descriptor.
        archive_app = self.buildset.get_apps_of_type(FlipperAppType.ARCHIVE)
        if archive_app:
            contents.extend(
                [
                    self.get_app_ep_forward(archive_app[0]),
                    f"const FlipperApplication FLIPPER_ARCHIVE = {self.get_app_descr(archive_app[0])};",
                ]
            )
        return "\n".join(contents)

View File

@@ -1,85 +0,0 @@
from dataclasses import dataclass
import os
import struct
from dataclasses import dataclass, field
from .appmanifest import FlipperApplication
_MANIFEST_MAGIC = 0x52474448
@dataclass
class ElfManifestBaseHeader:
    """Fixed-size header common to all .fap manifest versions."""

    manifest_version: int
    api_version: int
    hardware_target_id: int
    # 'HDGR' read as a little-endian u32; mirrors module-level _MANIFEST_MAGIC.
    manifest_magic: int = 0x52474448

    def as_bytes(self):
        """Pack little-endian: u32 magic, u32 manifest version, u32 api version, i16 target."""
        return struct.pack(
            "<IIIh",
            self.manifest_magic,
            self.manifest_version,
            self.api_version,
            self.hardware_target_id,
        )
@dataclass
class ElfManifestV1:
    """Version-1 manifest payload: stack size, packed version, name and icon."""

    stack_size: int
    app_version: int
    name: str = ""
    icon: bytes = field(default=b"")

    def as_bytes(self):
        # Layout: i16 stack size, u32 app version, 32-byte ASCII name,
        # bool icon-present flag, 32-byte icon bitmap (zero-padded by struct).
        return struct.pack(
            "<hI32s?32s",
            self.stack_size,
            self.app_version,
            bytes(self.name.encode("ascii")),
            bool(self.icon),
            self.icon,
        )
def assemble_manifest_data(
    app_manifest: FlipperApplication,
    hardware_target: int,
    sdk_version,
):
    """Build the binary manifest blob embedded into a .fap file.

    Returns the concatenated base header + v1 payload bytes.
    Raises ValueError when the app icon is not 10x10 pixels or exceeds 32 bytes.
    """
    image_data = b""
    if app_manifest.fap_icon:
        # Imported lazily: icon tooling is only needed when an icon is declared.
        from flipper.assets.icon import file2image

        image = file2image(os.path.join(app_manifest._apppath, app_manifest.fap_icon))
        if (image.width, image.height) != (10, 10):
            raise ValueError(
                f"Flipper app icon must be 10x10 pixels, but {image.width}x{image.height} was given"
            )
        if len(image.data) > 32:
            raise ValueError(
                f"Flipper app icon must be 32 bytes or less, but {len(image.data)} bytes were given"
            )
        image_data = image.data
    # Pack (major, minor) into one u32: major in the high 16 bits.
    app_version_as_int = ((app_manifest.fap_version[0] & 0xFFFF) << 16) | (
        app_manifest.fap_version[1] & 0xFFFF
    )
    data = ElfManifestBaseHeader(
        manifest_version=1,
        api_version=sdk_version,
        hardware_target_id=hardware_target,
    ).as_bytes()
    data += ElfManifestV1(
        stack_size=app_manifest.stack_size,
        app_version=app_version_as_int,
        name=app_manifest.name,
        icon=image_data,
    ).as_bytes()
    return data

View File

@@ -1,519 +0,0 @@
import operator
import os
import csv
import operator
from enum import Enum, auto
from typing import List, Set, ClassVar, Any
from dataclasses import dataclass, field
from cxxheaderparser.parser import CxxParser
# 'Fixing' complaints about typedefs
CxxParser._fundamentals.discard("wchar_t")
from cxxheaderparser.types import (
EnumDecl,
Field,
ForwardDecl,
FriendDecl,
Function,
Method,
Typedef,
UsingAlias,
UsingDecl,
Variable,
Pointer,
Type,
PQName,
NameSpecifier,
FundamentalSpecifier,
Parameter,
Array,
Value,
Token,
FunctionType,
)
from cxxheaderparser.parserstate import (
State,
EmptyBlockState,
ClassBlockState,
ExternBlockState,
NamespaceBlockState,
)
@dataclass(frozen=True)
class ApiEntryFunction:
    """One exported function in the SDK API table."""

    name: str
    returns: str
    params: str
    csv_type: ClassVar[str] = "Function"

    def dictify(self):
        """Row form matching the api cache csv columns."""
        return dict(name=self.name, type=self.returns, params=self.params)
@dataclass(frozen=True)
class ApiEntryVariable:
    """One exported variable in the SDK API table."""

    name: str
    var_type: str
    csv_type: ClassVar[str] = "Variable"

    def dictify(self):
        """Row form matching the api cache csv columns (no params)."""
        return dict(name=self.name, type=self.var_type, params=None)
@dataclass(frozen=True)
class ApiHeader:
    """One header file exported as part of the SDK."""

    name: str
    csv_type: ClassVar[str] = "Header"

    def dictify(self):
        """Row form matching the api cache csv columns (name only)."""
        return dict(name=self.name, type=None, params=None)
@dataclass
class ApiEntries:
    """Complete SDK API surface: functions, variables and headers."""

    # These are sets, to avoid creating duplicates when we have multiple
    # declarations with same signature
    functions: Set[ApiEntryFunction] = field(default_factory=set)
    variables: Set[ApiEntryVariable] = field(default_factory=set)
    headers: Set[ApiHeader] = field(default_factory=set)
class SymbolManager:
    """Accumulates API entries while guarding against symbol-name hash collisions."""

    def __init__(self):
        self.api = ApiEntries()
        self.name_hashes = set()

    # Calculate hash of name and raise exception if it already is in the set
    def _name_check(self, name: str):
        name_hash = gnu_sym_hash(name)
        if name_hash in self.name_hashes:
            raise Exception(f"Hash collision on {name}")
        self.name_hashes.add(name_hash)

    def add_function(self, function_def: ApiEntryFunction):
        """Register a function entry; identical re-declarations are ignored."""
        if function_def in self.api.functions:
            return
        self._name_check(function_def.name)
        self.api.functions.add(function_def)

    def add_variable(self, variable_def: ApiEntryVariable):
        """Register a variable entry; identical re-declarations are ignored."""
        if variable_def in self.api.variables:
            return
        self._name_check(variable_def.name)
        self.api.variables.add(variable_def)

    def add_header(self, header: str):
        """Register a header by path (no hash check — headers are not symbols)."""
        self.api.headers.add(ApiHeader(header))
def gnu_sym_hash(name: str):
    """Return the trailing 8 chars of the hex form of a djb2-style hash of *name*.

    NOTE(review): unlike the canonical GNU symbol hash, the accumulator is
    never masked to 32 bits, so it grows with input length; only the last
    8 characters of the ``hex()`` string are used as the identifier.
    """
    # h * 33 + ord(c) is the same recurrence as (h << 5) + h + ord(c).
    acc = 0x1505
    for ch in name:
        acc = acc * 33 + ord(ch)
    return hex(acc)[-8:]
class SdkCollector:
    """Collects SDK headers and symbols by parsing C/C++ headers."""

    def __init__(self):
        self.symbol_manager = SymbolManager()

    def add_header_to_sdk(self, header: str):
        self.symbol_manager.add_header(header)

    def process_source_file_for_sdk(self, file_path: str):
        """Parse one source file and feed its declarations to the symbol manager."""
        visitor = SdkCxxVisitor(self.symbol_manager)
        with open(file_path, "rt") as f:
            content = f.read()
        parser = CxxParser(file_path, content, visitor, None)
        parser.parse()

    def get_api(self):
        """The accumulated ApiEntries collected so far."""
        return self.symbol_manager.api
def stringify_array_dimension(size_descr):
    """Render an array dimension descriptor; empty string when no size is given."""
    return stringify_descr(size_descr) if size_descr else ""
def stringify_array_descr(type_descr):
    """Return (element type string, dimension string) for an Array descriptor."""
    assert isinstance(type_descr, Array)
    return (
        stringify_descr(type_descr.array_of),
        stringify_array_dimension(type_descr.size),
    )
def stringify_descr(type_descr):
    """Recursively render a cxxheaderparser type descriptor as a C type string.

    Raises Exception for descriptor kinds that are not handled.
    """
    if isinstance(type_descr, (NameSpecifier, FundamentalSpecifier)):
        return type_descr.name
    elif isinstance(type_descr, PQName):
        return "::".join(map(stringify_descr, type_descr.segments))
    elif isinstance(type_descr, Pointer):
        # Hack: a pointer to a function is rendered by the FunctionType branch,
        # which already emits its own "(*)" — don't append another "*".
        if isinstance(type_descr.ptr_to, FunctionType):
            return stringify_descr(type_descr.ptr_to)
        return f"{stringify_descr(type_descr.ptr_to)}*"
    elif isinstance(type_descr, Type):
        return (
            f"{'const ' if type_descr.const else ''}"
            f"{'volatile ' if type_descr.volatile else ''}"
            f"{stringify_descr(type_descr.typename)}"
        )
    elif isinstance(type_descr, Parameter):
        return stringify_descr(type_descr.type)
    elif isinstance(type_descr, Array):
        # Hack for 2d arrays: emit both dimensions in declaration order.
        if isinstance(type_descr.array_of, Array):
            argtype, dimension = stringify_array_descr(type_descr.array_of)
            return (
                f"{argtype}[{stringify_array_dimension(type_descr.size)}][{dimension}]"
            )
        return f"{stringify_descr(type_descr.array_of)}[{stringify_array_dimension(type_descr.size)}]"
    elif isinstance(type_descr, Value):
        return " ".join(map(stringify_descr, type_descr.tokens))
    elif isinstance(type_descr, FunctionType):
        return f"{stringify_descr(type_descr.return_type)} (*)({', '.join(map(stringify_descr, type_descr.parameters))})"
    elif isinstance(type_descr, Token):
        return type_descr.value
    elif type_descr is None:
        return ""
    else:
        raise Exception("unsupported type_descr: %s" % type_descr)
class SdkCxxVisitor:
    """cxxheaderparser visitor that extracts SDK-relevant declarations.

    Only extern variables and non-inline, body-less function declarations
    are collected; every other callback is a no-op required by the
    visitor interface.
    """

    def __init__(self, symbol_manager: SymbolManager):
        self.api = symbol_manager

    def on_variable(self, state: State, v: Variable) -> None:
        # Only extern variables are part of the public API surface.
        if not v.extern:
            return
        self.api.add_variable(
            ApiEntryVariable(
                stringify_descr(v.name),
                stringify_descr(v.type),
            )
        )

    def on_function(self, state: State, fn: Function) -> None:
        # Inline functions and definitions-with-bodies are not exported symbols.
        if fn.inline or fn.has_body:
            return
        self.api.add_function(
            ApiEntryFunction(
                stringify_descr(fn.name),
                stringify_descr(fn.return_type),
                ", ".join(map(stringify_descr, fn.parameters))
                + (", ..." if fn.vararg else ""),
            )
        )

    # The callbacks below are intentionally empty.
    def on_define(self, state: State, content: str) -> None:
        pass

    def on_pragma(self, state: State, content: str) -> None:
        pass

    def on_include(self, state: State, filename: str) -> None:
        pass

    def on_empty_block_start(self, state: EmptyBlockState) -> None:
        pass

    def on_empty_block_end(self, state: EmptyBlockState) -> None:
        pass

    def on_extern_block_start(self, state: ExternBlockState) -> None:
        pass

    def on_extern_block_end(self, state: ExternBlockState) -> None:
        pass

    def on_namespace_start(self, state: NamespaceBlockState) -> None:
        pass

    def on_namespace_end(self, state: NamespaceBlockState) -> None:
        pass

    def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
        pass

    def on_typedef(self, state: State, typedef: Typedef) -> None:
        pass

    def on_using_namespace(self, state: State, namespace: List[str]) -> None:
        pass

    def on_using_alias(self, state: State, using: UsingAlias) -> None:
        pass

    def on_using_declaration(self, state: State, using: UsingDecl) -> None:
        pass

    def on_enum(self, state: State, enum: EnumDecl) -> None:
        pass

    def on_class_start(self, state: ClassBlockState) -> None:
        pass

    def on_class_field(self, state: State, f: Field) -> None:
        pass

    def on_class_method(self, state: ClassBlockState, method: Method) -> None:
        pass

    def on_class_friend(self, state: ClassBlockState, friend: FriendDecl) -> None:
        pass

    def on_class_end(self, state: ClassBlockState) -> None:
        pass
@dataclass(frozen=True)
class SdkVersion:
    """Immutable major.minor SDK version, serializable to the API csv cache."""

    major: int = 0
    minor: int = 0
    csv_type: ClassVar[str] = "Version"

    def __str__(self) -> str:
        return "{}.{}".format(self.major, self.minor)

    def as_int(self) -> int:
        # Pack as 16-bit major (high half) | 16-bit minor (low half).
        packed = (self.major & 0xFFFF) << 16
        packed |= self.minor & 0xFFFF
        return packed

    @staticmethod
    def from_str(s: str) -> "SdkVersion":
        """Parse a 'major.minor' string into an SdkVersion."""
        major, minor = map(int, s.split("."))
        return SdkVersion(major, minor)

    def dictify(self) -> dict:
        """Row form matching the api cache csv columns."""
        return {"name": str(self), "type": None, "params": None}
class VersionBump(Enum):
    """Which part of the SDK version must be bumped after an API comparison."""

    NONE = auto()
    MAJOR = auto()
    MINOR = auto()
class ApiEntryState(Enum):
    """Review status of an entry in the API csv cache (the 'status' column)."""

    PENDING = "?"
    APPROVED = "+"
    DISABLED = "-"
    # Special value for API version entry so users have less incentive to edit it
    VERSION_PENDING = "v"
# Class that stores all known API entries, both enabled and disabled.
# Also keeps track of API versioning
# Allows comparison and update from newly-generated API
class SdkCache:
    # Column order of the csv cache file.
    CSV_FIELD_NAMES = ("entry", "status", "name", "type", "params")

    def __init__(self, cache_file: str, load_version_only=False):
        self.cache_file_name = cache_file
        self.version = SdkVersion(0, 0)
        self.sdk = ApiEntries()
        self.disabled_entries = set()
        self.new_entries = set()
        # True when the loaded csv carried a VERSION_PENDING version row.
        self.loaded_dirty_version = False
        self.version_action = VersionBump.NONE
        self._load_version_only = load_version_only
        self.load_cache()

    def is_buildable(self) -> bool:
        """True when the cached API is versioned, bump-free and has no pending entries."""
        return (
            self.version != SdkVersion(0, 0)
            and self.version_action == VersionBump.NONE
            and not self._have_pending_entries()
        )

    def _filter_enabled(self, sdk_entries):
        """Drop disabled entries and sort the rest by name."""
        return sorted(
            filter(lambda e: e not in self.disabled_entries, sdk_entries),
            key=operator.attrgetter("name"),
        )

    def get_valid_names(self):
        """Set of all enabled function and variable names."""
        syms = set(map(lambda e: e.name, self.get_functions()))
        syms.update(map(lambda e: e.name, self.get_variables()))
        return syms

    def get_functions(self):
        return self._filter_enabled(self.sdk.functions)

    def get_variables(self):
        return self._filter_enabled(self.sdk.variables)

    def get_headers(self):
        return self._filter_enabled(self.sdk.headers)

    def _get_entry_status(self, entry) -> str:
        """Map an entry to its ApiEntryState for csv serialization."""
        if entry in self.disabled_entries:
            return ApiEntryState.DISABLED
        elif entry in self.new_entries:
            if isinstance(entry, SdkVersion):
                return ApiEntryState.VERSION_PENDING
            return ApiEntryState.PENDING
        else:
            return ApiEntryState.APPROVED

    def _format_entry(self, obj):
        """Build a complete csv row dict for one entry."""
        obj_dict = obj.dictify()
        obj_dict.update(
            dict(
                entry=obj.csv_type,
                status=self._get_entry_status(obj).value,
            )
        )
        return obj_dict

    def save(self) -> None:
        """Apply the pending version bump and rewrite the csv cache when dirty."""
        if self._load_version_only:
            raise Exception("Only SDK version was loaded, cannot save")
        if self.version_action == VersionBump.MINOR:
            self.version = SdkVersion(self.version.major, self.version.minor + 1)
        elif self.version_action == VersionBump.MAJOR:
            self.version = SdkVersion(self.version.major + 1, 0)
        if self._have_pending_entries():
            # Mark the version itself as pending so the csv stays "dirty"
            # until every new entry is reviewed.
            self.new_entries.add(self.version)
            print(
                f"API version is still WIP: {self.version}. Review the changes and re-run command."
            )
            print(f"Entries to review:")
            print(
                "\n".join(
                    map(
                        str,
                        filter(
                            lambda e: not isinstance(e, SdkVersion), self.new_entries
                        ),
                    )
                )
            )
        else:
            print(f"API version {self.version} is up to date")
        regenerate_csv = (
            self.loaded_dirty_version
            or self._have_pending_entries()
            or self.version_action != VersionBump.NONE
        )
        if regenerate_csv:
            # Version row first, then headers/functions/variables sorted by name.
            str_cache_entries = [self.version]
            name_getter = operator.attrgetter("name")
            str_cache_entries.extend(sorted(self.sdk.headers, key=name_getter))
            str_cache_entries.extend(sorted(self.sdk.functions, key=name_getter))
            str_cache_entries.extend(sorted(self.sdk.variables, key=name_getter))
            with open(self.cache_file_name, "wt", newline="") as f:
                writer = csv.DictWriter(f, fieldnames=SdkCache.CSV_FIELD_NAMES)
                writer.writeheader()
                for entry in str_cache_entries:
                    writer.writerow(self._format_entry(entry))

    def _process_entry(self, entry_dict: dict) -> None:
        """Rebuild one entry from a csv row and file it into the right bucket."""
        entry_class = entry_dict["entry"]
        entry_status = entry_dict["status"]
        entry_name = entry_dict["name"]
        entry = None
        if entry_class == SdkVersion.csv_type:
            self.version = SdkVersion.from_str(entry_name)
            if entry_status == ApiEntryState.VERSION_PENDING.value:
                self.loaded_dirty_version = True
        elif entry_class == ApiHeader.csv_type:
            self.sdk.headers.add(entry := ApiHeader(entry_name))
        elif entry_class == ApiEntryFunction.csv_type:
            self.sdk.functions.add(
                entry := ApiEntryFunction(
                    entry_name,
                    entry_dict["type"],
                    entry_dict["params"],
                )
            )
        elif entry_class == ApiEntryVariable.csv_type:
            self.sdk.variables.add(
                entry := ApiEntryVariable(entry_name, entry_dict["type"])
            )
        else:
            print(entry_dict)
            raise Exception("Unknown entry type: %s" % entry_class)
        if entry is None:
            return
        if entry_status == ApiEntryState.DISABLED.value:
            self.disabled_entries.add(entry)
        elif entry_status == ApiEntryState.PENDING.value:
            self.new_entries.add(entry)

    def load_cache(self) -> None:
        """Load the csv cache; with load_version_only, stop after the version row."""
        if not os.path.exists(self.cache_file_name):
            raise Exception(
                f"Cannot load symbol cache '{self.cache_file_name}'! File does not exist"
            )
        with open(self.cache_file_name, "rt") as f:
            reader = csv.DictReader(f)
            for row in reader:
                self._process_entry(row)
                if self._load_version_only and row.get("entry") == SdkVersion.csv_type:
                    break

    def _have_pending_entries(self) -> bool:
        """True when any non-version entry still awaits review."""
        return any(
            filter(
                lambda e: not isinstance(e, SdkVersion),
                self.new_entries,
            )
        )

    def sync_sets(
        self, known_set: Set[Any], new_set: Set[Any], update_version: bool = True
    ):
        """Merge a freshly-collected set into the cached one, tracking version impact.

        Additions are pending (minor bump); removal of an approved entry
        forces a major bump.
        """
        new_entries = new_set - known_set
        if new_entries:
            print(f"New: {new_entries}")
            known_set |= new_entries
            self.new_entries |= new_entries
            if update_version and self.version_action == VersionBump.NONE:
                self.version_action = VersionBump.MINOR
        removed_entries = known_set - new_set
        if removed_entries:
            print(f"Removed: {removed_entries}")
            known_set -= removed_entries
            # If any of removed entries was a part of active API, that's a major bump
            if update_version and any(
                filter(
                    lambda e: e not in self.disabled_entries
                    and e not in self.new_entries,
                    removed_entries,
                )
            ):
                self.version_action = VersionBump.MAJOR
            self.disabled_entries -= removed_entries
            self.new_entries -= removed_entries

    def validate_api(self, api: ApiEntries) -> None:
        """Diff a freshly-collected API against the cache; headers never bump the version."""
        self.sync_sets(self.sdk.headers, api.headers, False)
        self.sync_sets(self.sdk.functions, api.functions)
        self.sync_sets(self.sdk.variables, api.variables)

View File

@@ -1,65 +0,0 @@
import SCons
from SCons.Subst import quote_spaces
from SCons.Errors import StopError
import re
import os
import random
import string
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
def tempfile_arg_esc_func(arg):
    """Escape one command-line argument for use inside an SCons TEMPFILE.

    On non-Windows platforms only space-quoting is applied; on win32 the
    backslash path separators are additionally rewritten.
    """
    arg = quote_spaces(arg)
    if SCons.Platform.platform_default() != "win32":
        return arg
    # GCC requires double Windows slashes, let's use UNIX separator
    return WINPATHSEP_RE.sub(r"/\1", arg)
def wrap_tempfile(env, command):
    """Rewrite env[command] so SCons routes the command line through TEMPFILE.

    Works around OS command-line length limits for long link/archive lines;
    the "$<command>STR" display string is preserved for build output.
    """
    original = env[command]
    env[command] = f'${{TEMPFILE("{original}","${command}STR")}}'
def link_dir(target_path, source_path, is_windows):
    """(Re)create target_path as a link to source_path.

    Uses an NTFS junction on Windows and a symlink elsewhere; any existing
    link at target_path is removed first.
    """
    # print(f"link_dir: {target_path} -> {source_path}")
    if os.path.lexists(target_path) or os.path.exists(target_path):
        os.unlink(target_path)
    if is_windows:
        # Create junction — junctions don't need elevated privileges on Windows
        import _winapi

        if not os.path.isdir(source_path):
            raise StopError(f"Source directory {source_path} is not a directory")
        if not os.path.exists(target_path):
            _winapi.CreateJunction(source_path, target_path)
    else:
        os.symlink(source_path, target_path)
def single_quote(arg_list):
    """Join arguments with spaces, single-quoting any argument containing a space."""
    quoted = []
    for arg in arg_list:
        if " " in arg:
            quoted.append("'%s'" % arg)
        else:
            quoted.append(str(arg))
    return " ".join(quoted)
def link_elf_dir_as_latest(env, elf_node):
    """Point build/latest at the directory containing the just-built ELF."""
    elf_dir = elf_node.Dir(".")
    latest_dir = env.Dir("#build/latest")
    print(f"Setting {elf_dir} as latest built dir (./build/latest/)")
    return link_dir(latest_dir.abspath, elf_dir.abspath, env["PLATFORM"] == "win32")
def should_gen_cdb_and_link_dir(env, requested_targets):
    """Decide whether this env should own the compile DB and build/latest link.

    The updater environment wins only when an updater-related target was
    explicitly requested; the base-firmware environment wins otherwise.
    """
    # Hacky way to check if updater-related targets were requested
    wants_updater = any(
        "updater" in str(build_target) for build_target in requested_targets
    )
    is_updater = not env["IS_BASE_FIRMWARE"]
    # Link to the env that matches the request: updater iff explicitly asked for.
    return is_updater == wants_updater

View File

@@ -1,30 +0,0 @@
import subprocess
import datetime
from functools import cache
@cache
def get_git_commit_unix_timestamp():
    """Committer Unix timestamp of HEAD; memoized for the process lifetime."""
    return int(subprocess.check_output(["git", "show", "-s", "--format=%ct"]))
@cache
def get_fast_git_version_id():
    """Return (git describe string, today's date), or None when git fails.

    NOTE(review): @cache memoizes the failure case too — a transient git
    error sticks as None for the rest of the process.
    """
    try:
        version = (
            subprocess.check_output(
                [
                    "git",
                    "describe",
                    "--always",
                    "--dirty",
                    "--all",
                    "--long",
                ]
            )
            .strip()
            .decode()
        )
        return (version, datetime.date.today())
    except Exception as e:
        # Best-effort: implicitly returns None so callers can fall back.
        print("Failed to check for git changes", e)

View File

@@ -0,0 +1,23 @@
from fbt.util import link_dir
def link_elf_dir_as_latest(env, elf_node):
    """Point build/latest at the directory containing the just-built ELF."""
    elf_dir = elf_node.Dir(".")
    latest_dir = env.Dir("#build/latest")
    print(f"Setting {elf_dir} as latest built dir (./build/latest/)")
    return link_dir(latest_dir.abspath, elf_dir.abspath, env["PLATFORM"] == "win32")
def should_gen_cdb_and_link_dir(env, requested_targets):
    """True when the current env kind (updater vs firmware) matches the request."""
    # Hacky: detect updater intent by name substring in any requested target
    wants_updater = any("updater" in str(target) for target in requested_targets)
    building_updater = not env["IS_BASE_FIRMWARE"]
    return building_updater == wants_updater

View File

@@ -1,74 +0,0 @@
from SCons.Errors import StopError
class BlackmagicResolver:
    """Lazily resolves the address of a Blackmagic debug probe.

    An instance is stored in the env as BLACKMAGIC_ADDR; the (potentially
    slow) USB/DNS lookup only runs when the value is converted to a string
    during command substitution.
    """

    BLACKMAGIC_HOSTNAME = "blackmagic.local"

    def __init__(self, env):
        self.env = env

    # On Win:
    # 'location': '1-5:x.0', 'name': 'COM4',
    # 'location': '1-5:x.2', 'name': 'COM13',
    # On Linux:
    # 'location': '1-1.2:1.0', 'name': 'ttyACM0',
    # 'location': '1-1.2:1.2', 'name': 'ttyACM1',
    # On MacOS:
    # 'location': '0-1.3', 'name': 'cu.usbmodemblackmagic1',
    # 'location': '0-1.3', 'name': 'cu.usbmodemblackmagic3',
    def _find_probe(self):
        # Returns the first matching serial port, or None when no probe found.
        # NOTE(review): the len(ports) > 2 threshold suggests a single probe is
        # expected to expose two serial ports (see examples above) - confirm.
        import serial.tools.list_ports as list_ports

        ports = list(list_ports.grep("blackmagic"))
        if len(ports) == 0:
            # Blackmagic probe serial port not found, will be handled later
            pass
        elif len(ports) > 2:
            raise StopError("More than one Blackmagic probe found")
        else:
            # If you're getting any issues with auto lookup, uncomment this
            # print("\n".join([f"{p.device} {vars(p)}" for p in ports]))
            # Sorting by (location, name) makes the lower-numbered port win.
            return sorted(ports, key=lambda p: f"{p.location}_{p.name}")[0]

    # Look up blackmagic probe hostname with dns
    def _resolve_hostname(self):
        import socket

        try:
            return socket.gethostbyname(self.BLACKMAGIC_HOSTNAME)
        except socket.gaierror:
            print("Failed to resolve Blackmagic hostname")
            return None

    def get_serial(self):
        # USB-attached probe -> serial device path, or None.
        if not (probe := self._find_probe()):
            return None
        # print(f"Found Blackmagic probe on {probe.device}")
        if self.env.subst("$PLATFORM") == "win32":
            # Windows COM ports need the \\.\ device-namespace prefix.
            return f"\\\\.\\{probe.device}"
        return probe.device

    def get_networked(self):
        # Network-attached probe -> "tcp:<ip>:2345" GDB target, or None.
        if not (probe := self._resolve_hostname()):
            return None
        return f"tcp:{probe}:2345"

    def __str__(self):
        # print("distenv blackmagic", self.env.subst("$BLACKMAGIC"))
        # An explicit BLACKMAGIC=<addr> setting (anything but "auto") wins.
        if (blackmagic := self.env.subst("$BLACKMAGIC")) != "auto":
            return blackmagic
        # print("Looking for Blackmagic...")
        # Prefer a locally attached (serial) probe over a networked one.
        if probe := self.get_serial() or self.get_networked():
            return probe
        raise StopError("Please specify BLACKMAGIC=...")
def generate(env):
    """Default BLACKMAGIC_ADDR to a lazily-resolved probe address.

    BlackmagicResolver performs its lookup on str() conversion, i.e. only
    when the value is actually substituted into a command line.
    """
    env.SetDefault(BLACKMAGIC_ADDR=BlackmagicResolver(env))
def exists(env):
    """This tool is always considered available."""
    return True

View File

@@ -1,14 +0,0 @@
def exists(env=None):
    """Tool availability check: always available.

    SCons probes tool modules as ``exists(env)``; the original signature took
    no arguments and would raise TypeError if called that way. Accept an
    optional (unused) env for consistency with every other tool in this tree,
    while remaining backward compatible with zero-argument callers.
    """
    return True
def generate(env):
    """Wrap the C/C++ compilers with ccache when it is found on PATH."""
    if not env.WhereIs("ccache"):
        return
    env["CCACHE"] = "ccache"

    env["CC_NOCACHE"] = env["CC"]
    env["CC"] = "$CCACHE $CC_NOCACHE"

    # Tricky place: linking is done with CXX, and routing it through ccache
    # breaks the link step - keep LINK pointing at the uncached compiler.
    env["LINK"] = env["CXX"]

    env["CXX_NOCACHE"] = env["CXX"]
    env["CXX"] = "$CCACHE $CXX_NOCACHE"

View File

@@ -1,73 +0,0 @@
from SCons.Errors import StopError
from SCons.Tool import asm
from SCons.Tool import gcc
from SCons.Tool import gxx
from SCons.Tool import ar
from SCons.Tool import gnulink
import strip
import gdb
import objdump
from SCons.Action import _subproc
import subprocess
def prefix_commands(env, command_prefix, cmd_list):
    """Prepend *command_prefix* to each env entry named in *cmd_list* that exists."""
    for name in (cmd for cmd in cmd_list if cmd in env):
        env[name] = command_prefix + env[name]
def _get_tool_version(env, tool):
    # Return the first line of `<tool> --version` output, or "version unknown"
    # if the subprocess could not be started. Uses SCons' internal _subproc
    # so env-level process settings apply.
    verstr = "version unknown"
    proc = _subproc(
        env,
        env.subst("${%s} --version" % tool),
        stdout=subprocess.PIPE,
        stderr="devnull",
        stdin="devnull",
        universal_newlines=True,
        error="raise",
        shell=True,
    )
    if proc:
        verstr = proc.stdout.readline()
        # Drain/close the process so we don't leave a zombie behind.
        proc.communicate()
    return verstr
def generate(env, **kw):
    # Configure the cross toolchain: load base tools, apply the prefix
    # (e.g. "arm-none-eabi-") and optionally enforce a CC version whitelist.
    #
    # kw:
    #   toolchain_prefix: string prepended to every tool command name
    #   versions: iterable of allowed version substrings for CC output
    for orig_tool in (asm, gcc, gxx, ar, gnulink, strip, gdb, objdump):
        orig_tool.generate(env)

    env.SetDefault(
        TOOLCHAIN_PREFIX=kw.get("toolchain_prefix"),
    )

    prefix_commands(
        env,
        env.subst("$TOOLCHAIN_PREFIX"),
        [
            "AR",
            "AS",
            "CC",
            "CXX",
            "OBJCOPY",
            "RANLIB",
            "STRIP",
            "GDB",
            "GDBPY",
            "OBJDUMP",
        ],
    )

    # Call CC to check version
    if whitelisted_versions := kw.get("versions", ()):
        cc_version = _get_tool_version(env, "CC")
        # print("CC version =", cc_version)
        # print(list(filter(lambda v: v in cc_version, whitelisted_versions)))
        if not any(filter(lambda v: v in cc_version, whitelisted_versions)):
            raise StopError(
                f"Toolchain version is not supported. Allowed: {whitelisted_versions}, toolchain: {cc_version} "
            )
def exists(env):
    """Tool presence check - unconditionally true."""
    return True

View File

@@ -1,85 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Warnings import warn, WarningOnByDefault
import SCons
import os.path
from fbt.appmanifest import (
FlipperAppType,
AppManager,
ApplicationsCGenerator,
FlipperManifestException,
)
# Adding objects for application management to env
# AppManager env["APPMGR"] - loads all manifests; manages list of known apps
# AppBuildset env["APPBUILD"] - contains subset of apps, filtered for current config
def LoadApplicationManifests(env):
    """Scan every configured app directory and load application.fam manifests."""
    appmgr = env["APPMGR"] = AppManager()

    for app_dir, _ in env["APPDIRS"]:
        app_dir_node = env.Dir("#").Dir(app_dir)
        for entry in app_dir_node.glob("*", ondisk=True, source=True):
            # Only descend into real, non-hidden directories.
            if not isinstance(entry, SCons.Node.FS.Dir) or str(entry).startswith("."):
                continue
            try:
                app_manifest_file_path = os.path.join(entry.abspath, "application.fam")
                appmgr.load_manifest(app_manifest_file_path, entry)
                env.Append(PY_LINT_SOURCES=[app_manifest_file_path])
            except FlipperManifestException as e:
                # A broken manifest shouldn't abort the whole scan - just warn.
                warn(WarningOnByDefault, str(e))
def PrepareApplicationsBuild(env):
    """Filter known apps by the APPS config and expose their SDK headers."""
    appbuild = env["APPBUILD"] = env["APPMGR"].filter_apps(env["APPS"])
    env.Append(SDK_HEADERS=appbuild.get_sdk_headers())
    # Action handle for dumping the resulting configuration on demand.
    env["APPBUILD_DUMP"] = env.Action(DumpApplicationConfig, "\tINFO\t")
def DumpApplicationConfig(target, source, env):
    """Print the loaded app count and the configured apps grouped by type."""
    print(f"Loaded {len(env['APPMGR'].known_apps)} app definitions.")
    print("Firmware modules configuration:")
    for apptype in FlipperAppType:
        app_sublist = env["APPBUILD"].get_apps_of_type(apptype)
        if not app_sublist:
            continue
        print(
            f"{apptype.value}:\n\t",
            ", ".join(app.appid for app in app_sublist),
        )
def build_apps_c(target, source, env):
    """Generate the applications.c registry from the current app buildset."""
    gen = ApplicationsCGenerator(env["APPBUILD"], env.subst("$LOADER_AUTOSTART"))
    with open(target[0].path, "w") as file:
        file.write(gen.generate())
def generate(env):
    """Attach application-manifest helpers and the ApplicationsC builder."""
    env.AddMethod(LoadApplicationManifests)
    env.AddMethod(PrepareApplicationsBuild)
    env.Append(
        BUILDERS={
            "ApplicationsC": Builder(
                suffix=".c",
                action=Action(build_apps_c, "${APPSCOMSTR}"),
            ),
        }
    )
def exists(env):
    """Always available."""
    return True

View File

@@ -1,175 +0,0 @@
import SCons
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Node.FS import File
import os
import subprocess
def icons_emitter(target, source, env):
    """Emitter: the icon bundle produces a .c/.h pair; sources are all icon files."""
    out_dir = target[0]
    bundle = env.subst("${ICON_FILE_NAME}")
    target = [out_dir.File(f"{bundle}.c"), out_dir.File(f"{bundle}.h")]
    source = env.GlobRecursive("*.*", env["ICON_SRC_DIR"])
    return target, source
def proto_emitter(target, source, env):
    """Emitter: each .proto source yields a nanopb .pb.c/.pb.h pair in compiled/."""
    target = []
    for src in source:
        stem, _ = os.path.splitext(src.name)
        target.extend(
            (env.File(f"compiled/{stem}.pb.c"), env.File(f"compiled/{stem}.pb.h"))
        )
    return target, source
def dolphin_emitter(target, source, env):
    # Emitter for dolphin art assets. Sources become the DOLPHIN_RES_TYPE
    # subtree plus every file inside it. For "external" resources each source
    # .png maps to a packed .bm target mirroring the tree layout; other
    # resource types compile to a single assets_dolphin_<type>.c/.h pair.
    # Also records the output dir in _DOLPHIN_OUT_DIR for the builder command.
    res_root_dir = source[0].Dir(env["DOLPHIN_RES_TYPE"])
    source = [res_root_dir]
    source.extend(
        env.GlobRecursive("*.*", res_root_dir),
    )
    target_base_dir = target[0]
    env.Replace(_DOLPHIN_OUT_DIR=target[0])
    if env["DOLPHIN_RES_TYPE"] == "external":
        target = []
        # One .bm per source .png, preserving relative paths.
        target.extend(
            map(
                lambda node: target_base_dir.File(
                    res_root_dir.rel_path(node).replace(".png", ".bm")
                ),
                filter(lambda node: isinstance(node, SCons.Node.FS.File), source),
            )
        )
    else:
        asset_basename = f"assets_dolphin_{env['DOLPHIN_RES_TYPE']}"
        target = [
            target_base_dir.File(asset_basename + ".c"),
            target_base_dir.File(asset_basename + ".h"),
        ]
    return target, source
def _invoke_git(args, source_dir):
    """Run git with *args* inside *source_dir*; return stripped decoded stdout.

    stderr is folded into stdout; raises CalledProcessError on failure.
    """
    output = subprocess.check_output(
        ["git", *args], cwd=source_dir, stderr=subprocess.STDOUT
    )
    return output.strip().decode()
def proto_ver_generator(target, source, env):
    """Generate a protobuf version header from the latest git tag.

    target: [version header file node]
    source: [any node inside the protobuf source checkout]

    A failed `git fetch` is tolerated (offline builds use already-fetched
    tags); a failed `git describe` aborts the build.
    """
    target_file = target[0]
    src_dir = source[0].dir.abspath
    try:
        _invoke_git(["fetch", "--tags"], source_dir=src_dir)
    except (subprocess.CalledProcessError, EnvironmentError):
        # Not great, not terrible
        print("Git: fetch failed")
    try:
        git_describe = _invoke_git(
            ["describe", "--tags", "--abbrev=0"],
            source_dir=src_dir,
        )
    except (subprocess.CalledProcessError, EnvironmentError):
        print("Git: describe failed")
        # BUGFIX: this used to call Exit("git error"), an undefined name in
        # this module, which raised NameError and masked the real failure.
        # Re-raise the original error so the build fails with the true cause.
        raise
    # print("describe=", git_describe)
    git_major, git_minor = git_describe.split(".")
    version_file_data = (
        "#pragma once",
        f"#define PROTOBUF_MAJOR_VERSION {git_major}",
        f"#define PROTOBUF_MINOR_VERSION {git_minor}",
        "",
    )
    with open(str(target_file), "wt") as file:
        file.write("\n".join(version_file_data))
def CompileIcons(env, target_dir, source_dir, *, icon_bundle_name="assets_icons"):
    """Set up icon compilation from *source_dir*, emitting into *target_dir*."""
    # Gather icon sources; frame_rate files drive animations and must
    # retrigger the build too.
    icon_sources = env.GlobRecursive("*.png", source_dir)
    icon_sources += env.GlobRecursive("frame_rate", source_dir)
    icons = env.IconBuilder(
        target_dir,
        ICON_SRC_DIR=source_dir,
        ICON_FILE_NAME=icon_bundle_name,
    )
    env.Depends(icons, icon_sources)
    return icons
def generate(env):
    # Registers asset pipelines: icon bundles, nanopb protobufs, dolphin art
    # (internal symbol or external packed forms) and the protobuf version header.
    env.SetDefault(
        ASSETS_COMPILER="${ROOT_DIR.abspath}/scripts/assets.py",
        NANOPB_COMPILER="${ROOT_DIR.abspath}/lib/nanopb/generator/nanopb_generator.py",
    )
    env.AddMethod(CompileIcons)

    if not env["VERBOSE"]:
        # Terse one-line progress strings unless VERBOSE builds are requested.
        env.SetDefault(
            ICONSCOMSTR="\tICONS\t${TARGET}",
            PROTOCOMSTR="\tPROTO\t${SOURCE}",
            DOLPHINCOMSTR="\tDOLPHIN\t${DOLPHIN_RES_TYPE}",
            RESMANIFESTCOMSTR="\tMANIFEST\t${TARGET}",
            PBVERCOMSTR="\tPBVER\t${TARGET}",
        )

    env.Append(
        BUILDERS={
            "IconBuilder": Builder(
                action=Action(
                    '${PYTHON3} "${ASSETS_COMPILER}" icons ${ICON_SRC_DIR} ${TARGET.dir} --filename ${ICON_FILE_NAME}',
                    "${ICONSCOMSTR}",
                ),
                emitter=icons_emitter,
            ),
            "ProtoBuilder": Builder(
                action=Action(
                    '${PYTHON3} "${NANOPB_COMPILER}" -q -I${SOURCE.dir.posix} -D${TARGET.dir.posix} ${SOURCES.posix}',
                    "${PROTOCOMSTR}",
                ),
                emitter=proto_emitter,
                suffix=".pb.c",
                src_suffix=".proto",
            ),
            # Internal dolphin art compiled into a C symbol table.
            "DolphinSymBuilder": Builder(
                action=Action(
                    '${PYTHON3} "${ASSETS_COMPILER}" dolphin -s dolphin_${DOLPHIN_RES_TYPE} "${SOURCE}" "${_DOLPHIN_OUT_DIR}"',
                    "${DOLPHINCOMSTR}",
                ),
                emitter=dolphin_emitter,
            ),
            # External dolphin art packed for SD-card deployment.
            "DolphinExtBuilder": Builder(
                action=Action(
                    '${PYTHON3} "${ASSETS_COMPILER}" dolphin "${SOURCE}" "${_DOLPHIN_OUT_DIR}"',
                    "${DOLPHINCOMSTR}",
                ),
                emitter=dolphin_emitter,
            ),
            "ProtoVerBuilder": Builder(
                action=Action(
                    proto_ver_generator,
                    "${PBVERCOMSTR}",
                ),
            ),
        }
    )
def exists(env):
    """Availability check: always true."""
    return True

View File

@@ -1,156 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Script import Mkdir
from SCons.Defaults import Touch
def GetProjetDirName(env, project=None):
    """Compose the build-directory name: f<hw>[-<project>][-<D?C?>].

    "D" marks DEBUG builds, "C" marks COMPACT builds.
    """
    parts = [f"f{env['TARGET_HW']}"]
    if project:
        parts.append(project)
    flags = ("D" if env["DEBUG"] else "") + ("C" if env["COMPACT"] else "")
    if flags:
        parts.append(flags)
    return "-".join(parts)
def create_fw_build_targets(env, configuration_name):
    """Run firmware.scons in a per-configuration variant dir; return its env."""
    flavor = GetProjetDirName(env, configuration_name)
    build_dir = env.Dir("build").Dir(flavor).abspath
    build_meta = {
        "type": configuration_name,
        "flavor": flavor,
        "build_dir": build_dir,
    }
    return env.SConscript(
        "firmware.scons",
        variant_dir=build_dir,
        duplicate=0,
        exports={"ENV": env, "fw_build_meta": build_meta},
    )
def AddFwProject(env, base_env, fw_type, fw_env_key):
    """Register a firmware flavor build and record its dist projects/artifacts."""
    project_env = env[fw_env_key] = create_fw_build_targets(base_env, fw_type)
    env.Append(
        DIST_PROJECTS=[project_env["FW_FLAVOR"]],
        DIST_DEPENDS=[project_env["FW_ARTIFACTS"]],
    )
    env.Replace(DIST_DIR=env.GetProjetDirName())
    return project_env
def AddOpenOCDFlashTarget(env, targetenv, **kw):
    """Create a <cfg>_flash target programming the firmware bin via OpenOCD."""
    build_cfg = targetenv.subst("$FIRMWARE_BUILD_CFG")
    openocd_target = env.OpenOCDFlash(
        "#build/oocd-${BUILD_CFG}-flash.flag",
        targetenv["FW_BIN"],
        OPENOCD_COMMAND=[
            "-c",
            "program ${SOURCE.posix} reset exit ${BASE_ADDRESS}",
        ],
        BUILD_CFG=build_cfg,
        BASE_ADDRESS=targetenv.subst("$IMAGE_BASE_ADDRESS"),
        **kw,
    )
    env.Alias(f"{build_cfg}_flash", openocd_target)
    if env["FORCE"]:
        env.AlwaysBuild(openocd_target)
    return openocd_target
def AddJFlashTarget(env, targetenv, **kw):
    """Create a <cfg>_jflash target programming the firmware bin via J-Link."""
    build_cfg = targetenv.subst("${FIRMWARE_BUILD_CFG}")
    jflash_target = env.JFlash(
        "#build/jflash-${BUILD_CFG}-flash.flag",
        targetenv["FW_BIN"],
        JFLASHADDR=targetenv.subst("$IMAGE_BASE_ADDRESS"),
        BUILD_CFG=build_cfg,
        **kw,
    )
    env.Alias(f"{build_cfg}_jflash", jflash_target)
    if env["FORCE"]:
        env.AlwaysBuild(jflash_target)
    return jflash_target
def AddUsbFlashTarget(env, file_flag, extra_deps, **kw):
    """Create a USB self-update target depending on dist artifacts + extras."""
    usb_update = env.UsbInstall(
        file_flag,
        (env["DIST_DEPENDS"], *extra_deps),
    )
    if env["FORCE"]:
        env.AlwaysBuild(usb_update)
    return usb_update
def DistCommand(env, name, source, **kw):
    """Register a pseudo dist_<name> command that runs sconsdist.py copy."""
    target_name = f"dist_{name}"
    command = env.Command(
        target_name,
        source,
        '@${PYTHON3} "${ROOT_DIR.abspath}/scripts/sconsdist.py" copy -p ${DIST_PROJECTS} -s "${DIST_SUFFIX}" ${DIST_EXTRA}',
        **kw,
    )
    env.Pseudo(target_name)
    env.Alias(name, command)
    return command
def generate(env):
    # Attaches firmware project/flash/dist helper methods and the USB
    # self-update / coprocessor-bundle builders to the environment.
    env.AddMethod(AddFwProject)
    env.AddMethod(DistCommand)
    env.AddMethod(AddOpenOCDFlashTarget)
    env.AddMethod(GetProjetDirName)
    env.AddMethod(AddJFlashTarget)
    env.AddMethod(AddUsbFlashTarget)
    env.SetDefault(
        COPRO_MCU_FAMILY="STM32WB5x",
    )

    env.Append(
        BUILDERS={
            # Runs the self-update script against the built update package;
            # touches the target flag file on success.
            "UsbInstall": Builder(
                action=[
                    Action(
                        '${PYTHON3} "${ROOT_DIR.abspath}/scripts/selfupdate.py" dist/${DIST_DIR}/f${TARGET_HW}-update-${DIST_SUFFIX}/update.fuf'
                    ),
                    Touch("${TARGET}"),
                ]
            ),
            # Repackages the ST coprocessor firmware bundle via assets.py.
            "CoproBuilder": Builder(
                action=Action(
                    [
                        Mkdir("$TARGET"),
                        '${PYTHON3} "${ROOT_DIR.abspath}/scripts/assets.py" '
                        "copro ${COPRO_CUBE_DIR} "
                        "${TARGET} ${COPRO_MCU_FAMILY} "
                        "--cube_ver=${COPRO_CUBE_VERSION} "
                        "--stack_type=${COPRO_STACK_TYPE} "
                        '--stack_file="${COPRO_STACK_BIN}" '
                        "--stack_addr=${COPRO_STACK_ADDR} ",
                    ],
                    "",
                )
            ),
        }
    )
def exists(env):
    """Tool check - always present."""
    return True

View File

@@ -1,261 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Errors import UserError
import SCons.Warnings
import os
import pathlib
from fbt.elfmanifest import assemble_manifest_data
from fbt.appmanifest import FlipperManifestException
from fbt.sdk import SdkCache
import itertools
from site_scons.fbt.appmanifest import FlipperApplication
def BuildAppElf(env, app):
    # Builds one external application (.fap) from its manifest.
    # Returns (postprocessed fap node, raw elf node, import-validator node).
    ext_apps_work_dir = env.subst("$EXT_APPS_WORK_DIR")
    app_work_dir = os.path.join(ext_apps_work_dir, app.appid)

    env.VariantDir(app_work_dir, app._appdir, duplicate=False)
    app_env = env.Clone(FAP_SRC_DIR=app._appdir, FAP_WORK_DIR=app_work_dir)

    app_alias = f"fap_{app.appid}"

    # Deprecation stub
    legacy_app_taget_name = f"{app_env['FIRMWARE_BUILD_CFG']}_{app.appid}"

    def legacy_app_build_stub(**kw):
        # Invoking the old-style target name fails the build with a hint.
        raise UserError(
            f"Target name '{legacy_app_taget_name}' is deprecated, use '{app_alias}' instead"
        )

    app_env.PhonyTarget(legacy_app_taget_name, Action(legacy_app_build_stub, None))

    # Files produced by arbitrary external commands declared in the manifest.
    externally_built_files = []
    if app.fap_extbuild:
        for external_file_def in app.fap_extbuild:
            externally_built_files.append(external_file_def.path)
            app_env.Alias(app_alias, external_file_def.path)
            app_env.AlwaysBuild(
                app_env.Command(
                    external_file_def.path,
                    None,
                    Action(
                        external_file_def.command,
                        "" if app_env["VERBOSE"] else "\tEXTCMD\t${TARGET}",
                    ),
                )
            )

    if app.fap_icon_assets:
        app_env.CompileIcons(
            app_env.Dir(app_work_dir),
            app._appdir.Dir(app.fap_icon_assets),
            icon_bundle_name=f"{app.appid}_icons",
        )

    # Static libraries private to this app, built from lib/<name> subtrees.
    private_libs = []

    for lib_def in app.fap_private_libs:
        lib_src_root_path = os.path.join(app_work_dir, "lib", lib_def.name)
        app_env.AppendUnique(
            CPPPATH=list(
                app_env.Dir(lib_src_root_path).Dir(incpath).srcnode()
                for incpath in lib_def.fap_include_paths
            ),
        )

        lib_sources = list(
            itertools.chain.from_iterable(
                app_env.GlobRecursive(source_type, lib_src_root_path)
                for source_type in lib_def.sources
            )
        )

        if len(lib_sources) == 0:
            raise UserError(f"No sources gathered for private library {lib_def}")

        private_lib_env = app_env.Clone()
        private_lib_env.AppendUnique(
            CCFLAGS=[
                *lib_def.cflags,
            ],
            CPPDEFINES=lib_def.cdefines,
            CPPPATH=list(
                os.path.join(app._appdir.path, cinclude)
                for cinclude in lib_def.cincludes
            ),
        )

        lib = private_lib_env.StaticLibrary(
            os.path.join(app_work_dir, lib_def.name),
            lib_sources,
        )
        private_libs.append(lib)

    # App sources from the variant dir, excluding private-lib subtrees.
    app_sources = list(
        itertools.chain.from_iterable(
            app_env.GlobRecursive(
                source_type,
                app_work_dir,
                exclude="lib",
            )
            for source_type in app.sources
        )
    )

    app_env.Append(
        LIBS=[*app.fap_libs, *private_libs],
        CPPPATH=env.Dir(app_work_dir),
    )

    # Raw debug ELF (<appid>_d) before metadata embedding/stripping.
    app_elf_raw = app_env.Program(
        os.path.join(app_work_dir, f"{app.appid}_d"),
        app_sources,
        APP_ENTRY=app.entry_point,
    )
    app_env.Clean(app_elf_raw, [*externally_built_files, app_env.Dir(app_work_dir)])

    app_elf_dump = app_env.ObjDump(app_elf_raw)
    app_env.Alias(f"{app_alias}_list", app_elf_dump)

    app_elf_augmented = app_env.EmbedAppMetadata(
        os.path.join(ext_apps_work_dir, app.appid),
        app_elf_raw,
        APP=app,
    )

    # Non-private manifest fields are captured as a Value node dependency,
    # so editing application.fam retriggers metadata embedding.
    manifest_vals = {
        k: v
        for k, v in vars(app).items()
        if not k.startswith(FlipperApplication.PRIVATE_FIELD_PREFIX)
    }

    app_env.Depends(
        app_elf_augmented,
        [app_env["SDK_DEFINITION"], app_env.Value(manifest_vals)],
    )
    if app.fap_icon:
        app_env.Depends(
            app_elf_augmented,
            app_env.File(f"{app._apppath}/{app.fap_icon}"),
        )

    # Always re-validate imports against the SDK symbol cache.
    app_elf_import_validator = app_env.ValidateAppImports(app_elf_augmented)
    app_env.AlwaysBuild(app_elf_import_validator)
    app_env.Alias(app_alias, app_elf_import_validator)
    return (app_elf_augmented, app_elf_raw, app_elf_import_validator)
def prepare_app_metadata(target, source, env):
    """Write the .fap manifest blob (<elf>.meta) for the app being embedded.

    Refuses to run against a non-finalized SDK version.
    """
    sdk_cache = SdkCache(env.subst("$SDK_DEFINITION"), load_version_only=True)
    if not sdk_cache.is_buildable():
        raise UserError(
            "SDK version is not finalized, please review changes and re-run operation"
        )
    metadata = assemble_manifest_data(
        app_manifest=env["APP"],
        hardware_target=int(env.subst("$TARGET_HW")),
        sdk_version=sdk_cache.version.as_int(),
    )
    with open(source[0].path + ".meta", "wb") as f:
        f.write(metadata)
def validate_app_imports(target, source, env):
    """Warn when the .fap imports symbols absent from the SDK symbol cache."""
    sdk_cache = SdkCache(env.subst("$SDK_DEFINITION"), load_version_only=False)
    app_syms = set()
    with open(target[0].path, "rt") as f:
        app_syms.update(line.split()[0] for line in f)
    unresolved_syms = app_syms - sdk_cache.get_valid_names()
    if unresolved_syms:
        SCons.Warnings.warn(
            SCons.Warnings.LinkWarning,
            f"{source[0].path}: app won't run. Unresolved symbols: {unresolved_syms}",
        )
def GetExtAppFromPath(env, app_dir):
    """Resolve APPSRC= (an appid or a path) to (manifest, fap node, validator node)."""
    if not app_dir:
        raise UserError("APPSRC= not set")

    appmgr = env["APPMGR"]
    app = None
    try:
        # Perhaps the user passed an appid directly?
        app = appmgr.get(app_dir)
    except FlipperManifestException:
        # Otherwise match path components, deepest first, against known app dirs.
        for dir_part in reversed(pathlib.Path(app_dir).parts):
            if app := appmgr.find_by_appdir(dir_part):
                break

    if not app:
        raise UserError(f"Failed to resolve application for given APPSRC={app_dir}")

    app_elf = env["_extapps"]["compact"].get(app.appid, None)
    if not app_elf:
        raise UserError(
            f"Application {app.appid} is not configured for building as external"
        )
    # NOTE(review): a missing validator would make the [0] below fail with
    # TypeError; presumably validators always exist alongside "compact" entries.
    app_validator = env["_extapps"]["validators"].get(app.appid, None)
    return (app, app_elf[0], app_validator[0])
def generate(env, **kw):
    # Registers external-app (.fap) helper methods and builders.
    env.SetDefault(EXT_APPS_WORK_DIR=kw.get("EXT_APPS_WORK_DIR"))
    # env.VariantDir(env.subst("$EXT_APPS_WORK_DIR"), env.Dir("#"), duplicate=False)

    env.AddMethod(BuildAppElf)
    env.AddMethod(GetExtAppFromPath)
    env.Append(
        BUILDERS={
            # Writes the .meta blob, then objcopy's it into a .fapmeta section
            # while stripping debug info (kept reachable via gnu-debuglink).
            "EmbedAppMetadata": Builder(
                action=[
                    Action(prepare_app_metadata, "$APPMETA_COMSTR"),
                    Action(
                        "${OBJCOPY} "
                        "--remove-section .ARM.attributes "
                        "--add-section .fapmeta=${SOURCE}.meta "
                        "--set-section-flags .fapmeta=contents,noload,readonly,data "
                        "--strip-debug --strip-unneeded "
                        "--add-gnu-debuglink=${SOURCE} "
                        "${SOURCES} ${TARGET}",
                        "$APPMETAEMBED_COMSTR",
                    ),
                ],
                suffix=".fap",
                src_suffix=".elf",
            ),
            # Dumps undefined symbols with nm, then checks them against the SDK.
            "ValidateAppImports": Builder(
                action=[
                    Action(
                        "@${NM} -P -u ${SOURCE} > ${TARGET}",
                        None,  # "$APPDUMP_COMSTR",
                    ),
                    Action(
                        validate_app_imports,
                        "$APPCHECK_COMSTR",
                    ),
                ],
                suffix=".impsyms",
                src_suffix=".fap",
            ),
        }
    )
def exists(env):
    """Unconditionally available."""
    return True

View File

@@ -1,209 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Errors import UserError
# from SCons.Scanner import C
from SCons.Script import Mkdir, Copy, Delete, Entry
from SCons.Util import LogicalLines
import os.path
import posixpath
import pathlib
from fbt.sdk import SdkCollector, SdkCache
def prebuild_sdk_emitter(target, source, env):
    """Emitter: also produce the dependency (.d) and preprocessed (.i.c) files."""
    primary = target[0]
    target.extend(
        (
            env.ChangeFileExtension(primary, ".d"),
            env.ChangeFileExtension(primary, ".i.c"),
        )
    )
    return target, source
def prebuild_sdk_create_origin_file(target, source, env):
    """Write the SDK "mega file" (<target>.c) including every SDK header."""
    mega_file = env.subst("${TARGET}.c", target=target[0])
    include_lines = "\n".join(f"#include <{h.path}>" for h in env["SDK_HEADERS"])
    with open(mega_file, "wt") as sdk_c:
        sdk_c.write(include_lines)
class SdkTreeBuilder:
    """Assembles the deployable SDK header tree from the preprocessor's .d file.

    Parses the dependency file produced while preprocessing the SDK "mega
    file", copies every referenced header into f<hw>_sdk/ preserving layout,
    and writes a meta file with the compiler flags + include paths needed to
    build against the deployed tree.
    """

    def __init__(self, env, target, source) -> None:
        self.env = env
        self.target = target
        self.source = source

        # Header paths referenced by the SDK, and their unique directories.
        self.header_depends = []
        self.header_dirs = []

        self.target_sdk_dir = env.subst("f${TARGET_HW}_sdk")
        self.sdk_deploy_dir = target[0].Dir(self.target_sdk_dir)

    def _parse_sdk_depends(self):
        # The .d file is one logical "target: deps..." line (with backslash
        # continuations); keep only .h entries.
        deps_file = self.source[0]
        with open(deps_file.path, "rt") as deps_f:
            lines = LogicalLines(deps_f).readlines()
            _, depends = lines[0].split(":", 1)
            self.header_depends = list(
                filter(lambda fname: fname.endswith(".h"), depends.split()),
            )
            self.header_dirs = sorted(
                set(map(os.path.normpath, map(os.path.dirname, self.header_depends)))
            )

    def _generate_sdk_meta(self):
        # Keep only firmware include dirs that occur in the SDK tree, rebased
        # onto the deployed sdk dir, then dump the effective compile flags.
        filtered_paths = [self.target_sdk_dir]
        full_fw_paths = list(
            map(
                os.path.normpath,
                (self.env.Dir(inc_dir).relpath for inc_dir in self.env["CPPPATH"]),
            )
        )

        # NOTE(review): this is a substring test against the joined string,
        # not membership in self.header_dirs - confirm that's intentional.
        sdk_dirs = ", ".join(f"'{dir}'" for dir in self.header_dirs)
        for dir in full_fw_paths:
            if dir in sdk_dirs:
                filtered_paths.append(
                    posixpath.normpath(posixpath.join(self.target_sdk_dir, dir))
                )

        sdk_env = self.env.Clone()
        sdk_env.Replace(CPPPATH=filtered_paths)
        with open(self.target[0].path, "wt") as f:
            cmdline_options = sdk_env.subst(
                "$CCFLAGS $_CCCOMCOM", target=Entry("dummy")
            )
            # Normalize path separators for cross-platform consumption.
            f.write(cmdline_options.replace("\\", "/"))
            f.write("\n")

    def _create_deploy_commands(self):
        # Recreate the deploy dir from scratch, then copy each header
        # preserving its relative directory layout.
        dirs_to_create = set(
            self.sdk_deploy_dir.Dir(dirpath) for dirpath in self.header_dirs
        )
        actions = [
            Delete(self.sdk_deploy_dir),
            Mkdir(self.sdk_deploy_dir),
        ]
        actions += [Mkdir(d) for d in dirs_to_create]

        actions += [
            Copy(
                self.sdk_deploy_dir.File(h).path,
                h,
            )
            for h in self.header_depends
        ]
        return actions

    def generate_actions(self):
        # Parse deps, emit the meta file now, and return the copy actions.
        self._parse_sdk_depends()
        self._generate_sdk_meta()
        return self._create_deploy_commands()
def deploy_sdk_tree(target, source, env, for_signature):
    """Builder generator: action list that copies SDK headers into a tree."""
    if for_signature:
        # Signature-only queries get no concrete actions.
        return []
    return SdkTreeBuilder(env, target, source).generate_actions()
def gen_sdk_data(sdk_cache: SdkCache):
    """Render the C++ source lines for the ELF API table from the SDK cache."""
    api_def = [f"#include <{h.name}>" for h in sdk_cache.get_headers()]
    api_def.append(f"const int elf_api_version = {sdk_cache.version.as_int()};")
    api_def.append(
        "static constexpr auto elf_api_table = sort(create_array_t<sym_entry>("
    )

    api_lines = [
        f"API_METHOD({fun_def.name}, {fun_def.returns}, ({fun_def.params}))"
        for fun_def in sdk_cache.get_functions()
    ]
    api_lines.extend(
        f"API_VARIABLE({var_def.name}, {var_def.var_type })"
        for var_def in sdk_cache.get_variables()
    )

    api_def.append(",\n".join(api_lines))
    api_def.append("));")
    return api_def
def _check_sdk_is_up2date(sdk_cache: SdkCache):
    """Abort unless the SDK cache is in a finalized, buildable state."""
    if sdk_cache.is_buildable():
        return
    raise UserError(
        "SDK version is not finalized, please review changes and re-run operation"
    )
def validate_sdk_cache(source, target, env):
    """Rebuild the SDK symbol cache from current sources and verify it's final."""
    # print(f"Generating SDK for {source[0]} to {target[0]}")
    current_sdk = SdkCollector()
    current_sdk.process_source_file_for_sdk(source[0].path)
    for header in env["SDK_HEADERS"]:
        current_sdk.add_header_to_sdk(pathlib.Path(header.path).as_posix())

    sdk_cache = SdkCache(target[0].path)
    sdk_cache.validate_api(current_sdk.get_api())
    sdk_cache.save()
    _check_sdk_is_up2date(sdk_cache)
def generate_sdk_symbols(source, target, env):
    """Write the generated symbol-table header from a finalized SDK cache."""
    sdk_cache = SdkCache(source[0].path)
    _check_sdk_is_up2date(sdk_cache)
    with open(target[0].path, "wt") as f:
        f.write("\n".join(gen_sdk_data(sdk_cache)))
def generate(env, **kw):
    # Registers the SDK pipeline builders: preprocess the combined SDK source,
    # deploy the header tree, update the symbol cache, generate the API header.
    env.Append(
        BUILDERS={
            # -MMD emits the .d dependency file consumed by SDKTree.
            "SDKPrebuilder": Builder(
                emitter=prebuild_sdk_emitter,
                action=[
                    Action(
                        prebuild_sdk_create_origin_file,
                        "$SDK_PREGEN_COMSTR",
                    ),
                    Action(
                        "$CC -o $TARGET -E -P $CCFLAGS $_CCCOMCOM $SDK_PP_FLAGS -MMD ${TARGET}.c",
                        "$SDK_COMSTR",
                    ),
                ],
                suffix=".i",
            ),
            "SDKTree": Builder(
                generator=deploy_sdk_tree,
                src_suffix=".d",
            ),
            "SDKSymUpdater": Builder(
                action=Action(
                    validate_sdk_cache,
                    "$SDKSYM_UPDATER_COMSTR",
                ),
                suffix=".csv",
                src_suffix=".i",
            ),
            "SDKSymGenerator": Builder(
                action=Action(
                    generate_sdk_symbols,
                    "$SDKSYM_GENERATOR_COMSTR",
                ),
                suffix=".h",
                src_suffix=".csv",
            ),
        }
    )
def exists(env):
    """Always reports availability."""
    return True

View File

@@ -1,29 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
def version_emitter(target, source, env):
    """Emitter: version.py produces version.inc.h and version.json in target dir."""
    out_dir = target[0]
    target = [out_dir.File("version.inc.h"), out_dir.File("version.json")]
    return target, source
def generate(env):
    """Register the VersionBuilder that invokes scripts/version.py."""
    env.Append(
        BUILDERS={
            "VersionBuilder": Builder(
                emitter=version_emitter,
                action=Action(
                    '${PYTHON3} "${ROOT_DIR.abspath}/scripts/version.py" generate -t ${TARGET_HW} -o ${TARGET.dir.posix} --dir "${ROOT_DIR}"',
                    "${VERSIONCOMSTR}",
                ),
            ),
        }
    )
def exists(env):
    """Tool availability: always true."""
    return True

View File

@@ -1,54 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
import SCons
# Default ARM toolchain binaries, used when the env does not override them.
__OBJCOPY_ARM_BIN = "arm-none-eabi-objcopy"
__NM_ARM_BIN = "arm-none-eabi-nm"
def generate(env):
    # Registers image conversion builders: .elf -> .hex / .bin and .bin -> .dfu.
    env.SetDefault(
        BIN2DFU="${ROOT_DIR.abspath}/scripts/bin2dfu.py",
        OBJCOPY=__OBJCOPY_ARM_BIN,  # FIXME
        NM=__NM_ARM_BIN,  # FIXME
    )
    env.Append(
        BUILDERS={
            "HEXBuilder": Builder(
                action=Action(
                    '${OBJCOPY} -O ihex "${SOURCE}" "${TARGET}"',
                    "${HEXCOMSTR}",
                ),
                suffix=".hex",
                src_suffix=".elf",
            ),
            "BINBuilder": Builder(
                action=Action(
                    '${OBJCOPY} -O binary -S "${SOURCE}" "${TARGET}"',
                    "${BINCOMSTR}",
                ),
                suffix=".bin",
                src_suffix=".elf",
            ),
            "DFUBuilder": Builder(
                action=Action(
                    '${PYTHON3} "${BIN2DFU}" -i "${SOURCE}" -o "${TARGET}" -a ${IMAGE_BASE_ADDRESS} -l "Flipper Zero F${TARGET_HW}"',
                    "${DFUCOMSTR}",
                ),
                suffix=".dfu",
                src_suffix=".bin",
            ),
        }
    )
def exists(env):
    """Report objcopy availability: env setting first, then PATH lookup."""
    if "OBJCOPY" in env:
        return env["OBJCOPY"]
    if objcopy := env.WhereIs(__OBJCOPY_ARM_BIN):
        return objcopy
    raise SCons.Errors.StopError("Could not detect objcopy for arm")

View File

@@ -1,17 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
def generate(env):
    """Provide default gdb/gdb-py command lines (interactive: no $TARGET)."""
    env.SetDefault(
        GDB="gdb",
        GDBPY="gdb-py",
        GDBOPTS="",
        GDBPYOPTS="",
        GDBCOM="$GDB $GDBOPTS $SOURCES",  # no $TARGET
        GDBPYCOM="$GDBPY $GDBOPTS $GDBPYOPTS $SOURCES",  # no $TARGET
    )
def exists(env):
    """Always considered present."""
    return True

View File

@@ -1,27 +0,0 @@
from SCons.Builder import Builder
from SCons.Defaults import Touch
def generate(env):
    """Configure J-Link JFlash defaults and the flag-file flashing builder."""
    # Windows ships "JFlash"; other platforms use the "JFlashExe" wrapper name.
    jflash_bin = "JFlash" if env.subst("$PLATFORM") == "win32" else "JFlashExe"
    env.SetDefault(
        JFLASH=jflash_bin,
        JFLASHFLAGS=["-auto", "-exit"],
        JFLASHCOM="${JFLASH} -openprj${JFLASHPROJECT} -open${SOURCE},${JFLASHADDR} ${JFLASHFLAGS}",
    )
    env.Append(
        BUILDERS={
            "JFlash": Builder(
                action=["${JFLASHCOM}", Touch("${TARGET}")],
            ),
        }
    )
def exists(env):
    """Presence check - always true."""
    return True

View File

@@ -1,26 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
def generate(env):
    """Register the ObjDump listing builder (.elf -> annotated .lst)."""
    env.SetDefault(
        OBJDUMP="objdump",
        OBJDUMPFLAGS=[],
        OBJDUMPCOM="$OBJDUMP $OBJDUMPFLAGS -S $SOURCES > $TARGET",
    )
    env.Append(
        BUILDERS={
            "ObjDump": Builder(
                action=Action("${OBJDUMPCOM}", "${OBJDUMPCOMSTR}"),
                suffix=".lst",
                src_suffix=".elf",
            ),
        }
    )
def exists(env):
    """Always available as a tool."""
    return True

View File

@@ -1,46 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
from SCons.Defaults import Touch
import SCons
# Default binary name, used when the env does not override OPENOCD.
__OPENOCD_BIN = "openocd"

# Shared flashing action; OPENOCDCOMSTR (set in generate()) controls echoing.
_oocd_action = Action(
    "${OPENOCD} ${OPENOCD_OPTS} ${OPENOCD_COMMAND}",
    "${OPENOCDCOMSTR}",
)
def generate(env):
    """Provide OpenOCD defaults and the flag-file flashing builder."""
    env.SetDefault(
        OPENOCD=__OPENOCD_BIN,
        OPENOCD_OPTS="",
        OPENOCD_COMMAND="",
        OPENOCDCOM="${OPENOCD} ${OPENOCD_OPTS} ${OPENOCD_COMMAND}",
        OPENOCDCOMSTR="",
    )
    env.Append(
        BUILDERS={
            "OpenOCDFlash": Builder(
                # Touch a .flash flag file so the step has a stable target node.
                action=[_oocd_action, Touch("${TARGET}")],
                suffix=".flash",
                src_suffix=".bin",
            ),
        }
    )
def exists(env):
    """Report openocd availability: env setting first, then PATH lookup."""
    if "OPENOCD" in env:
        return env["OPENOCD"]
    if openocd := env.WhereIs(__OPENOCD_BIN):
        return openocd
    raise SCons.Errors.StopError("Could not detect openocd")

View File

@@ -1,13 +0,0 @@
def generate(env):
    """Set PYTHON3 to the platform-appropriate interpreter name."""
    # On Windows, the Python 3 executable is usually plain "python".
    env.SetDefault(
        PYTHON3="python" if env["PLATFORM"] == "win32" else "python3",
    )
def exists(env):
    """Always reports presence."""
    return True

View File

@@ -1,55 +0,0 @@
import posixpath
import os
from SCons.Errors import UserError
def BuildModule(env, module):
    """SConscript a module from <module>/SConscript or <module>.scons in a variant dir."""
    src_dir = str(env.Dir(".").srcdir or os.getcwd())
    module_sconscript = posixpath.join(src_dir, module, "SConscript")
    if not os.path.exists(module_sconscript):
        # Fall back to a flat "<module>.scons" file.
        module_sconscript = posixpath.join(src_dir, f"{module}.scons")
        if not os.path.exists(module_sconscript):
            raise UserError(f"Cannot build module {module}: scons file not found")

    env.Append(PY_LINT_SOURCES=[module_sconscript])
    return env.SConscript(
        module_sconscript,
        variant_dir=posixpath.join(env.subst("$BUILD_DIR"), module),
        duplicate=0,
    )
def BuildModules(env, modules):
    """Build every module in *modules*, collecting non-None SConscript results."""
    built = [env.BuildModule(module) for module in modules]
    # print("module ", module, build_res)
    return [res for res in built if res is not None]
def PhonyTarget(env, name, action, source=None, **kw):
    """Create an always-built pseudo target, aliased as *name*."""
    phony_name = "phony_" + name
    env.Pseudo(phony_name)
    command = env.Command(phony_name, source or [], action, **kw)
    env.AlwaysBuild(env.Alias(name, command))
    return command
def ChangeFileExtension(env, fnode, ext):
    """Return a top-relative file node: *fnode*'s path with its extension set to *ext*."""
    stem, _ = os.path.splitext(fnode.path)
    return env.File(f"#{stem}{ext}")
def generate(env):
    """Attach the modular-build helper methods to the environment."""
    for helper in (BuildModule, BuildModules, PhonyTarget, ChangeFileExtension):
        env.AddMethod(helper)
def exists(env):
    """Always true - no external dependencies."""
    return True

View File

@@ -1,25 +0,0 @@
import SCons
def GlobRecursive(env, pattern, node=".", exclude=None):
    """Glob *pattern* under *node* and every subdirectory (source tree only)."""
    if isinstance(node, str):
        node = env.Dir(node)
    results = []
    # Descend into subdirectories first, then add this dir's own matches.
    for entry in node.glob("*", source=True, exclude=exclude):
        if isinstance(entry, SCons.Node.FS.Dir):
            results += env.GlobRecursive(pattern, entry, exclude)
    results += node.glob(pattern, source=True, exclude=exclude)
    # print(f"Glob for {pattern} from {node}: {results}")
    return results
def generate(env):
    """Expose GlobRecursive as an environment method."""
    env.AddMethod(GlobRecursive)
def exists(env):
    """Always present."""
    return True

View File

@@ -1,26 +0,0 @@
from SCons.Builder import Builder
from SCons.Action import Action
def generate(env):
    """Register strip defaults and the ELFStripper builder (.elf -> stripped .elf)."""
    env.SetDefault(
        STRIP="strip",
        STRIPFLAGS=[],
        STRIPCOM="$STRIP $STRIPFLAGS $SOURCES -o $TARGET",
    )
    env.Append(
        BUILDERS={
            "ELFStripper": Builder(
                action=Action("${STRIPCOM}", "${STRIPCOMSTR}"),
                suffix=".elf",
                src_suffix=".elf",
            ),
        }
    )
def exists(env):
    """Reports unconditional availability."""
    return True