[FL-2627] Flipper applications: SDK, build and debug system (#1387)
* Added support for running applications from SD card (FAPs - Flipper Application Packages) * Added plugin_dist target for fbt to build FAPs * All apps of type FlipperAppType.EXTERNAL and FlipperAppType.PLUGIN are built as FAPs by default * Updated VSCode configuration for new fbt features - re-deploy stock configuration to use them * Added debugging support for FAPs with fbt debug & VSCode * Added public firmware API with automated versioning Co-authored-by: hedger <hedger@users.noreply.github.com> Co-authored-by: SG <who.just.the.doctor@gmail.com> Co-authored-by: あく <alleteam@gmail.com>
This commit is contained in:
@@ -11,6 +11,7 @@ ENV.AppendUnique(
|
||||
"-fno-use-cxa-atexit",
|
||||
"-fno-exceptions",
|
||||
"-fno-threadsafe-statics",
|
||||
"-ftemplate-depth=4096",
|
||||
],
|
||||
CCFLAGS=[
|
||||
"-mcpu=cortex-m4",
|
||||
|
@@ -68,6 +68,11 @@ vars.AddVariables(
|
||||
"7",
|
||||
],
|
||||
),
|
||||
BoolVariable(
|
||||
"DEBUG_TOOLS",
|
||||
help="Enable debug tools to be built",
|
||||
default=False,
|
||||
),
|
||||
)
|
||||
|
||||
vars.Add(
|
||||
@@ -186,21 +191,17 @@ vars.Add(
|
||||
help="Map of (configuration_name->application_list)",
|
||||
default={
|
||||
"default": (
|
||||
"crypto_start",
|
||||
# Svc
|
||||
"basic_services",
|
||||
# Apps
|
||||
"basic_apps",
|
||||
"updater_app",
|
||||
"archive",
|
||||
"main_apps",
|
||||
"system_apps",
|
||||
# Settings
|
||||
"passport",
|
||||
"system_settings",
|
||||
"about",
|
||||
"settings_apps",
|
||||
# Plugins
|
||||
"basic_plugins",
|
||||
# "basic_plugins",
|
||||
# Debug
|
||||
"debug_apps",
|
||||
# "debug_apps",
|
||||
)
|
||||
},
|
||||
)
|
||||
@@ -211,4 +212,26 @@ vars.Add(
|
||||
default="default",
|
||||
)
|
||||
|
||||
vars.Add(
|
||||
"APPSRC",
|
||||
help="Application source directory for app to build & upload",
|
||||
default="",
|
||||
)
|
||||
|
||||
# List of tuples (directory, add_to_global_include_path)
|
||||
vars.Add(
|
||||
"APPDIRS",
|
||||
help="Directories to search for firmware components & external apps",
|
||||
default=[
|
||||
("applications", False),
|
||||
("applications/services", True),
|
||||
("applications/main", True),
|
||||
("applications/settings", False),
|
||||
("applications/system", False),
|
||||
("applications/debug", False),
|
||||
("applications/plugins", False),
|
||||
("applications_user", False),
|
||||
],
|
||||
)
|
||||
|
||||
Return("vars")
|
||||
|
94
site_scons/extapps.scons
Normal file
94
site_scons/extapps.scons
Normal file
@@ -0,0 +1,94 @@
|
||||
# Build configuration for external applications (FAPs), cloned off the
# main firmware environment.
Import("ENV")

from fbt.appmanifest import FlipperAppType

appenv = ENV.Clone(
    tools=[("fbt_extapps", {"EXT_APPS_WORK_DIR": ENV.subst("${BUILD_DIR}/.extapps")})]
)

# FAPs are linked with a dedicated linker script.
appenv.Replace(
    LINKER_SCRIPT="application-ext",
)

appenv.AppendUnique(
    CCFLAGS=[
        "-ggdb3",
        "-mword-relocations",
        "-mlong-calls",
        "-fno-common",
        "-nostdlib",
        "-fvisibility=hidden",
    ],
    LINKFLAGS=[
        "-Ur",
        "-Wl,-Ur",
        # "-Wl,--orphan-handling=error",
        "-Bsymbolic",
        "-nostartfiles",
        "-mlong-calls",
        "-fno-common",
        "-nostdlib",
        "-Wl,--gc-sections",
        "-Wl,--no-export-dynamic",
        "-fvisibility=hidden",
        "-Wl,-e${APP_ENTRY}",
        "-Xlinker",
        "-Map=${TARGET}.map",
        "-specs=nano.specs",
        "-specs=nosys.specs",
    ],
    LIBS=[
        "m",
        "gcc",
        "stdc++",
        "supc++",
    ],
)


# Registries of per-app build artifacts, shared with other scripts
# through the environment under "_extapps".
extapps = appenv["_extapps"] = {
    "compact": {},
    "debug": {},
    "validators": {},
    "dist": {},
}


def _register_external_app(env, appdef):
    # Build one app as a FAP and file its artifacts under each registry.
    compact_elf, debug_elf, validator = env.BuildAppElf(appdef)
    extapps["compact"][appdef.appid] = compact_elf
    extapps["debug"][appdef.appid] = debug_elf
    extapps["validators"][appdef.appid] = validator
    extapps["dist"][appdef.appid] = (appdef.fap_category, compact_elf)


fap_types = [FlipperAppType.PLUGIN, FlipperAppType.EXTERNAL]
if appenv["DEBUG_TOOLS"]:
    fap_types.append(FlipperAppType.DEBUG)

for fap_type in fap_types:
    for appdef in appenv["APPBUILD"].get_apps_of_type(fap_type, True):
        _register_external_app(appenv, appdef)

# Ugly access to global option
if extra_app_list := GetOption("extra_ext_apps"):
    for extra_app in extra_app_list.split(","):
        _register_external_app(appenv, appenv["APPMGR"].get(extra_app))


if appenv["FORCE"]:
    appenv.AlwaysBuild(extapps["compact"].values())

Alias(appenv["FIRMWARE_BUILD_CFG"] + "_extapps", extapps["compact"].values())

if appsrc := appenv.subst("$APPSRC"):
    # Single-app build & upload target for "fbt launch_app APPSRC=...".
    app_manifest, fap_file = appenv.GetExtAppFromPath(appsrc)
    appenv.PhonyTarget(
        "launch_app",
        '${PYTHON3} scripts/runfap.py ${SOURCE} --fap_dst_dir "/ext/apps/${FAP_CATEGORY}"',
        source=fap_file,
        FAP_CATEGORY=app_manifest.fap_category,
    )

Return("extapps")
|
@@ -1,5 +1,5 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, Tuple
|
||||
from enum import Enum
|
||||
import os
|
||||
|
||||
@@ -25,7 +25,7 @@ class FlipperAppType(Enum):
|
||||
class FlipperApplication:
|
||||
appid: str
|
||||
apptype: FlipperAppType
|
||||
name: Optional[str] = None
|
||||
name: Optional[str] = ""
|
||||
entry_point: Optional[str] = None
|
||||
flags: List[str] = field(default_factory=lambda: ["Default"])
|
||||
cdefines: List[str] = field(default_factory=list)
|
||||
@@ -35,7 +35,14 @@ class FlipperApplication:
|
||||
stack_size: int = 2048
|
||||
icon: Optional[str] = None
|
||||
order: int = 0
|
||||
_appdir: Optional[str] = None
|
||||
sdk_headers: List[str] = field(default_factory=list)
|
||||
version: Tuple[int] = field(default_factory=lambda: (0, 0))
|
||||
sources: List[str] = field(default_factory=lambda: ["*.c*"])
|
||||
fap_icon: Optional[str] = None
|
||||
fap_libs: List[str] = field(default_factory=list)
|
||||
fap_category: str = ""
|
||||
_appdir: Optional[object] = None
|
||||
_apppath: Optional[str] = None
|
||||
|
||||
|
||||
class AppManager:
|
||||
@@ -50,7 +57,13 @@ class AppManager:
|
||||
f"Missing application manifest for '{appname}'"
|
||||
)
|
||||
|
||||
def load_manifest(self, app_manifest_path: str, app_dir_name: str):
|
||||
def find_by_appdir(self, appdir: str):
|
||||
for app in self.known_apps.values():
|
||||
if app._appdir.name == appdir:
|
||||
return app
|
||||
return None
|
||||
|
||||
def load_manifest(self, app_manifest_path: str, app_dir_node: object):
|
||||
if not os.path.exists(app_manifest_path):
|
||||
raise FlipperManifestException(
|
||||
f"App manifest not found at path {app_manifest_path}"
|
||||
@@ -61,7 +74,14 @@ class AppManager:
|
||||
|
||||
def App(*args, **kw):
|
||||
nonlocal app_manifests
|
||||
app_manifests.append(FlipperApplication(*args, **kw, _appdir=app_dir_name))
|
||||
app_manifests.append(
|
||||
FlipperApplication(
|
||||
*args,
|
||||
**kw,
|
||||
_appdir=app_dir_node,
|
||||
_apppath=os.path.dirname(app_manifest_path),
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
with open(app_manifest_path, "rt") as manifest_file:
|
||||
@@ -172,19 +192,32 @@ class AppBuildset:
|
||||
cdefs.update(app.cdefines)
|
||||
return sorted(list(cdefs))
|
||||
|
||||
def get_apps_of_type(self, apptype: FlipperAppType):
|
||||
def get_sdk_headers(self):
|
||||
sdk_headers = []
|
||||
for app in self.apps:
|
||||
sdk_headers.extend([app._appdir.File(header) for header in app.sdk_headers])
|
||||
return sdk_headers
|
||||
|
||||
def get_apps_of_type(self, apptype: FlipperAppType, all_known: bool = False):
|
||||
return sorted(
|
||||
filter(lambda app: app.apptype == apptype, self.apps),
|
||||
filter(
|
||||
lambda app: app.apptype == apptype,
|
||||
self.appmgr.known_apps.values() if all_known else self.apps,
|
||||
),
|
||||
key=lambda app: app.order,
|
||||
)
|
||||
|
||||
def get_builtin_apps(self):
|
||||
return list(
|
||||
filter(lambda app: app.apptype in self.BUILTIN_APP_TYPES, self.apps)
|
||||
)
|
||||
|
||||
def get_builtin_app_folders(self):
|
||||
return sorted(
|
||||
set(
|
||||
app._appdir
|
||||
for app in filter(
|
||||
lambda app: app.apptype in self.BUILTIN_APP_TYPES, self.apps
|
||||
)
|
||||
(app._appdir, source_type)
|
||||
for app in self.get_builtin_apps()
|
||||
for source_type in app.sources
|
||||
)
|
||||
)
|
||||
|
||||
|
85
site_scons/fbt/elfmanifest.py
Normal file
85
site_scons/fbt/elfmanifest.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from dataclasses import dataclass
|
||||
import os
|
||||
|
||||
import struct
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from .appmanifest import FlipperApplication
|
||||
|
||||
|
||||
_MANIFEST_MAGIC = 0x52474448
|
||||
|
||||
|
||||
@dataclass
class ElfManifestBaseHeader:
    """Fixed-size prefix of the FAP manifest section.

    Serialized little-endian as three u32 fields (magic, manifest
    version, API version) followed by the hardware target id (i16).
    """

    manifest_version: int
    api_version: int
    hardware_target_id: int

    # b"HDGR" when packed little-endian; identifies the manifest section.
    manifest_magic: int = 0x52474448

    def as_bytes(self) -> bytes:
        """Serialize the header for embedding into the app ELF."""
        header_fields = (
            self.manifest_magic,
            self.manifest_version,
            self.api_version,
            self.hardware_target_id,
        )
        return struct.pack("<IIIh", *header_fields)
|
||||
|
||||
|
||||
@dataclass
class ElfManifestV1:
    """Version-1 payload of the FAP manifest section."""

    stack_size: int
    app_version: int
    name: str = ""
    icon: bytes = field(default=b"")

    def as_bytes(self) -> bytes:
        """Serialize as <hI32s?32s: stack size (i16), app version (u32),
        ASCII name (32 bytes, zero padded), has-icon flag, icon bitmap
        (32 bytes, zero padded)."""
        return struct.pack(
            "<hI32s?32s",
            self.stack_size,
            self.app_version,
            self.name.encode("ascii"),
            bool(self.icon),
            self.icon,
        )
|
||||
|
||||
|
||||
def assemble_manifest_data(
    app_manifest: FlipperApplication,
    hardware_target: int,
    sdk_version,
):
    """Serialize the manifest section contents for *app_manifest*.

    Returns the base header followed by the v1 payload as bytes.
    Raises ValueError when the app icon is not 10x10 or encodes to more
    than 32 bytes.
    """
    icon_bytes = b""
    if app_manifest.fap_icon:
        # Imported lazily: only needed when the app actually ships an icon.
        from flipper.assets.icon import file2image

        image = file2image(os.path.join(app_manifest._apppath, app_manifest.fap_icon))
        if (image.width, image.height) != (10, 10):
            raise ValueError(
                f"Flipper app icon must be 10x10 pixels, but {image.width}x{image.height} was given"
            )
        if len(image.data) > 32:
            raise ValueError(
                f"Flipper app icon must be 32 bytes or less, but {len(image.data)} bytes were given"
            )
        icon_bytes = image.data

    # Major version goes to the high 16 bits, minor to the low 16 bits.
    major, minor = app_manifest.version[0], app_manifest.version[1]
    packed_version = ((major & 0xFFFF) << 16) | (minor & 0xFFFF)

    header = ElfManifestBaseHeader(
        manifest_version=1,
        api_version=sdk_version,
        hardware_target_id=hardware_target,
    ).as_bytes()
    payload = ElfManifestV1(
        stack_size=app_manifest.stack_size,
        app_version=packed_version,
        name=app_manifest.name,
        icon=icon_bytes,
    ).as_bytes()

    return header + payload
|
514
site_scons/fbt/sdk.py
Normal file
514
site_scons/fbt/sdk.py
Normal file
@@ -0,0 +1,514 @@
|
||||
import operator
|
||||
import os
|
||||
import csv
|
||||
import operator
|
||||
|
||||
from enum import Enum, auto
|
||||
from typing import List, Set, ClassVar, Any
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from cxxheaderparser.parser import CxxParser
|
||||
|
||||
|
||||
# 'Fixing' complaints about typedefs
|
||||
CxxParser._fundamentals.discard("wchar_t")
|
||||
|
||||
from cxxheaderparser.types import (
|
||||
EnumDecl,
|
||||
Field,
|
||||
ForwardDecl,
|
||||
FriendDecl,
|
||||
Function,
|
||||
Method,
|
||||
Typedef,
|
||||
UsingAlias,
|
||||
UsingDecl,
|
||||
Variable,
|
||||
Pointer,
|
||||
Type,
|
||||
PQName,
|
||||
NameSpecifier,
|
||||
FundamentalSpecifier,
|
||||
Parameter,
|
||||
Array,
|
||||
Value,
|
||||
Token,
|
||||
FunctionType,
|
||||
)
|
||||
|
||||
from cxxheaderparser.parserstate import (
|
||||
State,
|
||||
EmptyBlockState,
|
||||
ClassBlockState,
|
||||
ExternBlockState,
|
||||
NamespaceBlockState,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ApiEntryFunction:
    """A function exported by the firmware API."""

    name: str
    returns: str
    params: str

    # Value of the "entry" column in the API cache CSV.
    csv_type: ClassVar[str] = "Function"

    def dictify(self):
        """Map this entry onto the CSV columns."""
        return {"name": self.name, "type": self.returns, "params": self.params}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ApiEntryVariable:
    """A variable exported by the firmware API."""

    name: str
    var_type: str

    # Value of the "entry" column in the API cache CSV.
    csv_type: ClassVar[str] = "Variable"

    def dictify(self):
        """Map this entry onto the CSV columns (no params for variables)."""
        return {"name": self.name, "type": self.var_type, "params": None}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ApiHeader:
    """A header file that is part of the published SDK."""

    name: str

    # Value of the "entry" column in the API cache CSV.
    csv_type: ClassVar[str] = "Header"

    def dictify(self):
        """Map this entry onto the CSV columns (only the name is used)."""
        return {"name": self.name, "type": None, "params": None}
|
||||
|
||||
|
||||
@dataclass
class ApiEntries:
    """Aggregate of all collected API entries.

    Sets are used so that multiple declarations with an identical
    signature collapse into a single entry.
    """

    functions: Set[ApiEntryFunction] = field(default_factory=set)
    variables: Set[ApiEntryVariable] = field(default_factory=set)
    headers: Set[ApiHeader] = field(default_factory=set)
|
||||
|
||||
|
||||
class SymbolManager:
    """Accumulates API entries while guarding against symbol-hash collisions."""

    def __init__(self):
        self.api = ApiEntries()
        # Hashes of every accepted name, used for collision detection.
        self.name_hashes = set()

    def _name_check(self, name: str):
        # Raise if the GNU-style hash of *name* collides with one already seen.
        digest = gnu_sym_hash(name)
        if digest in self.name_hashes:
            raise Exception(f"Hash collision on {name}")
        self.name_hashes.add(digest)

    def add_function(self, function_def: ApiEntryFunction):
        """Register a function entry; exact duplicates are ignored."""
        if function_def not in self.api.functions:
            self._name_check(function_def.name)
            self.api.functions.add(function_def)

    def add_variable(self, variable_def: ApiEntryVariable):
        """Register a variable entry; exact duplicates are ignored."""
        if variable_def not in self.api.variables:
            self._name_check(variable_def.name)
            self.api.variables.add(variable_def)

    def add_header(self, header: str):
        """Register a header by path."""
        self.api.headers.add(ApiHeader(header))
|
||||
|
||||
|
||||
def gnu_sym_hash(name: str):
    """Return the last 8 characters of the hex form of a djb2-style
    (GNU symbol) hash of *name*.

    NOTE(review): for short inputs the result may include part of the
    "0x" prefix — preserved as-is since hashes are only compared to
    each other.
    """
    h = 0x1505
    for ch in name:
        h = h * 33 + ord(ch)  # equivalent to (h << 5) + h + ord(ch)
    return str(hex(h))[-8:]
|
||||
|
||||
|
||||
class SdkCollector:
    """Drives API collection by feeding headers and sources into a
    SymbolManager."""

    def __init__(self):
        self.symbol_manager = SymbolManager()

    def add_header_to_sdk(self, header: str):
        """Record *header* as part of the published SDK."""
        self.symbol_manager.add_header(header)

    def process_source_file_for_sdk(self, file_path: str):
        """Parse *file_path* as C++ and record its externally visible API."""
        visitor = SdkCxxVisitor(self.symbol_manager)
        with open(file_path, "rt") as src:
            parser = CxxParser(file_path, src.read(), visitor, None)
            parser.parse()

    def get_api(self):
        """Return the accumulated ApiEntries."""
        return self.symbol_manager.api
|
||||
|
||||
|
||||
def stringify_array_dimension(size_descr):
    """Render an array dimension; empty string for a missing/unsized one."""
    return stringify_descr(size_descr) if size_descr else ""
|
||||
|
||||
|
||||
def stringify_array_descr(type_descr):
    """Split an Array node into (element type string, dimension string)."""
    assert isinstance(type_descr, Array)
    element = stringify_descr(type_descr.array_of)
    dimension = stringify_array_dimension(type_descr.size)
    return element, dimension
|
||||
|
||||
|
||||
def stringify_descr(type_descr):
    """Recursively render a cxxheaderparser type node as C-like source text.

    Dispatches on the node's type; the check order matters (e.g. Array
    before Value) and is preserved from the original implementation.
    """
    if isinstance(type_descr, (NameSpecifier, FundamentalSpecifier)):
        return type_descr.name
    elif isinstance(type_descr, PQName):
        return "::".join(stringify_descr(seg) for seg in type_descr.segments)
    elif isinstance(type_descr, Pointer):
        # Hack: a pointer-to-function renders as the function type itself.
        if isinstance(type_descr.ptr_to, FunctionType):
            return stringify_descr(type_descr.ptr_to)
        return f"{stringify_descr(type_descr.ptr_to)}*"
    elif isinstance(type_descr, Type):
        qualifiers = "const " if type_descr.const else ""
        qualifiers += "volatile " if type_descr.volatile else ""
        return f"{qualifiers}{stringify_descr(type_descr.typename)}"
    elif isinstance(type_descr, Parameter):
        return stringify_descr(type_descr.type)
    elif isinstance(type_descr, Array):
        # Hack for 2d arrays
        if isinstance(type_descr.array_of, Array):
            argtype, dimension = stringify_array_descr(type_descr.array_of)
            return (
                f"{argtype}[{stringify_array_dimension(type_descr.size)}][{dimension}]"
            )
        return f"{stringify_descr(type_descr.array_of)}[{stringify_array_dimension(type_descr.size)}]"
    elif isinstance(type_descr, Value):
        return " ".join(stringify_descr(tok) for tok in type_descr.tokens)
    elif isinstance(type_descr, FunctionType):
        arg_list = ", ".join(stringify_descr(p) for p in type_descr.parameters)
        return f"{stringify_descr(type_descr.return_type)} (*)({arg_list})"
    elif isinstance(type_descr, Token):
        return type_descr.value
    elif type_descr is None:
        return ""
    else:
        raise Exception("unsupported type_descr: %s" % type_descr)
|
||||
|
||||
|
||||
class SdkCxxVisitor:
    """cxxheaderparser visitor that records extern variables and bodiless
    function declarations into a SymbolManager, ignoring everything else."""

    def __init__(self, symbol_manager: SymbolManager):
        self.api = symbol_manager

    def on_variable(self, state: State, v: Variable) -> None:
        # Only extern variables are part of the exported API surface.
        if v.extern:
            self.api.add_variable(
                ApiEntryVariable(
                    stringify_descr(v.name),
                    stringify_descr(v.type),
                )
            )

    def on_function(self, state: State, fn: Function) -> None:
        # Inline functions and definitions with bodies are not API exports.
        if fn.inline or fn.has_body:
            return

        params = ", ".join(map(stringify_descr, fn.parameters))
        if fn.vararg:
            params += ", ..."
        self.api.add_function(
            ApiEntryFunction(
                stringify_descr(fn.name),
                stringify_descr(fn.return_type),
                params,
            )
        )

    # The callbacks below are required by the visitor protocol but are
    # irrelevant for API collection.
    def on_define(self, state: State, content: str) -> None:
        pass

    def on_pragma(self, state: State, content: str) -> None:
        pass

    def on_include(self, state: State, filename: str) -> None:
        pass

    def on_empty_block_start(self, state: EmptyBlockState) -> None:
        pass

    def on_empty_block_end(self, state: EmptyBlockState) -> None:
        pass

    def on_extern_block_start(self, state: ExternBlockState) -> None:
        pass

    def on_extern_block_end(self, state: ExternBlockState) -> None:
        pass

    def on_namespace_start(self, state: NamespaceBlockState) -> None:
        pass

    def on_namespace_end(self, state: NamespaceBlockState) -> None:
        pass

    def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
        pass

    def on_typedef(self, state: State, typedef: Typedef) -> None:
        pass

    def on_using_namespace(self, state: State, namespace: List[str]) -> None:
        pass

    def on_using_alias(self, state: State, using: UsingAlias) -> None:
        pass

    def on_using_declaration(self, state: State, using: UsingDecl) -> None:
        pass

    def on_enum(self, state: State, enum: EnumDecl) -> None:
        pass

    def on_class_start(self, state: ClassBlockState) -> None:
        pass

    def on_class_field(self, state: State, f: Field) -> None:
        pass

    def on_class_method(self, state: ClassBlockState, method: Method) -> None:
        pass

    def on_class_friend(self, state: ClassBlockState, friend: FriendDecl) -> None:
        pass

    def on_class_end(self, state: ClassBlockState) -> None:
        pass
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SdkVersion:
    """API version of the firmware SDK as a (major, minor) pair."""

    major: int = 0
    minor: int = 0

    # Value of the "entry" column in the API cache CSV.
    csv_type: ClassVar[str] = "Version"

    def __str__(self) -> str:
        return f"{self.major}.{self.minor}"

    def as_int(self) -> int:
        """Pack as u32: major in the high half-word, minor in the low."""
        return ((self.major & 0xFFFF) << 16) | (self.minor & 0xFFFF)

    @staticmethod
    def from_str(s: str) -> "SdkVersion":
        """Parse a "MAJOR.MINOR" string back into an SdkVersion."""
        major, minor = s.split(".")
        return SdkVersion(int(major), int(minor))

    def dictify(self) -> dict:
        """Map this entry onto the API cache CSV columns."""
        return {"name": str(self), "type": None, "params": None}
|
||||
|
||||
|
||||
class VersionBump(Enum):
    """Kind of version increment pending after comparing APIs."""

    NONE = auto()
    MAJOR = auto()
    MINOR = auto()
|
||||
|
||||
|
||||
class ApiEntryState(Enum):
    """Review status of an entry in the API cache CSV ("status" column)."""

    PENDING = "?"
    APPROVED = "+"
    DISABLED = "-"
    # Special value for API version entry so users have less incentive to edit it
    VERSION_PENDING = "v"
|
||||
|
||||
|
||||
# Class that stores all known API entries, both enabled and disabled.
# Also keeps track of API versioning
# Allows comparison and update from newly-generated API
class SdkCache:
    CSV_FIELD_NAMES = ("entry", "status", "name", "type", "params")

    def __init__(self, cache_file: str, load_version_only=False):
        """Load the API cache from *cache_file*.

        With load_version_only=True only rows up to and including the
        version row are read; such an instance cannot be saved back.
        """
        self.cache_file_name = cache_file
        self.version = SdkVersion(0, 0)
        self.sdk = ApiEntries()
        # Entries present in the cache but excluded from the API ("-" status).
        self.disabled_entries = set()
        # Entries awaiting review ("?" status, or discovered this run).
        self.new_entries = set()
        self.loaded_dirty = False
        self.loaded_dirty_version = False

        self.version_action = VersionBump.NONE
        self._load_version_only = load_version_only
        self.load_cache()

    def is_buildable(self) -> bool:
        """True when the cache is fully reviewed and versioned, i.e. the
        SDK can actually be built from it."""
        return (
            self.version != SdkVersion(0, 0)
            and self.version_action == VersionBump.NONE
            and not self.loaded_dirty
            and not self.new_entries
        )

    def _filter_enabled(self, sdk_entries):
        # Drop disabled entries; sort by name for deterministic output.
        return sorted(
            (e for e in sdk_entries if e not in self.disabled_entries),
            key=operator.attrgetter("name"),
        )

    def get_valid_names(self):
        """Names of all enabled functions and variables."""
        syms = {e.name for e in self.get_functions()}
        syms.update(e.name for e in self.get_variables())
        return syms

    def get_functions(self):
        return self._filter_enabled(self.sdk.functions)

    def get_variables(self):
        return self._filter_enabled(self.sdk.variables)

    def get_headers(self):
        return self._filter_enabled(self.sdk.headers)

    def _get_entry_status(self, entry) -> "ApiEntryState":
        # FIX: was annotated "-> str" although it returns ApiEntryState members.
        if entry in self.disabled_entries:
            return ApiEntryState.DISABLED
        elif entry in self.new_entries:
            if isinstance(entry, SdkVersion):
                return ApiEntryState.VERSION_PENDING
            return ApiEntryState.PENDING
        else:
            return ApiEntryState.APPROVED

    def _format_entry(self, obj):
        """Build a CSV row dict for *obj* (entry/status + dictify columns)."""
        obj_dict = obj.dictify()
        obj_dict.update(
            dict(
                entry=obj.csv_type,
                status=self._get_entry_status(obj).value,
            )
        )
        return obj_dict

    def save(self) -> None:
        """Apply any pending version bump and rewrite the cache CSV when
        the API (or its version row) changed."""
        if self._load_version_only:
            raise Exception("Only SDK version was loaded, cannot save")

        version_is_clean = True
        if self.loaded_dirty:
            # There are still new entries and version was already updated
            version_is_clean = False

        if self.version_action == VersionBump.MINOR:
            self.version = SdkVersion(self.version.major, self.version.minor + 1)
            version_is_clean = False
        elif self.version_action == VersionBump.MAJOR:
            self.version = SdkVersion(self.version.major + 1, 0)
            version_is_clean = False

        if version_is_clean:
            print(f"API version {self.version} is up to date")
        else:
            # Keep the version entry pending until changes are reviewed.
            self.new_entries.add(self.version)
            print(
                f"API version is still WIP: {self.version}. Review the changes and re-run command."
            )
            # FIX: was a placeholder-less f-string.
            print("Entries to review:")
            print(
                "\n".join(
                    str(e) for e in self.new_entries if not isinstance(e, SdkVersion)
                )
            )

        if not version_is_clean or self.loaded_dirty_version:
            # Regenerate cache file: version row first, then headers,
            # functions and variables, each sorted by name.
            str_cache_entries = [self.version]
            name_getter = operator.attrgetter("name")
            str_cache_entries.extend(sorted(self.sdk.headers, key=name_getter))
            str_cache_entries.extend(sorted(self.sdk.functions, key=name_getter))
            str_cache_entries.extend(sorted(self.sdk.variables, key=name_getter))

            with open(self.cache_file_name, "w", newline="") as f:
                writer = csv.DictWriter(f, fieldnames=SdkCache.CSV_FIELD_NAMES)
                writer.writeheader()

                for entry in str_cache_entries:
                    writer.writerow(self._format_entry(entry))

    def _process_entry(self, entry_dict: dict) -> None:
        """Reconstruct one CSV row into the in-memory entry sets."""
        entry_class = entry_dict["entry"]
        entry_status = entry_dict["status"]
        entry_name = entry_dict["name"]

        entry = None
        if entry_class == SdkVersion.csv_type:
            self.version = SdkVersion.from_str(entry_name)
            if entry_status == ApiEntryState.VERSION_PENDING.value:
                self.loaded_dirty_version = True
        elif entry_class == ApiHeader.csv_type:
            self.sdk.headers.add(entry := ApiHeader(entry_name))
        elif entry_class == ApiEntryFunction.csv_type:
            self.sdk.functions.add(
                entry := ApiEntryFunction(
                    entry_name,
                    entry_dict["type"],
                    entry_dict["params"],
                )
            )
        elif entry_class == ApiEntryVariable.csv_type:
            self.sdk.variables.add(
                entry := ApiEntryVariable(entry_name, entry_dict["type"])
            )
        else:
            print(entry_dict)
            raise Exception("Unknown entry type: %s" % entry_class)

        if entry is None:
            # Version rows carry no entry object to classify.
            return

        if entry_status == ApiEntryState.DISABLED.value:
            self.disabled_entries.add(entry)
        elif entry_status == ApiEntryState.PENDING.value:
            self.new_entries.add(entry)

    def load_cache(self) -> None:
        """Populate state from the cache CSV; raises if the file is missing."""
        if not os.path.exists(self.cache_file_name):
            raise Exception(
                f"Cannot load symbol cache '{self.cache_file_name}'! File does not exist"
            )

        with open(self.cache_file_name, "r") as f:
            for row in csv.DictReader(f):
                self._process_entry(row)
                # In version-only mode stop right after the version row.
                if self._load_version_only and row.get("entry") == SdkVersion.csv_type:
                    break
        self.loaded_dirty = bool(self.new_entries)

    def sync_sets(self, known_set: Set[Any], new_set: Set[Any]):
        """Merge *new_set* into *known_set*, tracking additions and removals
        and escalating the pending version bump accordingly."""
        new_entries = new_set - known_set
        if new_entries:
            print(f"New: {new_entries}")
            known_set |= new_entries
            self.new_entries |= new_entries
            if self.version_action == VersionBump.NONE:
                self.version_action = VersionBump.MINOR
        removed_entries = known_set - new_set
        if removed_entries:
            print(f"Removed: {removed_entries}")
            known_set -= removed_entries
            # If any of removed entries was part of active API, that's a major bump
            if any(
                e not in self.disabled_entries and e not in self.new_entries
                for e in removed_entries
            ):
                self.version_action = VersionBump.MAJOR
            self.disabled_entries -= removed_entries
            self.new_entries -= removed_entries

    def validate_api(self, api: "ApiEntries") -> None:
        """Diff *api* against the cached API, updating pending state."""
        self.sync_sets(self.sdk.headers, api.headers)
        self.sync_sets(self.sdk.functions, api.functions)
        self.sync_sets(self.sdk.variables, api.variables)
|
@@ -5,6 +5,7 @@ import os
|
||||
import atexit
|
||||
|
||||
sys.path.insert(0, os.path.join(os.getcwd(), "scripts"))
|
||||
sys.path.insert(0, os.path.join(os.getcwd(), "lib/cxxheaderparser"))
|
||||
|
||||
|
||||
def bf_to_str(bf):
|
||||
|
@@ -63,7 +63,7 @@ class BlackmagicResolver:
|
||||
if probe := self.get_serial() or self.get_networked():
|
||||
return probe
|
||||
|
||||
raise Exception("Please specify BLACKMAGIC=...")
|
||||
raise StopError("Please specify BLACKMAGIC=...")
|
||||
|
||||
|
||||
def generate(env):
|
||||
|
@@ -18,18 +18,26 @@ from fbt.appmanifest import (
|
||||
|
||||
def LoadApplicationManifests(env):
|
||||
appmgr = env["APPMGR"] = AppManager()
|
||||
for entry in env.Glob("#/applications/*", ondisk=True, source=True):
|
||||
if isinstance(entry, SCons.Node.FS.Dir) and not str(entry).startswith("."):
|
||||
try:
|
||||
app_manifest_file_path = os.path.join(entry.abspath, "application.fam")
|
||||
appmgr.load_manifest(app_manifest_file_path, entry.name)
|
||||
env.Append(PY_LINT_SOURCES=[app_manifest_file_path])
|
||||
except FlipperManifestException as e:
|
||||
warn(WarningOnByDefault, str(e))
|
||||
for app_dir, _ in env["APPDIRS"]:
|
||||
app_dir_node = env.Dir("#").Dir(app_dir)
|
||||
|
||||
for entry in app_dir_node.glob("*", ondisk=True, source=True):
|
||||
if isinstance(entry, SCons.Node.FS.Dir) and not str(entry).startswith("."):
|
||||
try:
|
||||
app_manifest_file_path = os.path.join(
|
||||
entry.abspath, "application.fam"
|
||||
)
|
||||
appmgr.load_manifest(app_manifest_file_path, entry)
|
||||
env.Append(PY_LINT_SOURCES=[app_manifest_file_path])
|
||||
except FlipperManifestException as e:
|
||||
warn(WarningOnByDefault, str(e))
|
||||
|
||||
|
||||
def PrepareApplicationsBuild(env):
|
||||
env["APPBUILD"] = env["APPMGR"].filter_apps(env["APPS"])
|
||||
appbuild = env["APPBUILD"] = env["APPMGR"].filter_apps(env["APPS"])
|
||||
env.Append(
|
||||
SDK_HEADERS=appbuild.get_sdk_headers(),
|
||||
)
|
||||
env["APPBUILD_DUMP"] = env.Action(
|
||||
DumpApplicationConfig,
|
||||
"\tINFO\t",
|
||||
|
@@ -13,11 +13,11 @@ def icons_emitter(target, source, env):
|
||||
"compiled/assets_icons.c",
|
||||
"compiled/assets_icons.h",
|
||||
]
|
||||
source = env.GlobRecursive("*.*", env["ICON_SRC_DIR"])
|
||||
return target, source
|
||||
|
||||
|
||||
def proto_emitter(target, source, env):
|
||||
out_path = target[0].path
|
||||
target = []
|
||||
for src in source:
|
||||
basename = os.path.splitext(src.name)[0]
|
||||
@@ -109,7 +109,7 @@ def generate(env):
|
||||
BUILDERS={
|
||||
"IconBuilder": Builder(
|
||||
action=Action(
|
||||
'${PYTHON3} "${ASSETS_COMPILER}" icons ${SOURCE.posix} ${TARGET.dir.posix}',
|
||||
'${PYTHON3} "${ASSETS_COMPILER}" icons ${ICON_SRC_DIR} ${TARGET.dir}',
|
||||
"${ICONSCOMSTR}",
|
||||
),
|
||||
emitter=icons_emitter,
|
||||
|
@@ -4,7 +4,7 @@ from SCons.Script import Mkdir
|
||||
from SCons.Defaults import Touch
|
||||
|
||||
|
||||
def get_variant_dirname(env, project=None):
|
||||
def GetProjetDirName(env, project=None):
|
||||
parts = [f"f{env['TARGET_HW']}"]
|
||||
if project:
|
||||
parts.append(project)
|
||||
@@ -21,7 +21,7 @@ def get_variant_dirname(env, project=None):
|
||||
|
||||
|
||||
def create_fw_build_targets(env, configuration_name):
|
||||
flavor = get_variant_dirname(env, configuration_name)
|
||||
flavor = GetProjetDirName(env, configuration_name)
|
||||
build_dir = env.Dir("build").Dir(flavor).abspath
|
||||
return env.SConscript(
|
||||
"firmware.scons",
|
||||
@@ -49,7 +49,7 @@ def AddFwProject(env, base_env, fw_type, fw_env_key):
|
||||
],
|
||||
)
|
||||
|
||||
env.Replace(DIST_DIR=get_variant_dirname(env))
|
||||
env.Replace(DIST_DIR=env.GetProjetDirName())
|
||||
return project_env
|
||||
|
||||
|
||||
@@ -115,6 +115,7 @@ def generate(env):
|
||||
env.AddMethod(AddFwProject)
|
||||
env.AddMethod(DistCommand)
|
||||
env.AddMethod(AddOpenOCDFlashTarget)
|
||||
env.AddMethod(GetProjetDirName)
|
||||
env.AddMethod(AddJFlashTarget)
|
||||
env.AddMethod(AddUsbFlashTarget)
|
||||
|
||||
|
@@ -1,29 +1,151 @@
|
||||
from SCons.Builder import Builder
|
||||
from SCons.Action import Action
|
||||
from SCons.Errors import UserError
|
||||
import SCons.Warnings
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
from fbt.elfmanifest import assemble_manifest_data
|
||||
from fbt.sdk import SdkCache
|
||||
import itertools
|
||||
|
||||
|
||||
def BuildAppElf(env, app):
|
||||
work_dir = env.subst("$EXT_APPS_WORK_DIR")
|
||||
app_target_name = os.path.join(work_dir, app.appid)
|
||||
|
||||
app_alias = f"{env['FIRMWARE_BUILD_CFG']}_{app.appid}"
|
||||
app_elf = env.Program(
|
||||
app_target_name,
|
||||
env.GlobRecursive("*.c*", os.path.join(work_dir, app._appdir)),
|
||||
APP_ENTRY=app.entry_point,
|
||||
app_original_elf = os.path.join(work_dir, f"{app.appid}_d")
|
||||
app_sources = list(
|
||||
itertools.chain.from_iterable(
|
||||
env.GlobRecursive(source_type, os.path.join(work_dir, app._appdir.relpath))
|
||||
for source_type in app.sources
|
||||
)
|
||||
)
|
||||
app_elf_dump = env.ObjDump(app_target_name)
|
||||
app_elf_raw = env.Program(
|
||||
app_original_elf,
|
||||
app_sources,
|
||||
APP_ENTRY=app.entry_point,
|
||||
LIBS=env["LIBS"] + app.fap_libs,
|
||||
)
|
||||
|
||||
app_elf_dump = env.ObjDump(app_elf_raw)
|
||||
env.Alias(f"{app_alias}_list", app_elf_dump)
|
||||
|
||||
app_stripped_elf = env.ELFStripper(
|
||||
os.path.join(env.subst("$PLUGIN_ELF_DIR"), app.appid), app_elf
|
||||
app_elf_augmented = env.EmbedAppMetadata(
|
||||
os.path.join(env.subst("$PLUGIN_ELF_DIR"), app.appid),
|
||||
app_elf_raw,
|
||||
APP=app,
|
||||
)
|
||||
env.Alias(app_alias, app_stripped_elf)
|
||||
return app_stripped_elf
|
||||
|
||||
env.Depends(app_elf_augmented, [env["SDK_DEFINITION"], env.Value(app)])
|
||||
if app.fap_icon:
|
||||
env.Depends(
|
||||
app_elf_augmented,
|
||||
env.File(f"{app._apppath}/{app.fap_icon}"),
|
||||
)
|
||||
env.Alias(app_alias, app_elf_augmented)
|
||||
|
||||
app_elf_import_validator = env.ValidateAppImports(app_elf_augmented)
|
||||
env.AlwaysBuild(app_elf_import_validator)
|
||||
return (app_elf_augmented, app_elf_raw, app_elf_import_validator)
|
||||
|
||||
|
||||
def prepare_app_metadata(target, source, env):
|
||||
sdk_cache = SdkCache(env.subst("$SDK_DEFINITION"), load_version_only=True)
|
||||
|
||||
if not sdk_cache.is_buildable():
|
||||
raise UserError(
|
||||
"SDK version is not finalized, please review changes and re-run operation"
|
||||
)
|
||||
|
||||
app = env["APP"]
|
||||
meta_file_name = source[0].path + ".meta"
|
||||
with open(meta_file_name, "wb") as f:
|
||||
# f.write(f"hello this is {app}")
|
||||
f.write(
|
||||
assemble_manifest_data(
|
||||
app_manifest=app,
|
||||
hardware_target=int(env.subst("$TARGET_HW")),
|
||||
sdk_version=sdk_cache.version.as_int(),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def validate_app_imports(target, source, env):
|
||||
sdk_cache = SdkCache(env.subst("$SDK_DEFINITION"), load_version_only=False)
|
||||
app_syms = set()
|
||||
with open(target[0].path, "rt") as f:
|
||||
for line in f:
|
||||
app_syms.add(line.split()[0])
|
||||
unresolved_syms = app_syms - sdk_cache.get_valid_names()
|
||||
if unresolved_syms:
|
||||
SCons.Warnings.warn(
|
||||
SCons.Warnings.LinkWarning,
|
||||
f"{source[0].path}: app won't run. Unresolved symbols: {unresolved_syms}",
|
||||
)
|
||||
|
||||
|
||||
def GetExtAppFromPath(env, app_dir):
|
||||
if not app_dir:
|
||||
raise UserError("APPSRC= not set")
|
||||
|
||||
appmgr = env["APPMGR"]
|
||||
|
||||
app = None
|
||||
for dir_part in reversed(pathlib.Path(app_dir).parts):
|
||||
if app := appmgr.find_by_appdir(dir_part):
|
||||
break
|
||||
if not app:
|
||||
raise UserError(f"Failed to resolve application for given APPSRC={app_dir}")
|
||||
|
||||
app_elf = env["_extapps"]["compact"].get(app.appid, None)
|
||||
if not app_elf:
|
||||
raise UserError(f"No external app found for {app.appid}")
|
||||
|
||||
return (app, app_elf[0])
|
||||
|
||||
|
||||
def generate(env, **kw):
|
||||
env.SetDefault(EXT_APPS_WORK_DIR=kw.get("EXT_APPS_WORK_DIR", ".extapps"))
|
||||
env.VariantDir(env.subst("$EXT_APPS_WORK_DIR"), ".", duplicate=False)
|
||||
env.SetDefault(EXT_APPS_WORK_DIR=kw.get("EXT_APPS_WORK_DIR"))
|
||||
env.VariantDir(env.subst("$EXT_APPS_WORK_DIR"), env.Dir("#"), duplicate=False)
|
||||
|
||||
env.AddMethod(BuildAppElf)
|
||||
env.AddMethod(GetExtAppFromPath)
|
||||
env.Append(
|
||||
BUILDERS={
|
||||
"EmbedAppMetadata": Builder(
|
||||
action=[
|
||||
Action(prepare_app_metadata, "$APPMETA_COMSTR"),
|
||||
Action(
|
||||
"${OBJCOPY} "
|
||||
"--remove-section .ARM.attributes "
|
||||
"--add-section .fapmeta=${SOURCE}.meta "
|
||||
"--set-section-flags .fapmeta=contents,noload,readonly,data "
|
||||
"--strip-debug --strip-unneeded "
|
||||
"--add-gnu-debuglink=${SOURCE} "
|
||||
"${SOURCES} ${TARGET}",
|
||||
"$APPMETAEMBED_COMSTR",
|
||||
),
|
||||
],
|
||||
suffix=".fap",
|
||||
src_suffix=".elf",
|
||||
),
|
||||
"ValidateAppImports": Builder(
|
||||
action=[
|
||||
Action(
|
||||
"@${NM} -P -u ${SOURCE} > ${TARGET}",
|
||||
None, # "$APPDUMP_COMSTR",
|
||||
),
|
||||
Action(
|
||||
validate_app_imports,
|
||||
None, # "$APPCHECK_COMSTR",
|
||||
),
|
||||
],
|
||||
suffix=".impsyms",
|
||||
src_suffix=".fap",
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def exists(env):
|
||||
|
208
site_scons/site_tools/fbt_sdk.py
Normal file
208
site_scons/site_tools/fbt_sdk.py
Normal file
@@ -0,0 +1,208 @@
|
||||
from SCons.Builder import Builder
|
||||
from SCons.Action import Action
|
||||
from SCons.Errors import UserError
|
||||
|
||||
# from SCons.Scanner import C
|
||||
from SCons.Script import Mkdir, Copy, Delete, Entry
|
||||
from SCons.Util import LogicalLines
|
||||
|
||||
import os.path
|
||||
import posixpath
|
||||
import pathlib
|
||||
|
||||
from fbt.sdk import SdkCollector, SdkCache
|
||||
|
||||
|
||||
def prebuild_sdk_emitter(target, source, env):
|
||||
target.append(env.ChangeFileExtension(target[0], ".d"))
|
||||
return target, source
|
||||
|
||||
|
||||
def prebuild_sdk_create_origin_file(target, source, env):
|
||||
mega_file = env.subst("${TARGET}.c", target=target[0])
|
||||
with open(mega_file, "wt") as sdk_c:
|
||||
sdk_c.write("\n".join(f"#include <{h.path}>" for h in env["SDK_HEADERS"]))
|
||||
|
||||
|
||||
class SdkTreeBuilder:
|
||||
def __init__(self, env, target, source) -> None:
|
||||
self.env = env
|
||||
self.target = target
|
||||
self.source = source
|
||||
|
||||
self.header_depends = []
|
||||
self.header_dirs = []
|
||||
|
||||
self.target_sdk_dir = env.subst("f${TARGET_HW}_sdk")
|
||||
self.sdk_deploy_dir = target[0].Dir(self.target_sdk_dir)
|
||||
|
||||
def _parse_sdk_depends(self):
|
||||
deps_file = self.source[0]
|
||||
with open(deps_file.path, "rt") as deps_f:
|
||||
lines = LogicalLines(deps_f).readlines()
|
||||
_, depends = lines[0].split(":", 1)
|
||||
self.header_depends = list(
|
||||
filter(lambda fname: fname.endswith(".h"), depends.split()),
|
||||
)
|
||||
self.header_dirs = sorted(
|
||||
set(map(os.path.normpath, map(os.path.dirname, self.header_depends)))
|
||||
)
|
||||
|
||||
def _generate_sdk_meta(self):
|
||||
filtered_paths = [self.target_sdk_dir]
|
||||
full_fw_paths = list(
|
||||
map(
|
||||
os.path.normpath,
|
||||
(self.env.Dir(inc_dir).relpath for inc_dir in self.env["CPPPATH"]),
|
||||
)
|
||||
)
|
||||
|
||||
sdk_dirs = ", ".join(f"'{dir}'" for dir in self.header_dirs)
|
||||
for dir in full_fw_paths:
|
||||
if dir in sdk_dirs:
|
||||
filtered_paths.append(
|
||||
posixpath.normpath(posixpath.join(self.target_sdk_dir, dir))
|
||||
)
|
||||
|
||||
sdk_env = self.env.Clone()
|
||||
sdk_env.Replace(CPPPATH=filtered_paths)
|
||||
with open(self.target[0].path, "wt") as f:
|
||||
cmdline_options = sdk_env.subst(
|
||||
"$CCFLAGS $_CCCOMCOM", target=Entry("dummy")
|
||||
)
|
||||
f.write(cmdline_options.replace("\\", "/"))
|
||||
f.write("\n")
|
||||
|
||||
def _create_deploy_commands(self):
|
||||
dirs_to_create = set(
|
||||
self.sdk_deploy_dir.Dir(dirpath) for dirpath in self.header_dirs
|
||||
)
|
||||
actions = [
|
||||
Delete(self.sdk_deploy_dir),
|
||||
Mkdir(self.sdk_deploy_dir),
|
||||
]
|
||||
actions += [Mkdir(d) for d in dirs_to_create]
|
||||
|
||||
actions += [
|
||||
Copy(
|
||||
self.sdk_deploy_dir.File(h).path,
|
||||
h,
|
||||
)
|
||||
for h in self.header_depends
|
||||
]
|
||||
return actions
|
||||
|
||||
def generate_actions(self):
|
||||
self._parse_sdk_depends()
|
||||
self._generate_sdk_meta()
|
||||
|
||||
return self._create_deploy_commands()
|
||||
|
||||
|
||||
def deploy_sdk_tree(target, source, env, for_signature):
|
||||
if for_signature:
|
||||
return []
|
||||
|
||||
sdk_tree = SdkTreeBuilder(env, target, source)
|
||||
return sdk_tree.generate_actions()
|
||||
|
||||
|
||||
def gen_sdk_data(sdk_cache: SdkCache):
|
||||
api_def = []
|
||||
api_def.extend(
|
||||
(f"#include <{h.name}>" for h in sdk_cache.get_headers()),
|
||||
)
|
||||
|
||||
api_def.append(f"const int elf_api_version = {sdk_cache.version.as_int()};")
|
||||
|
||||
api_def.append(
|
||||
"static constexpr auto elf_api_table = sort(create_array_t<sym_entry>("
|
||||
)
|
||||
|
||||
api_lines = []
|
||||
for fun_def in sdk_cache.get_functions():
|
||||
api_lines.append(
|
||||
f"API_METHOD({fun_def.name}, {fun_def.returns}, ({fun_def.params}))"
|
||||
)
|
||||
|
||||
for var_def in sdk_cache.get_variables():
|
||||
api_lines.append(f"API_VARIABLE({var_def.name}, {var_def.var_type })")
|
||||
|
||||
api_def.append(",\n".join(api_lines))
|
||||
|
||||
api_def.append("));")
|
||||
return api_def
|
||||
|
||||
|
||||
def _check_sdk_is_up2date(sdk_cache: SdkCache):
|
||||
if not sdk_cache.is_buildable():
|
||||
raise UserError(
|
||||
"SDK version is not finalized, please review changes and re-run operation"
|
||||
)
|
||||
|
||||
|
||||
def validate_sdk_cache(source, target, env):
|
||||
# print(f"Generating SDK for {source[0]} to {target[0]}")
|
||||
current_sdk = SdkCollector()
|
||||
current_sdk.process_source_file_for_sdk(source[0].path)
|
||||
for h in env["SDK_HEADERS"]:
|
||||
current_sdk.add_header_to_sdk(pathlib.Path(h.path).as_posix())
|
||||
|
||||
sdk_cache = SdkCache(target[0].path)
|
||||
sdk_cache.validate_api(current_sdk.get_api())
|
||||
sdk_cache.save()
|
||||
_check_sdk_is_up2date(sdk_cache)
|
||||
|
||||
|
||||
def generate_sdk_symbols(source, target, env):
|
||||
sdk_cache = SdkCache(source[0].path)
|
||||
_check_sdk_is_up2date(sdk_cache)
|
||||
|
||||
api_def = gen_sdk_data(sdk_cache)
|
||||
with open(target[0].path, "wt") as f:
|
||||
f.write("\n".join(api_def))
|
||||
|
||||
|
||||
def generate(env, **kw):
|
||||
env.Append(
|
||||
BUILDERS={
|
||||
"SDKPrebuilder": Builder(
|
||||
emitter=prebuild_sdk_emitter,
|
||||
action=[
|
||||
Action(
|
||||
prebuild_sdk_create_origin_file,
|
||||
"$SDK_PREGEN_COMSTR",
|
||||
),
|
||||
Action(
|
||||
"$CC -o $TARGET -E -P $CCFLAGS $_CCCOMCOM $SDK_PP_FLAGS -MMD ${TARGET}.c",
|
||||
"$SDK_COMSTR",
|
||||
),
|
||||
],
|
||||
suffix=".i",
|
||||
),
|
||||
"SDKTree": Builder(
|
||||
generator=deploy_sdk_tree,
|
||||
src_suffix=".d",
|
||||
),
|
||||
"SDKSymUpdater": Builder(
|
||||
action=Action(
|
||||
validate_sdk_cache,
|
||||
"$SDKSYM_UPDATER_COMSTR",
|
||||
),
|
||||
suffix=".csv",
|
||||
src_suffix=".i",
|
||||
),
|
||||
"SDKSymGenerator": Builder(
|
||||
action=Action(
|
||||
generate_sdk_symbols,
|
||||
"$SDKSYM_GENERATOR_COMSTR",
|
||||
),
|
||||
suffix=".h",
|
||||
src_suffix=".csv",
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def exists(env):
|
||||
return True
|
@@ -3,12 +3,14 @@ from SCons.Action import Action
|
||||
import SCons
|
||||
|
||||
__OBJCOPY_ARM_BIN = "arm-none-eabi-objcopy"
|
||||
__NM_ARM_BIN = "arm-none-eabi-nm"
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.SetDefault(
|
||||
BIN2DFU="${ROOT_DIR.abspath}/scripts/bin2dfu.py",
|
||||
OBJCOPY=__OBJCOPY_ARM_BIN, # FIXME
|
||||
NM=__NM_ARM_BIN, # FIXME
|
||||
)
|
||||
env.Append(
|
||||
BUILDERS={
|
||||
|
@@ -40,10 +40,15 @@ def PhonyTarget(env, name, action, source=None, **kw):
|
||||
return command
|
||||
|
||||
|
||||
def ChangeFileExtension(env, fnode, ext):
|
||||
return env.File(f"#{os.path.splitext(fnode.path)[0]}{ext}")
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(BuildModule)
|
||||
env.AddMethod(BuildModules)
|
||||
env.AddMethod(PhonyTarget)
|
||||
env.AddMethod(ChangeFileExtension)
|
||||
|
||||
|
||||
def exists(env):
|
||||
|
Reference in New Issue
Block a user