[FL-2649] Drop Docker in CI/CD (#1412)

* enable sparseCheckout, moving github actions from docker to raw shell
* fix missing known_hosts while setting ssh priv key
* fix build.yml
* add ssh key to upload just in time
* fixing rsync syntax
* fix build.yml
* try to fix build.yml again
* testing rsync
* test rsync again
* add linters
* add Black Python linter to submodules
* add Black submodule
* add working python linter target, dirty file list
* up toolchain to version 4
* up toolchain to ver 5
* up toolchain version to 6
* fbt: using black 22.6.0
* remove Black submodule, up toolchain to ver 7
* fbt: added lint_py, format_py targets
* add pvs_studio workflow
* fix pvs_studio segfault
* fix pvs_studio command
* fix pvs_studio command 2
* show env before run pvs_studio
* try to debug pvs_studio
* try to strace pvs_studio..
* Add FBT_TOOLCHAIN_PATH, macOS Rosetta check, and ignore non-x86_64 Linux architectures
* prevent redownloading toolchain on github-runners
* fix toolchain download exitcode
* add strace to debug pvs_studio segfault
* disable strace to catch full code dump
* Add './fbt cli' target to access Flipper CLI via PySerial
* remove pvs_studio from this PR
* removing clang-format from toolchain due to errors
* make source easy, and fix some mistakes found by @hedger
* Add check_submodules workflow, some fixes
* fixing mistakes

Co-authored-by: hedger <hedger@nanode.su>
Co-authored-by: hedger <hedger@users.noreply.github.com>
This commit is contained in:
Max Andreev 2022-08-02 17:05:31 +03:00 committed by GitHub
parent a1637e9216
commit 93a4b9c4a9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 357 additions and 340 deletions

View File

@ -15,11 +15,8 @@ env:
jobs:
main:
runs-on: [self-hosted,FlipperZero]
runs-on: [self-hosted,FlipperZeroShell]
steps:
- name: 'Cleanup workspace'
uses: AutoModality/action-clean@v1
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
@ -32,12 +29,8 @@ jobs:
uses: actions/checkout@v2
with:
fetch-depth: 0
submodules: true
ref: ${{ github.event.pull_request.head.sha }}
- name: 'Build docker image'
uses: ./.github/actions/docker
- name: 'Make artifacts directory'
run: |
test -d artifacts && rm -rf artifacts || true
@ -71,20 +64,16 @@ jobs:
run: |
tar czpf artifacts/flipper-z-any-scripts-${{steps.names.outputs.suffix}}.tgz scripts
- name: 'Build the firmware in docker'
uses: ./.github/actions/docker
with:
- name: 'Build the firmware'
run: |
set -e
for TARGET in ${TARGETS}
do
./fbt TARGET_HW=`echo ${TARGET} | sed 's/f//'` updater_package ${{ startsWith(github.ref, 'refs/tags') && 'DEBUG=0 COMPACT=1' || '' }}
FBT_TOOLCHAIN_PATH=/opt ./fbt TARGET_HW=`echo ${TARGET} | sed 's/f//'` updater_package ${{ startsWith(github.ref, 'refs/tags') && 'DEBUG=0 COMPACT=1' || '' }}
done
- name: 'Move upload files'
if: ${{ !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/docker
with:
run: |
set -e
for TARGET in ${TARGETS}
@ -94,8 +83,6 @@ jobs:
- name: 'Bundle self-update package'
if: ${{ !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/docker
with:
run: |
set -e
for UPDATEBUNDLE in artifacts/*/
@ -117,29 +104,23 @@ jobs:
- name: 'Bundle core2 firmware'
if: ${{ !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/docker
with:
run: |
./fbt copro_dist
FBT_TOOLCHAIN_PATH=/opt ./fbt copro_dist
tar czpf artifacts/flipper-z-any-core2_firmware-${{steps.names.outputs.suffix}}.tgz -C assets core2_firmware
- name: 'Upload artifacts to update server'
if: ${{ !github.event.pull_request.head.repo.fork }}
uses: burnett01/rsync-deployments@5.1
with:
switches: -avzP --delete --mkpath
path: artifacts/
remote_path: "${{ secrets.RSYNC_DEPLOY_BASE_PATH }}${{steps.names.outputs.artifacts-path}}/"
remote_host: ${{ secrets.RSYNC_DEPLOY_HOST }}
remote_port: ${{ secrets.RSYNC_DEPLOY_PORT }}
remote_user: ${{ secrets.RSYNC_DEPLOY_USER }}
remote_key: ${{ secrets.RSYNC_DEPLOY_KEY }}
run: |
echo "${{ secrets.RSYNC_DEPLOY_KEY }}" > deploy_key;
chmod 600 ./deploy_key;
rsync -avzP --mkpath \
-e 'ssh -p ${{ secrets.RSYNC_DEPLOY_PORT }} -i ./deploy_key' \
artifacts/ ${{ secrets.RSYNC_DEPLOY_USER }}@${{ secrets.RSYNC_DEPLOY_HOST }}:"${{ secrets.RSYNC_DEPLOY_BASE_PATH }}${{steps.names.outputs.artifacts-path}}/";
rm ./deploy_key;
- name: 'Trigger update server reindex'
if: ${{ !github.event.pull_request.head.repo.fork }}
uses: wei/curl@master
with:
args: -X POST -F 'key=${{ secrets.REINDEX_KEY }}' ${{ secrets.REINDEX_URL }}
run: curl -X POST -F 'key=${{ secrets.REINDEX_KEY }}' ${{ secrets.REINDEX_URL }}
- name: 'Find Previous Comment'
if: ${{ !github.event.pull_request.head.repo.fork && github.event.pull_request }}
@ -165,11 +146,8 @@ jobs:
compact:
if: ${{ !startsWith(github.ref, 'refs/tags') }}
runs-on: [self-hosted,FlipperZero]
runs-on: [self-hosted,FlipperZeroShell]
steps:
- name: 'Cleanup workspace'
uses: AutoModality/action-clean@v1
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
@ -185,9 +163,6 @@ jobs:
submodules: true
ref: ${{ github.event.pull_request.head.sha }}
- name: 'Build docker image'
uses: ./.github/actions/docker
- name: 'Generate suffix and folder name'
id: names
run: |
@ -207,12 +182,10 @@ jobs:
echo "WORKFLOW_BRANCH_OR_TAG=${BRANCH_OR_TAG}" >> $GITHUB_ENV
echo "DIST_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
- name: 'Build the firmware in docker'
uses: ./.github/actions/docker
with:
- name: 'Build the firmware'
run: |
set -e
for TARGET in ${TARGETS}
do
./fbt TARGET_HW=`echo ${TARGET} | sed 's/f//'` updater_package DEBUG=0 COMPACT=1
FBT_TOOLCHAIN_PATH=/opt ./fbt TARGET_HW=`echo ${TARGET} | sed 's/f//'` updater_package DEBUG=0 COMPACT=1
done

View File

@ -1,46 +0,0 @@
name: 'Build toolchain Docker image'
on:
push:
branches:
- dev
tags:
- '*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: flipperdevices/flipperzero-toolchain
flavor: latest=${{ startsWith(github.ref, 'refs/tags/') && !endsWith(github.ref, 'rc')}}
tags: |
type=ref,event=branch
type=ref,event=tag
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: docker/
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
cache-from: type=registry,ref=flipperdevices/flipperzero-toolchain:buildcache
cache-to: type=registry,ref=flipperdevices/flipperzero-toolchain:buildcache,mode=max

View File

@ -1,17 +1,47 @@
name: 'Check submodules'
name: 'Check submodules branch'
on:
push:
branches:
- dev
- "release*"
tags:
- '*'
pull_request:
jobs:
protobuf:
runs-on: ubuntu-latest
check_protobuf:
runs-on: [self-hosted, FlipperZeroShell]
steps:
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
then
git submodule status \
|| git checkout `git rev-list --max-parents=0 HEAD | tail -n 1`
fi
- name: 'Checkout code'
uses: actions/checkout@v2
- name: 'Check submodule commit branch'
uses: jtmullen/submodule-branch-check-action@v1
with:
path: assets/protobuf
branch: dev
fetch_depth: 50
fetch-depth: 0
- name: 'Check protobuf branch'
run: |
SUB_PATH="assets/protobuf";
SUB_BRANCH="dev";
SUB_COMMITS_MIN=40;
cd "$SUB_PATH";
SUBMODULE_HASH="$(git rev-parse HEAD)";
BRANCHES=$(git branch -r --contains "$SUBMODULE_HASH");
COMMITS_IN_BRANCH="$(git rev-list --count dev)";
if [ $COMMITS_IN_BRANCH -lt $SUB_COMMITS_MIN ]; then
echo "::set-output name=fails::error";
echo "::error::Error: Too low commits in $SUB_BRANCH of submodule $SUB_PATH: $COMMITS_IN_BRANCH(expected $SUB_COMMITS_MIN+)";
exit 1;
fi
if ! grep -q "/$SUB_BRANCH" <<< "$BRANCHES"; then
echo "::set-output name=fails::error";
echo "::error::Error: Submodule $SUB_PATH is not on branch $SUB_BRANCH";
exit 1;
fi

View File

@ -14,11 +14,8 @@ env:
jobs:
lint_c_cpp:
runs-on: [self-hosted,FlipperZero]
runs-on: [self-hosted,FlipperZeroShell]
steps:
- name: 'Cleanup workspace'
uses: AutoModality/action-clean@v1
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
@ -31,23 +28,10 @@ jobs:
uses: actions/checkout@v2
with:
fetch-depth: 0
submodules: true
- name: 'Docker cache'
uses: satackey/action-docker-layer-caching@v0.0.11
continue-on-error: true
with:
key: docker-cache-${{ hashFiles('docker/**') }}-{hash}
restore-keys: docker-cache-${{ hashFiles('docker/**') }}-
- name: 'Build docker image'
uses: ./.github/actions/docker
- name: 'Check code formatting'
id: syntax_check
uses: ./.github/actions/docker
with:
run: SET_GH_OUTPUT=1 ./fbt lint
run: SET_GH_OUTPUT=1 FBT_TOOLCHAIN_PATH=/opt ./fbt lint
- name: Report code formatting errors
if: failure() && steps.syntax_check.outputs.errors && github.event.pull_request
@ -59,4 +43,4 @@ jobs:
```
${{ steps.syntax_check.outputs.errors }}
```
You might want to run `docker compose exec dev make format` for an auto-fix.
You might want to run `./fbt format` for an auto-fix.

View File

@ -11,11 +11,8 @@ on:
jobs:
lint_python:
runs-on: ubuntu-latest
runs-on: [self-hosted,FlipperZeroShell]
steps:
- name: 'Cleanup workspace'
uses: AutoModality/action-clean@v1
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
@ -29,8 +26,5 @@ jobs:
with:
fetch-depth: 0
- name: 'Setup python'
uses: actions/setup-python@v2
- name: 'Check python code with black'
uses: psf/black@20.8b1
- name: 'Check code formatting'
run: SET_GH_OUTPUT=1 FBT_TOOLCHAIN_PATH=/opt ./fbt lint_py

View File

@ -7,9 +7,8 @@ on:
jobs:
reindex:
name: 'Reindex updates'
runs-on: [self-hosted,FlipperZero]
runs-on: [self-hosted,FlipperZeroShell]
steps:
- name: Trigger reindex
uses: wei/curl@master
with:
args: -X POST -F 'key=${{ secrets.REINDEX_KEY }}' ${{ secrets.REINDEX_URL }}
run: |
curl -X POST -F 'key=${{ secrets.REINDEX_KEY }}' ${{ secrets.REINDEX_URL }}

View File

@ -7,6 +7,7 @@
# construction of certain targets behind command-line options.
import os
import subprocess
EnsurePythonVersion(3, 8)
@ -231,6 +232,46 @@ distenv.PhonyTarget(
LINT_SOURCES=firmware_env["LINT_SOURCES"],
)
# PY_LINT_SOURCES contains recursively-built modules' SConscript files + application manifests
# Here we add additional Python files residing in repo root
firmware_env.Append(
    PY_LINT_SOURCES=[
        # Py code folders
        "site_scons",
        "scripts",
        # Extra files
        "applications/extapps.scons",
        "SConstruct",
        "firmware.scons",
        "fbt_options.py",
    ]
)

# Shared black invocation; PY_BLACK_ARGS differs between check and fix modes.
black_commandline = "@${PYTHON3} -m black ${PY_BLACK_ARGS} ${PY_LINT_SOURCES}"
# Make black also pick up SCons build files, which lack a .py extension.
black_base_args = ["--include", '"\\.scons|\\.py|SConscript|SConstruct"']

# lint_py: report-only run (--check --diff) over all registered Python sources.
distenv.PhonyTarget(
    "lint_py",
    black_commandline,
    PY_BLACK_ARGS=[
        "--check",
        "--diff",
        *black_base_args,
    ],
    PY_LINT_SOURCES=firmware_env["PY_LINT_SOURCES"],
)

# format_py: same invocation, but lets black rewrite files in place.
distenv.PhonyTarget(
    "format_py",
    black_commandline,
    PY_BLACK_ARGS=black_base_args,
    PY_LINT_SOURCES=firmware_env["PY_LINT_SOURCES"],
)

# Start Flipper CLI via PySerial's miniterm
distenv.PhonyTarget("cli", "${PYTHON3} scripts/serial_cli.py")
# Find blackmagic probe

View File

@ -41,6 +41,8 @@ FBT keeps track of internal dependencies, so you only need to build the highest-
- `blackmagic` - debug firmware with Blackmagic probe (WiFi dev board)
- `openocd` - just start OpenOCD
- `get_blackmagic` - output blackmagic address in gdb remote format. Useful for IDE integration
- `lint`, `format` - run clang-tidy on C source code to check and reformat it according to `.clang-format` specs
- `lint_py`, `format_py` - run [black](https://black.readthedocs.io/en/stable/index.html) on Python source code, build system files & application manifests
### Firmware targets

12
fbt
View File

@ -4,14 +4,20 @@
# unofficial strict mode
set -eu;
SCONS_DEFAULT_FLAGS="-Q --warn=target-not-built";
# private variables
SCRIPT_PATH="$(cd "$(dirname "$0")" && pwd -P)";
SCONS_DEFAULT_FLAGS="-Q --warn=target-not-built";
if [ -z "${FBT_NOENV:-}" ]; then
# public variables
FBT_NOENV="${FBT_NOENV:-""}";
FBT_NO_SYNC="${FBT_NO_SYNC:-""}";
FBT_TOOLCHAIN_PATH="${FBT_TOOLCHAIN_PATH:-$SCRIPT_PATH}";
if [ -z "$FBT_NOENV" ]; then
. "$SCRIPT_PATH/scripts/toolchain/fbtenv.sh";
fi
if [ -z "${FBT_NO_SYNC:-}" ]; then
if [ -z "$FBT_NO_SYNC" ]; then
if [ ! -d "$SCRIPT_PATH/.git" ]; then
echo "\".git\" directory not found, please clone repo via \"git clone --recursive\"";
exit 1;

View File

@ -4,8 +4,6 @@ env.Append(
CPPPATH=[
"#/lib/loclass",
],
CPPDEFINES=[
],
)

View File

@ -5,8 +5,6 @@ env.Append(
"#/lib/mbedtls",
"#/lib/mbedtls/include",
],
CPPDEFINES=[
],
)

14
scripts/serial_cli.py Normal file
View File

@ -0,0 +1,14 @@
import logging
import subprocess
from flipper.utils.cdc import resolve_port
def main():
    """Open the Flipper CLI over USB CDC using PySerial's miniterm.

    Returns miniterm's exit status, or 1 if no device port was found.
    """
    logger = logging.getLogger()
    if not (port := resolve_port(logger, "auto")):
        return 1
    # Propagate miniterm's exit status instead of discarding it.
    return subprocess.call(
        ["python3", "-m", "serial.tools.miniterm", "--raw", port, "230400"]
    )


if __name__ == "__main__":
    # Forward main()'s return value as the process exit code.
    raise SystemExit(main())

View File

@ -13,7 +13,7 @@ if not [%FBT_NOENV%] == [] (
exit /b 0
)
set "FLIPPER_TOOLCHAIN_VERSION=3"
set "FLIPPER_TOOLCHAIN_VERSION=8"
set "FBT_TOOLCHAIN_ROOT=%FBT_ROOT%\toolchain\i686-windows"

View File

@ -1,54 +1,211 @@
#!/bin/sh
# unofficial strict mode
set -eu;
# shellcheck disable=SC2034,SC2016,SC2086
FLIPPER_TOOLCHAIN_VERSION="3";
# public variables
DEFAULT_SCRIPT_PATH="$(pwd -P)";
SCRIPT_PATH="${SCRIPT_PATH:-$DEFAULT_SCRIPT_PATH}";
FBT_TOOLCHAIN_VERSION="${FBT_TOOLCHAIN_VERSION:-"8"}";
FBT_TOOLCHAIN_PATH="${FBT_TOOLCHAIN_PATH:-$SCRIPT_PATH}";
get_kernel_type()
fbtenv_check_sourced()
{
SYS_TYPE="$(uname -s)"
case "${ZSH_EVAL_CONTEXT:-""}" in *:file:*)
return 0;;
esac
case ${0##*/} in dash|-dash|bash|-bash|ksh|-ksh|sh|-sh)
return 0;;
esac
if [ "$(basename $0)" = "fbt" ]; then
return 0;
fi
echo "Running this script manually is wrong, please source it";
echo "Example:";
printf "\tsource scripts/toolchain/fbtenv.sh\n";
return 1;
}
# Sanity-check that SCRIPT_PATH points at the firmware root: the 'fbt'
# launcher must exist there and be executable.
fbtenv_check_script_path()
{
    if [ -x "$SCRIPT_PATH/fbt" ]; then
        return 0;
    fi
    echo "Please source this script being into flipperzero-firmware root directory, or specify 'SCRIPT_PATH' manually";
    echo "Example:";
    printf "\tSCRIPT_PATH=lang/c/flipperzero-firmware source lang/c/flipperzero-firmware/scripts/fbtenv.sh\n";
    echo "If current directory is right, type 'unset SCRIPT_PATH' and try again"
    return 1;
}
fbtenv_get_kernel_type()
{
SYS_TYPE="$(uname -s)";
ARCH_TYPE="$(uname -m)";
if [ "$ARCH_TYPE" != "x86_64" ] && [ "$SYS_TYPE" != "Darwin" ]; then
echo "Now we provide toolchain only for x86_64 arhitecture, sorry..";
return 1;
fi
if [ "$SYS_TYPE" = "Darwin" ]; then
TOOLCHAIN_PATH="toolchain/x86_64-darwin";
fbtenv_check_rosetta || return 1;
TOOLCHAIN_ARCH_DIR="$FBT_TOOLCHAIN_PATH/toolchain/x86_64-darwin";
TOOLCHAIN_URL="https://update.flipperzero.one/builds/toolchain/gcc-arm-none-eabi-10.3-x86_64-darwin-flipper-$FBT_TOOLCHAIN_VERSION.tar.gz";
elif [ "$SYS_TYPE" = "Linux" ]; then
TOOLCHAIN_PATH="toolchain/x86_64-linux";
TOOLCHAIN_ARCH_DIR="$FBT_TOOLCHAIN_PATH/toolchain/x86_64-linux";
TOOLCHAIN_URL="https://update.flipperzero.one/builds/toolchain/gcc-arm-none-eabi-10.3-x86_64-linux-flipper-$FBT_TOOLCHAIN_VERSION.tar.gz";
elif echo "$SYS_TYPE" | grep -q "MINGW"; then
echo "In MinGW shell use \"fbt.cmd\" instead of \"fbt\"";
exit 1;
return 1;
else
echo "Sorry, your system is not supported. Please report your configuration to us.";
exit 1;
echo "Your system is not recognized. Sorry.. Please report us your configuration.";
return 1;
fi
return 0;
}
check_download_toolchain()
fbtenv_check_rosetta()
{
if [ ! -d "$SCRIPT_PATH/$TOOLCHAIN_PATH" ]; then
download_toolchain;
elif [ ! -f "$SCRIPT_PATH/$TOOLCHAIN_PATH/VERSION" ]; then
download_toolchain;
elif [ "$(cat "$SCRIPT_PATH/$TOOLCHAIN_PATH/VERSION")" -ne "$FLIPPER_TOOLCHAIN_VERSION" ]; then
download_toolchain;
if [ "$ARCH_TYPE" = "arm64" ]; then
if ! /usr/bin/pgrep -q oahd; then
echo "Flipper Zero Toolchain needs Rosetta2 to run under Apple Silicon";
echo "Please instal it by typing 'softwareupdate --install-rosetta --agree-to-license'";
return 1;
fi
fi
return 0;
}
download_toolchain()
# Verify that a usable 'tar' is on PATH before attempting to unpack anything.
fbtenv_check_tar()
{
    printf "Checking tar..";
    if tar --version > /dev/null 2>&1; then
        echo "yes";
        return 0;
    fi
    echo "no";
    return 1;
}
main()
fbtenv_check_downloaded_toolchain()
{
if [ -z "${SCRIPT_PATH:-}" ]; then
echo "Manual running of this script is not allowed.";
exit 1;
printf "Checking downloaded toolchain tgz..";
if [ ! -f "$FBT_TOOLCHAIN_PATH/toolchain/$TOOLCHAIN_TAR" ]; then
echo "no";
return 1;
fi
get_kernel_type; # sets TOOLCHAIN_PATH
check_download_toolchain;
PATH="$SCRIPT_PATH/$TOOLCHAIN_PATH/python/bin:$PATH";
PATH="$SCRIPT_PATH/$TOOLCHAIN_PATH/bin:$PATH";
PATH="$SCRIPT_PATH/$TOOLCHAIN_PATH/protobuf/bin:$PATH";
PATH="$SCRIPT_PATH/$TOOLCHAIN_PATH/openocd/bin:$PATH";
echo "yes";
return 0;
}
main;
# Fetch the toolchain archive into $FBT_TOOLCHAIN_PATH/toolchain/.
# Relies on DOWNLOADER/DOWNLOADER_ARGS selected by fbtenv_curl_wget_check and
# on TOOLCHAIN_TAR/TOOLCHAIN_URL set by fbtenv_download_toolchain.
fbtenv_download_toolchain_tar()
{
    echo "Downloading toolchain:";
    mkdir -p "$FBT_TOOLCHAIN_PATH/toolchain" || return 1;
    # $DOWNLOADER_ARGS is intentionally unquoted: it holds several options
    # that must undergo word splitting.
    "$DOWNLOADER" $DOWNLOADER_ARGS "$FBT_TOOLCHAIN_PATH/toolchain/$TOOLCHAIN_TAR" "$TOOLCHAIN_URL" || return 1;
    echo "done";
    return 0;
}
# Remove a previously unpacked toolchain tree (if any) so a stale
# installation never mixes with the freshly unpacked one.
# NOTE: the function name keeps its historical spelling ("tooclhain")
# because callers reference it by that exact name.
fbtenv_remove_old_tooclhain()
{
    printf "Removing old toolchain (if exist)..";
    rm -rf "${TOOLCHAIN_ARCH_DIR}";
    echo "done";
    # Explicit success status, consistent with the other fbtenv_* helpers.
    return 0;
}
# Consume 'tar -v' output on stdin and render a crude progress bar:
# one '#' per 300 extracted entries, then a final " 100.0%".
fbtenv_show_unpack_percentage()
{
    PROCESSED_LINES=0;
    while IFS= read -r _entry; do
        PROCESSED_LINES=$(( PROCESSED_LINES + 1 ));
        [ $(( PROCESSED_LINES % 300 )) -ne 0 ] || printf "#";
    done
    echo " 100.0%";
}
# Extract the downloaded archive and move the resulting directory into its
# per-architecture location ($TOOLCHAIN_ARCH_DIR).
fbtenv_unpack_toolchain()
{
    echo "Unpacking toolchain:";
    # tar's verbose listing (stdout+stderr) is piped into the progress renderer.
    tar -xvf "$FBT_TOOLCHAIN_PATH/toolchain/$TOOLCHAIN_TAR" -C "$FBT_TOOLCHAIN_PATH/toolchain" 2>&1 | fbtenv_show_unpack_percentage;
    # NOTE(review): this mkdir runs after extracting into the same directory,
    # so it looks redundant — confirm whether it can be dropped.
    mkdir -p "$FBT_TOOLCHAIN_PATH/toolchain" || return 1;
    mv "$FBT_TOOLCHAIN_PATH/toolchain/$TOOLCHAIN_DIR" "$TOOLCHAIN_ARCH_DIR" || return 1;
    echo "done";
    return 0;
}
# Delete the no-longer-needed toolchain archive. The ':?' guard aborts
# rather than letting an empty FBT_TOOLCHAIN_PATH degenerate the path.
fbtenv_clearing()
{
    printf "Clearing..";
    _stale_tar="${FBT_TOOLCHAIN_PATH:?}/toolchain/$TOOLCHAIN_TAR";
    rm -rf "$_stale_tar";
    unset _stale_tar;
    echo "done";
    return 0;
}
# Pick an available download tool: prefer curl, fall back to wget.
# On success sets DOWNLOADER and DOWNLOADER_ARGS; on failure prints manual
# download instructions and returns 1.
fbtenv_curl_wget_check()
{
    printf "Checking curl..";
    if ! curl --version > /dev/null 2>&1; then
        echo "no";
        printf "Checking wget..";
        if ! wget --version > /dev/null 2>&1; then
            echo "no";
            echo "No curl or wget found in your PATH";
            echo "Please provide it or download this file:";
            echo;
            echo "$TOOLCHAIN_URL";
            echo;
            echo "And place in $FBT_TOOLCHAIN_PATH/toolchain/ dir manually";
            return 1;
        fi
        echo "yes"
        DOWNLOADER="wget";
        # -qO: quiet except the forced progress bar, write to the given path.
        DOWNLOADER_ARGS="--show-progress --progress=bar:force -qO";
        return 0;
    fi
    echo "yes"
    DOWNLOADER="curl";
    # -S: show errors, -L: follow redirects, -o: write to the given path.
    DOWNLOADER_ARGS="--progress-bar -SLo";
    return 0;
}
# (Re)download the toolchain when it is missing, has no VERSION stamp, or the
# stamp differs from the wanted FBT_TOOLCHAIN_VERSION; otherwise reuse it.
fbtenv_check_download_toolchain()
{
    # '[ ! -f VERSION ]' also covers "toolchain directory missing entirely",
    # collapsing the original three duplicated download branches into one.
    if [ ! -f "$TOOLCHAIN_ARCH_DIR/VERSION" ] \
        || [ "$(cat "$TOOLCHAIN_ARCH_DIR/VERSION")" -ne "$FBT_TOOLCHAIN_VERSION" ]; then
        fbtenv_download_toolchain || return 1;
    fi
    return 0;
}
# Full download pipeline: ensure tar exists, derive archive/dir names from
# TOOLCHAIN_URL, download the archive unless it is already cached, then swap
# the freshly unpacked tree into place. Always clears the archive afterwards.
fbtenv_download_toolchain()
{
    fbtenv_check_tar || return 1;
    TOOLCHAIN_TAR="$(basename "$TOOLCHAIN_URL")";
    # Directory name inside the archive: tar name minus "-<version>.tar.gz".
    TOOLCHAIN_DIR="$(echo "$TOOLCHAIN_TAR" | sed "s/-$FBT_TOOLCHAIN_VERSION.tar.gz//g")";
    if ! fbtenv_check_downloaded_toolchain; then
        fbtenv_curl_wget_check || return 1;
        fbtenv_download_toolchain_tar;
    fi
    fbtenv_remove_old_tooclhain;
    # On unpack failure, still delete the (possibly corrupt) archive.
    fbtenv_unpack_toolchain || { fbtenv_clearing && return 1; };
    fbtenv_clearing;
    return 0;
}
# Entry point: validate how we were invoked, make sure the right toolchain
# version is installed, then prepend its tool directories to PATH (which
# takes effect in the sourcing shell).
fbtenv_main()
{
    fbtenv_check_sourced || return 1;
    fbtenv_check_script_path || return 1;
    fbtenv_get_kernel_type || return 1;
    fbtenv_check_download_toolchain || return 1;
    PATH="$TOOLCHAIN_ARCH_DIR/python/bin:$PATH";
    PATH="$TOOLCHAIN_ARCH_DIR/bin:$PATH";
    PATH="$TOOLCHAIN_ARCH_DIR/protobuf/bin:$PATH";
    PATH="$TOOLCHAIN_ARCH_DIR/openocd/bin:$PATH";
}
fbtenv_main;

View File

@ -1,135 +0,0 @@
#!/bin/sh
# shellcheck disable=SC2086,SC2034
# unofficial strict mode
set -eu;
# Detect the host OS and set TOOLCHAIN_URL / TOOLCHAIN_PATH accordingly.
# $1 is the toolchain version baked into the download URL.
check_system()
{
    VER="$1"; # toolchain version
    printf "Checking kernel type..";
    SYS_TYPE="$(uname -s)"
    if [ "$SYS_TYPE" = "Darwin" ]; then
        echo "darwin";
        TOOLCHAIN_URL="https://update.flipperzero.one/builds/toolchain/gcc-arm-none-eabi-10.3-x86_64-darwin-flipper-$VER.tar.gz";
        TOOLCHAIN_PATH="toolchain/x86_64-darwin";
    elif [ "$SYS_TYPE" = "Linux" ]; then
        echo "linux";
        TOOLCHAIN_URL="https://update.flipperzero.one/builds/toolchain/gcc-arm-none-eabi-10.3-x86_64-linux-flipper-$VER.tar.gz";
        TOOLCHAIN_PATH="toolchain/x86_64-linux";
    else
        echo "unsupported.";
        echo "Your system is unsupported.. sorry..";
        exit 1;
    fi
}
# Verify that 'tar' is available; exit the script otherwise.
check_tar()
{
    printf "Checking tar..";
    if ! tar --version > /dev/null 2>&1; then
        echo "no";
        exit 1;
    fi
    echo "yes";
}
curl_wget_check()
{
printf "Checking curl..";
if ! curl --version > /dev/null 2>&1; then
echo "no";
printf "Checking wget..";
if ! wget --version > /dev/null 2>&1; then
echo "no";
echo "No curl or wget found in your PATH.";
echo "Please provide it or download this file:";
echo;
echo "$TOOLCHAIN_URL";
echo;
echo "And place in repo root dir mannualy.";
exit 1;
fi
echo "yes"
DOWNLOADER="wget";
DOWNLOADER_ARGS="--show-progress --progress=bar:force -qO";
return;
fi
echo "yes"
DOWNLOADER="curl";
DOWNLOADER_ARGS="--progress-bar -SLo";
}
# Return 0 when the toolchain archive is already present in the repo root,
# 1 when it still needs to be downloaded.
check_downloaded_toolchain()
{
    printf "Checking downloaded toolchain tgz..";
    if [ -f "$REPO_ROOT/$TOOLCHAIN_TAR" ]; then
        echo "yes";
        return 0;
    fi
    echo "no";
    return 1;
}
# Fetch the toolchain archive using the tool chosen by curl_wget_check.
# $DOWNLOADER_ARGS is intentionally unquoted so its options word-split.
download_toolchain()
{
    echo "Downloading toolchain:";
    "$DOWNLOADER" $DOWNLOADER_ARGS "$REPO_ROOT/$TOOLCHAIN_TAR" "$TOOLCHAIN_URL";
    echo "done";
}
# Delete any previously unpacked toolchain tree. The ':?' guard aborts if
# REPO_ROOT is unset/empty, preventing an 'rm -rf /...' accident.
remove_old_tooclhain()
{
    printf "Removing old toolchain (if exist)..";
    rm -rf "${REPO_ROOT:?}/$TOOLCHAIN_PATH";
    echo "done";
}
# Consume 'tar -v' output on stdin and render a crude progress bar:
# one '#' per 300 extracted entries, then a final " 100.0%".
show_unpack_percentage()
{
    LINE=0;
    while read -r line; do
        LINE=$(( LINE + 1 ));
        if [ $(( LINE % 300 )) -eq 0 ]; then
            printf "#";
        fi
    done
    echo " 100.0%";
}
# Extract the archive into the repo root and move the resulting directory
# into its final per-architecture location under toolchain/.
unpack_toolchain()
{
    echo "Unpacking toolchain:";
    tar -xvf "$REPO_ROOT/$TOOLCHAIN_TAR" -C "$REPO_ROOT/" 2>&1 | show_unpack_percentage;
    mkdir -p "$REPO_ROOT/toolchain";
    mv "$REPO_ROOT/$TOOLCHAIN_DIR" "$REPO_ROOT/$TOOLCHAIN_PATH/";
    echo "done";
}
# Delete the downloaded archive; installed via 'trap' so it runs on any exit.
clearing()
{
    printf "Clearing..";
    rm -rf "${REPO_ROOT:?}/$TOOLCHAIN_TAR";
    echo "done";
}
# Entry point: $1 is the toolchain version to download and install.
main()
{
    SCRIPT_PATH="$(cd "$(dirname "$0")" && pwd -P)"
    REPO_ROOT="$(cd "$SCRIPT_PATH/../../" && pwd)";
    check_system "$1"; # recives TOOLCHAIN_VERSION, defines TOOLCHAIN_URL and TOOLCHAIN_PATH
    check_tar;
    TOOLCHAIN_TAR="$(basename "$TOOLCHAIN_URL")";
    # Directory name inside the archive: tar name minus "-<version>.tar.gz".
    TOOLCHAIN_DIR="$(echo "$TOOLCHAIN_TAR" | sed "s/-$VER.tar.gz//g")";
    if ! check_downloaded_toolchain; then
        curl_wget_check;
        download_toolchain;
    fi
    remove_old_tooclhain;
    unpack_toolchain;
}

# Clean up the archive on normal exit and on interrupt.
trap clearing EXIT;
trap clearing 2; # SIGINT not coverable by EXIT
main "$1"; # toochain version

View File

@ -21,9 +21,9 @@ def LoadApplicationManifests(env):
for entry in env.Glob("#/applications/*", ondisk=True, source=True):
if isinstance(entry, SCons.Node.FS.Dir) and not str(entry).startswith("."):
try:
appmgr.load_manifest(
os.path.join(entry.abspath, "application.fam"), entry.name
)
app_manifest_file_path = os.path.join(entry.abspath, "application.fam")
appmgr.load_manifest(app_manifest_file_path, entry.name)
env.Append(PY_LINT_SOURCES=[app_manifest_file_path])
except FlipperManifestException as e:
warn(WarningOnByDefault, str(e))
@ -67,6 +67,7 @@ def generate(env):
build_apps_c,
"${APPSCOMSTR}",
),
suffix=".c",
),
}
)

View File

@ -1,5 +1,6 @@
import posixpath
import os
from SCons.Errors import UserError
def BuildModule(env, module):
@ -8,9 +9,9 @@ def BuildModule(env, module):
if not os.path.exists(module_sconscript):
module_sconscript = posixpath.join(src_dir, f"{module}.scons")
if not os.path.exists(module_sconscript):
print(f"Cannot build module {module}: scons file not found")
Exit(2)
raise UserError(f"Cannot build module {module}: scons file not found")
env.Append(PY_LINT_SOURCES=[module_sconscript])
return env.SConscript(
module_sconscript,
variant_dir=posixpath.join(env.subst("$BUILD_DIR"), module),