Amap and PVS Studio reports in CI/CD (#1526)

Max Andreev 2022-08-23 14:29:26 +03:00 committed by GitHub
parent 57328761cf
commit a7a9c38036
18 changed files with 384 additions and 123 deletions

.github/CODEOWNERS

@@ -44,10 +44,6 @@
 # Debug tools and plugins
 /debug/ @skotopes @DrZlo13 @hedger
 
-# Docker
-/docker/ @skotopes @DrZlo13 @hedger @drunkbatya
-/docker-compose.yml @skotopes @DrZlo13 @hedger @drunkbatya
-
 # Documentation
 /documentation/ @skotopes @DrZlo13 @hedger @drunkbatya


@ -1,11 +0,0 @@
name: 'Run in docker'
inputs:
run: # id of input
description: 'A command to run'
required: true
default: ''
runs:
using: 'docker'
image: '../../../docker/Dockerfile'
args:
- ${{ inputs.run }}

.github/workflows/amap_analyse.yml (new file)

@@ -0,0 +1,120 @@
name: 'Analyze .map file with Amap'
on:
push:
branches:
- dev
- "release*"
tags:
- '*'
pull_request:
env:
TARGETS: f7
jobs:
amap_analyse:
runs-on: [self-hosted,FlipperZeroMacShell]
steps:
- name: 'Wait Build workflow'
uses: fountainhead/action-wait-for-check@v1.0.0
id: wait-for-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: 'main'
ref: ${{ github.event.pull_request.head.sha || github.sha }}
intervalSeconds: 20
- name: 'Check Build workflow status'
if: steps.wait-for-build.outputs.conclusion == 'failure'
run: |
exit 1
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]; then
git submodule status \
|| git checkout `git rev-list --max-parents=0 HEAD | tail -n 1`
fi
- name: 'Checkout code'
uses: actions/checkout@v2
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: 'Generate prefixes by commit'
id: names
run: |
REF="${{github.ref}}"
COMMIT_HASH="$(git rev-parse HEAD)"
SHA="$(git rev-parse --short HEAD)"
COMMIT_MSG="${{github.event.head_commit.message}}"
if [[ ${{ github.event_name }} == 'pull_request' ]]; then
REF="${{github.head_ref}}"
COMMIT_HASH="$(git log -1 --pretty=oneline | awk '{print $1}')"
SHA="$(cut -c -8 <<< "$COMMIT_HASH")"
COMMIT_MSG="$(git log -1 --pretty=format:"%s")"
PULL_ID="${{github.event.pull_request.number}}"
PULL_NAME="${{github.event.pull_request.title}}"
fi
BRANCH_NAME=${REF#refs/*/}
SUFFIX=${BRANCH_NAME//\//_}-$(date +'%d%m%Y')-${SHA}
if [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
SUFFIX=${BRANCH_NAME//\//_}
fi
echo "::set-output name=commit-hash::${COMMIT_HASH}"
echo "::set-output name=commit-msg::${COMMIT_MSG}"
echo "::set-output name=pull-id::${PULL_ID}"
echo "::set-output name=pull-name::${PULL_NAME}"
echo "::set-output name=branch-name::${BRANCH_NAME}"
echo "::set-output name=suffix::${SUFFIX}"
- name: 'Make artifacts directory'
run: |
rm -rf artifacts
mkdir artifacts
- name: 'Download build artifacts'
if: ${{ !github.event.pull_request.head.repo.fork }}
run: |
echo "${{ secrets.RSYNC_DEPLOY_KEY }}" > deploy_key;
chmod 600 ./deploy_key;
rsync -avzP \
-e 'ssh -p ${{ secrets.RSYNC_DEPLOY_PORT }} -i ./deploy_key' \
${{ secrets.RSYNC_DEPLOY_USER }}@${{ secrets.RSYNC_DEPLOY_HOST }}:"${{ secrets.RSYNC_DEPLOY_BASE_PATH }}${{steps.names.outputs.branch-name}}/" artifacts/;
rm ./deploy_key;
      - name: 'Analyse .map file'
run: |
cd artifacts/
/Applications/amap/Contents/MacOS/amap -f flipper-z-f7-firmware-${{steps.names.outputs.suffix}}.elf.map
- name: 'Upload report to DB'
run: |
FBT_TOOLCHAIN_PATH=/opt source scripts/toolchain/fbtenv.sh
get_size()
{
SECTION="$1";
arm-none-eabi-size \
-A artifacts/flipper-z-f7-firmware-${{steps.names.outputs.suffix}}.elf \
| grep "^$SECTION" | awk '{print $2}'
}
export COMMIT_HASH="${{steps.names.outputs.commit-hash}}"
export COMMIT_MSG="${{steps.names.outputs.commit-msg}}"
export BRANCH_NAME="${{steps.names.outputs.branch-name}}"
export PULL_ID="${{steps.names.outputs.pull-id}}"
export PULL_NAME="${{steps.names.outputs.pull-name}}"
export BSS_SIZE="$(get_size ".bss")"
export TEXT_SIZE="$(get_size ".text")"
export RODATA_SIZE="$(get_size ".rodata")"
export DATA_SIZE="$(get_size ".data")"
export FREE_FLASH_SIZE="$(get_size ".free_flash")"
python3 -m pip install mariadb
python3 scripts/amap_mariadb_insert.py \
${{ secrets.AMAP_MARIADB_USER }} \
${{ secrets.AMAP_MARIADB_PASSWORD }} \
${{ secrets.AMAP_MARIADB_HOST }} \
${{ secrets.AMAP_MARIADB_PORT }} \
${{ secrets.AMAP_MARIADB_DATABASE }} \
artifacts/flipper-z-f7-firmware-${{steps.names.outputs.suffix}}.elf.map.all
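
For reference, the SUFFIX computed above is what names every artifact in this workflow: tag builds use the bare tag name, while branch and PR builds get `<branch>-<ddmmYYYY>-<short-sha>`, with `/` in branch names replaced by `_`. A minimal Python sketch of the same naming scheme (the function name and example values are illustrative, not part of the commit):

```python
from datetime import date

def dist_suffix(ref: str, short_sha: str, today: date) -> str:
    # Tags keep their bare name; branches get '<slug>-<ddmmYYYY>-<sha>'.
    name = ref.removeprefix("refs/heads/").removeprefix("refs/tags/")
    slug = name.replace("/", "_")
    if ref.startswith("refs/tags/"):
        return slug
    return f"{slug}-{today.strftime('%d%m%Y')}-{short_sha}"

# dist_suffix("refs/heads/feature/amap", "a7a9c380", date(2022, 8, 23))
# -> 'feature_amap-23082022-a7a9c380'
```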


@@ -108,6 +108,10 @@ jobs:
         FBT_TOOLCHAIN_PATH=/opt ./fbt copro_dist
         tar czpf artifacts/flipper-z-any-core2_firmware-${{steps.names.outputs.suffix}}.tgz -C assets core2_firmware
+    - name: 'Copy .map file'
+      run: |
+        cp build/f7-firmware-D/firmware.elf.map artifacts/flipper-z-f7-firmware-${{steps.names.outputs.suffix}}.elf.map
     - name: 'Upload artifacts to update server'
       if: ${{ !github.event.pull_request.head.repo.fork }}
       run: |


@@ -25,6 +25,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         fetch-depth: 0
+        ref: ${{ github.event.pull_request.head.sha }}
     - name: 'Check protobuf branch'
       run: |


@@ -28,6 +28,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         fetch-depth: 0
+        ref: ${{ github.event.pull_request.head.sha }}
     - name: 'Check code formatting'
       id: syntax_check


@@ -25,6 +25,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         fetch-depth: 0
+        ref: ${{ github.event.pull_request.head.sha }}
     - name: 'Check code formatting'
       run: SET_GH_OUTPUT=1 FBT_TOOLCHAIN_PATH=/opt ./fbt lint_py

.github/workflows/pvs_studio.yml (new file)

@@ -0,0 +1,107 @@
name: 'Static C/C++ analysis with PVS-Studio'
on:
push:
branches:
- dev
- "release*"
tags:
- '*'
pull_request:
env:
TARGETS: f7
DEFAULT_TARGET: f7
jobs:
analyse_c_cpp:
runs-on: [self-hosted, FlipperZeroShell]
steps:
- name: 'Decontaminate previous build leftovers'
run: |
if [ -d .git ]
then
git submodule status \
|| git checkout `git rev-list --max-parents=0 HEAD | tail -n 1`
fi
- name: 'Checkout code'
uses: actions/checkout@v2
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: 'Generate suffix and folder name'
id: names
run: |
REF=${{ github.ref }}
if [[ ${{ github.event_name }} == 'pull_request' ]]; then
REF=${{ github.head_ref }}
fi
BRANCH_OR_TAG=${REF#refs/*/}
SHA=$(git rev-parse --short HEAD)
if [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
SUFFIX=${BRANCH_OR_TAG//\//_}
else
SUFFIX=${BRANCH_OR_TAG//\//_}-$(date +'%d%m%Y')-${SHA}
fi
echo "WORKFLOW_BRANCH_OR_TAG=${BRANCH_OR_TAG}" >> $GITHUB_ENV
echo "DIST_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
echo "::set-output name=artifacts-path::${BRANCH_OR_TAG}"
echo "::set-output name=suffix::${SUFFIX}"
echo "::set-output name=short-hash::${SHA}"
echo "::set-output name=default-target::${DEFAULT_TARGET}"
- name: 'Make reports directory'
run: |
rm -rf reports/
mkdir reports
      - name: 'Generate compile_commands.json'
run: |
FBT_TOOLCHAIN_PATH=/opt ./fbt COMPACT=1 version_json proto_ver icons firmware_cdb dolphin_internal dolphin_blocking
- name: 'Static code analysis'
run: |
FBT_TOOLCHAIN_PATH=/opt source scripts/toolchain/fbtenv.sh
pvs-studio-analyzer credentials ${{ secrets.PVS_STUDIO_CREDENTIALS }}
pvs-studio-analyzer analyze \
@.pvsoptions \
-j$(grep -c processor /proc/cpuinfo) \
-f build/f7-firmware-DC/compile_commands.json \
-o PVS-Studio.log
- name: 'Convert PVS-Studio output to html page'
run: plog-converter -a GA:1,2,3 -t fullhtml PVS-Studio.log -o reports/${{steps.names.outputs.default-target}}-${{steps.names.outputs.suffix}}
- name: 'Upload artifacts to update server'
if: ${{ !github.event.pull_request.head.repo.fork }}
run: |
echo "${{ secrets.RSYNC_DEPLOY_KEY }}" > deploy_key;
chmod 600 ./deploy_key;
rsync -avrzP --mkpath \
-e 'ssh -p ${{ secrets.RSYNC_DEPLOY_PORT }} -i ./deploy_key' \
reports/ ${{ secrets.RSYNC_DEPLOY_USER }}@${{ secrets.RSYNC_DEPLOY_HOST }}:/home/data/firmware-pvs-studio-report/"${{steps.names.outputs.artifacts-path}}/";
rm ./deploy_key;
- name: 'Find Previous Comment'
if: ${{ !github.event.pull_request.head.repo.fork && github.event.pull_request }}
uses: peter-evans/find-comment@v1
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: 'PVS-Studio report for commit'
- name: 'Create or update comment'
if: ${{ !github.event.pull_request.head.repo.fork && github.event.pull_request}}
uses: peter-evans/create-or-update-comment@v1
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
body: |
**PVS-Studio report for commit `${{steps.names.outputs.short-hash}}`:**
- [Report](https://update.flipperzero.one/builds/firmware-pvs-studio-report/${{steps.names.outputs.artifacts-path}}/${{steps.names.outputs.default-target}}-${{steps.names.outputs.suffix}}/index.html)
edit-mode: replace
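
The last two steps implement an idempotent PR comment: `find-comment` searches for an earlier bot comment containing the marker text, and `create-or-update-comment` with `edit-mode: replace` either rewrites that comment or posts a new one. The same pattern against the GitHub REST API, sketched in Python (repo, PR number, marker and token are placeholders; pagination is ignored for brevity):

```python
import requests

API = "https://api.github.com"

def upsert_pr_comment(repo: str, pr: int, marker: str, body: str, token: str) -> None:
    # repo is 'owner/name'; find a comment containing `marker`, edit it, else create one.
    headers = {"Authorization": f"token {token}", "Accept": "application/vnd.github+json"}
    comments = requests.get(f"{API}/repos/{repo}/issues/{pr}/comments", headers=headers).json()
    existing = next((c for c in comments if marker in c.get("body", "")), None)
    if existing:  # same effect as edit-mode: replace
        requests.patch(f"{API}/repos/{repo}/issues/comments/{existing['id']}",
                       headers=headers, json={"body": body})
    else:
        requests.post(f"{API}/repos/{repo}/issues/{pr}/comments",
                      headers=headers, json={"body": body})
```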

.pvsoptions

@@ -1 +1 @@
---rules-config .pvsconfig -e lib/fatfs -e lib/fnv1a-hash -e lib/FreeRTOS-Kernel -e lib/heatshrink -e lib/libusb_stm32 -e lib/littlefs -e lib/mbedtls -e lib/micro-ecc -e lib/microtar -e lib/mlib -e lib/qrcode -e lib/ST25RFAL002 -e lib/STM32CubeWB -e lib/u8g2 -e toolchain/
+--rules-config .pvsconfig -e lib/fatfs -e lib/fnv1a-hash -e lib/FreeRTOS-Kernel -e lib/heatshrink -e lib/libusb_stm32 -e lib/littlefs -e lib/mbedtls -e lib/micro-ecc -e lib/microtar -e lib/mlib -e lib/qrcode -e lib/ST25RFAL002 -e lib/STM32CubeWB -e lib/u8g2 -e */arm-none-eabi/*

ReadMe.md

@@ -61,29 +61,6 @@ One liner: `./fbt firmware_flash`
 3. Run `dfu-util -D full.dfu -a 0`
-
-# Build with Docker
-
-## Prerequisites
-
-1. Install [Docker Engine and Docker Compose](https://www.docker.com/get-started)
-2. Prepare the container:
-
-```sh
-docker-compose up -d
-```
-
-## Compile everything
-
-```sh
-docker-compose exec dev ./fbt
-```
-
-Check `dist/` for build outputs.
-
-Use **`flipper-z-{target}-full-{suffix}.dfu`** to flash your device.
-
-If compilation fails, make sure all submodules are all initialized. Either clone with `--recursive` or use `git submodule update --init --recursive`.
-
 # Build on Linux/macOS
 
 Check out `documentation/fbt.md` for details on building and flashing firmware.

@@ -157,7 +134,6 @@ Connect your device via ST-Link and run:
 - `assets` - Assets used by applications and services
 - `furi` - Furi Core: os level primitives and helpers
 - `debug` - Debug tool: GDB-plugins, SVD-file and etc
-- `docker` - Docker image sources (used for firmware build automation)
 - `documentation` - Documentation generation system configs and input files
 - `firmware` - Firmware source code
 - `lib` - Our and 3rd party libraries, drivers and etc...

lib/subghz/subghz_setting.c

@@ -446,15 +446,15 @@ const char* subghz_setting_get_preset_name(SubGhzSetting* instance, size_t idx)
 int subghz_setting_get_inx_preset_by_name(SubGhzSetting* instance, const char* preset_name) {
     furi_assert(instance);
     size_t idx = 0;
     for
         M_EACH(item, instance->preset->data, SubGhzSettingCustomPresetItemArray_t) {
             if(strcmp(string_get_cstr(item->custom_preset_name), preset_name) == 0) {
                 return idx;
             }
             idx++;
         }
     furi_crash("SubGhz: No name preset.");
     return -1;
 }
 bool subghz_setting_load_custom_preset(

assets/ReadMe.md

@@ -9,12 +9,6 @@
 ./fbt icons proto dolphin_internal dolphin_blocking dolphin_ext resources
 ```
-
-# Compiling with Docker-Compose
-
-```bash
-docker-compose exec dev ./fbt icons proto dolphin_internal dolphin_blocking dolphin_ext resources
-```
 
 # Asset naming rules
 
 ## Images and Animations

docker-compose.yml (deleted file)

@ -1,12 +0,0 @@
version: '3'
services:
dev:
image: flipperdevices/flipperzero-toolchain
network_mode: host
privileged: true
tty: true
stdin_open: true
volumes:
- .:/project
- /dev/bus/usb:/dev/bus/usb
working_dir: '/project'

docker/Dockerfile (deleted file)

@@ -1,41 +0,0 @@
FROM ubuntu:hirsute
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
ca-certificates \
build-essential \
python3 \
git \
clang-format-12 \
dfu-util \
openocd \
libncurses5 \
python-setuptools \
libpython2.7-dev \
libxml2-dev \
libxslt1-dev \
zlib1g-dev \
wget \
python3-protobuf \
protobuf-compiler \
python3-pip \
libpython3-dev \
ccache \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
RUN wget --progress=dot:giga "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.07/gcc-arm-none-eabi-10.3-2021.07-$(uname -m)-linux.tar.bz2" && \
tar xjf gcc-arm-none-eabi-10.3-2021.07-$(uname -m)-linux.tar.bz2 && \
rm gcc-arm-none-eabi-10.3-2021.07-$(uname -m)-linux.tar.bz2 && \
cd gcc-arm-none-eabi-10.3-2021.07/bin/ && \
rm -rf ../share && \
for file in * ; do ln -s "${PWD}/${file}" "/usr/bin/${file}" ; done && \
cd / && arm-none-eabi-gcc -v && arm-none-eabi-gdb -v
RUN pip3 install heatshrink2==0.11.0 Pillow==9.1.1
RUN ln -s `which clang-format-12` /usr/local/bin/clang-format
COPY entrypoint.sh /
ENTRYPOINT ["/entrypoint.sh"]

docker/entrypoint.sh (deleted file)

@@ -1,9 +0,0 @@
#!/bin/bash
if [ -z "$1" ]; then
bash
else
echo "Running $1"
set -ex
bash -c "$1"
fi

scripts/amap_mariadb_insert.py (new, executable file)

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
from datetime import datetime
import argparse
import mariadb
import sys
import os
def parseArgs():
parser = argparse.ArgumentParser()
parser.add_argument("db_user", help="MariaDB user")
parser.add_argument("db_pass", help="MariaDB password")
parser.add_argument("db_host", help="MariaDB hostname")
parser.add_argument("db_port", type=int, help="MariaDB port")
parser.add_argument("db_name", help="MariaDB database")
parser.add_argument("report_file", help="Report file(.map.all)")
args = parser.parse_args()
return args
def mariadbConnect(args):
try:
conn = mariadb.connect(
user=args.db_user,
password=args.db_pass,
host=args.db_host,
port=args.db_port,
database=args.db_name,
)
except mariadb.Error as e:
print(f"Error connecting to MariaDB: {e}")
sys.exit(1)
return conn
def parseEnv():
outArr = []
outArr.append(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
outArr.append(os.getenv("COMMIT_HASH", default=None))
outArr.append(os.getenv("COMMIT_MSG", default=None))
outArr.append(os.getenv("BRANCH_NAME", default=None))
outArr.append(os.getenv("BSS_SIZE", default=None))
outArr.append(os.getenv("TEXT_SIZE", default=None))
outArr.append(os.getenv("RODATA_SIZE", default=None))
outArr.append(os.getenv("DATA_SIZE", default=None))
outArr.append(os.getenv("FREE_FLASH_SIZE", default=None))
outArr.append(os.getenv("PULL_ID", default=None))
outArr.append(os.getenv("PULL_NAME", default=None))
return outArr
def createTables(cur, conn):
headerTable = "CREATE TABLE IF NOT EXISTS `header` ( \
`id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
`datetime` datetime NOT NULL, \
`commit` varchar(40) NOT NULL, \
`commit_msg` text NOT NULL, \
`branch_name` text NOT NULL, \
`bss_size` int(10) unsigned NOT NULL, \
`text_size` int(10) unsigned NOT NULL, \
`rodata_size` int(10) unsigned NOT NULL, \
`data_size` int(10) unsigned NOT NULL, \
`free_flash_size` int(10) unsigned NOT NULL, \
`pullrequest_id` int(10) unsigned DEFAULT NULL, \
`pullrequest_name` text DEFAULT NULL, \
PRIMARY KEY (`id`), \
KEY `header_id_index` (`id`) )"
dataTable = "CREATE TABLE IF NOT EXISTS `data` ( \
`header_id` int(10) unsigned NOT NULL, \
`id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
`section` text NOT NULL, \
`address` text NOT NULL, \
`size` int(10) unsigned NOT NULL, \
`name` text NOT NULL, \
`lib` text NOT NULL, \
`obj_name` text NOT NULL, \
PRIMARY KEY (`id`), \
KEY `data_id_index` (`id`), \
KEY `data_header_id_index` (`header_id`), \
CONSTRAINT `data_header_id_foreign` FOREIGN KEY (`header_id`) REFERENCES `header` (`id`) )"
cur.execute(headerTable)
cur.execute(dataTable)
conn.commit()
def insertHeader(data, cur, conn):
query = "INSERT INTO `header` ( \
datetime, commit, commit_msg, branch_name, bss_size, text_size, \
rodata_size, data_size, free_flash_size, pullrequest_id, pullrequest_name) \
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
cur.execute(query, data)
conn.commit()
return cur.lastrowid
def parseFile(fileObj, headerID):
arr = []
fileLines = fileObj.readlines()
for line in fileLines:
lineArr = []
tempLineArr = line.split("\t")
lineArr.append(headerID)
lineArr.append(tempLineArr[0]) # section
lineArr.append(int(tempLineArr[2], 16)) # address hex
lineArr.append(int(tempLineArr[3])) # size
lineArr.append(tempLineArr[4]) # name
lineArr.append(tempLineArr[5]) # lib
lineArr.append(tempLineArr[6]) # obj_name
arr.append(tuple(lineArr))
return arr
def insertData(data, cur, conn):
query = "INSERT INTO `data` ( \
header_id, section, address, size, \
name, lib, obj_name) \
        VALUES (?, ?, ?, ?, ?, ?, ?)"
cur.executemany(query, data)
conn.commit()
def main():
args = parseArgs()
dbConn = mariadbConnect(args)
reportFile = open(args.report_file)
dbCurs = dbConn.cursor()
createTables(dbCurs, dbConn)
headerID = insertHeader(parseEnv(), dbCurs, dbConn)
insertData(parseFile(reportFile, headerID), dbCurs, dbConn)
reportFile.close()
dbCurs.close()
if __name__ == "__main__":
main()
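
Note that `parseFile()` assumes each line of amap's `.map.all` export is tab-separated, with section, address (hex), size, symbol name, library and object file in fixed columns (column 1 is skipped). A quick illustration of the tuple it builds per line, using invented sample values:

```python
sample = ".text\t-\t0x08008000\t132\tfuri_record_open\tlibfuri.a\trecord.o"
cols = sample.split("\t")
row = (
    1,                 # header_id (foreign key into `header`)
    cols[0],           # section
    int(cols[2], 16),  # address: hex string -> int
    int(cols[3]),      # size in bytes
    cols[4],           # symbol name
    cols[5],           # library
    cols[6],           # object file name
)
print(row)  # (1, '.text', 134250496, 132, 'furi_record_open', 'libfuri.a', 'record.o')
```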

scripts/toolchain/fbtenv.cmd

@@ -13,7 +13,7 @@ if not [%FBT_NOENV%] == [] (
     exit /b 0
 )
-set "FLIPPER_TOOLCHAIN_VERSION=8"
+set "FLIPPER_TOOLCHAIN_VERSION=9"
 set "FBT_TOOLCHAIN_ROOT=%FBT_ROOT%\toolchain\i686-windows"

scripts/toolchain/fbtenv.sh

@@ -5,7 +5,7 @@
 # public variables
 DEFAULT_SCRIPT_PATH="$(pwd -P)";
 SCRIPT_PATH="${SCRIPT_PATH:-$DEFAULT_SCRIPT_PATH}";
-FBT_TOOLCHAIN_VERSION="${FBT_TOOLCHAIN_VERSION:-"8"}";
+FBT_TOOLCHAIN_VERSION="${FBT_TOOLCHAIN_VERSION:-"9"}";
 FBT_TOOLCHAIN_PATH="${FBT_TOOLCHAIN_PATH:-$SCRIPT_PATH}";
 
 fbtenv_check_sourced()

@@ -13,12 +13,9 @@ fbtenv_check_sourced()
     case "${ZSH_EVAL_CONTEXT:-""}" in *:file:*)
         return 0;;
     esac
-    case ${0##*/} in dash|-dash|bash|-bash|ksh|-ksh|sh|-sh)
+    case ${0##*/} in dash|-dash|bash|-bash|ksh|-ksh|sh|-sh|*.sh|fbt)
         return 0;;
     esac
-    if [ "$(basename $0)" = "fbt" ]; then
-        return 0;
-    fi
     echo "Running this script manually is wrong, please source it";
     echo "Example:";
     printf "\tsource scripts/toolchain/fbtenv.sh\n";

@@ -202,6 +199,7 @@ fbtenv_main()
     fbtenv_check_script_path || return 1;
     fbtenv_get_kernel_type || return 1;
     fbtenv_check_download_toolchain || return 1;
+    PS1="[FBT]$PS1";
     PATH="$TOOLCHAIN_ARCH_DIR/python/bin:$PATH";
     PATH="$TOOLCHAIN_ARCH_DIR/bin:$PATH";
     PATH="$TOOLCHAIN_ARCH_DIR/protobuf/bin:$PATH";