Trigger amap from another repo (#2171)

* Add S3 upload, repository dispatch
* Add trigger
* Fix map file upload
* Debug
* Add event file upload to S3
* Fix triggering
* Fix upload process
* Fix build.yml

Co-authored-by: あく <alleteam@gmail.com>
parent 9192520c70
commit 1c926cf8a2
.github/workflows/amap_analyse.yml (vendored): 103 deletions
@@ -1,103 +0,0 @@
-name: 'Analyze .map file with Amap'
-
-on:
-  push:
-    branches:
-      - dev
-      - "release*"
-    tags:
-      - '*'
-  pull_request:
-
-env:
-  TARGETS: f7
-  FBT_TOOLCHAIN_PATH: /opt
-
-jobs:
-  amap_analyse:
-    if: ${{ !github.event.pull_request.head.repo.fork }}
-    runs-on: [self-hosted,FlipperZeroMacShell]
-    timeout-minutes: 15
-    steps:
-      - name: 'Wait Build workflow'
-        uses: fountainhead/action-wait-for-check@v1.0.0
-        id: wait-for-build
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          checkName: 'main'
-          ref: ${{ github.event.pull_request.head.sha || github.sha }}
-          intervalSeconds: 20
-
-      - name: 'Check Build workflow status'
-        if: steps.wait-for-build.outputs.conclusion == 'failure'
-        run: |
-          exit 1
-
-      - name: 'Decontaminate previous build leftovers'
-        run: |
-          if [ -d .git ]; then
-            git submodule status || git checkout "$(git rev-list --max-parents=0 HEAD | tail -n 1)"
-          fi
-
-      - name: 'Checkout code'
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.pull_request.head.sha }}
-
-      - name: 'Get commit details'
-        run: |
-          if [[ ${{ github.event_name }} == 'pull_request' ]]; then
-            TYPE="pull"
-          elif [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
-            TYPE="tag"
-          else
-            TYPE="other"
-          fi
-          python3 scripts/get_env.py "--event_file=${{ github.event_path }}" "--type=$TYPE"
-
-      - name: 'Make artifacts directory'
-        run: |
-          rm -rf artifacts
-          mkdir artifacts
-
-      - name: 'Download build artifacts'
-        run: |
-          mkdir -p ~/.ssh
-          ssh-keyscan -p ${{ secrets.RSYNC_DEPLOY_PORT }} -H ${{ secrets.RSYNC_DEPLOY_HOST }} > ~/.ssh/known_hosts
-          echo "${{ secrets.RSYNC_DEPLOY_KEY }}" > deploy_key;
-          chmod 600 ./deploy_key;
-          rsync -avzP \
-            -e 'ssh -p ${{ secrets.RSYNC_DEPLOY_PORT }} -i ./deploy_key' \
-            ${{ secrets.RSYNC_DEPLOY_USER }}@${{ secrets.RSYNC_DEPLOY_HOST }}:"${{ secrets.RSYNC_DEPLOY_BASE_PATH }}${BRANCH_NAME}/" artifacts/;
-          rm ./deploy_key;
-
-      - name: 'Make .map file analyze'
-        run: |
-          cd artifacts/
-          /Applications/amap/Contents/MacOS/amap -f "flipper-z-f7-firmware-${SUFFIX}.elf.map"
-
-      - name: 'Upload report to DB'
-        run: |
-          source scripts/toolchain/fbtenv.sh
-          get_size()
-          {
-            SECTION="$1";
-            arm-none-eabi-size \
-              -A artifacts/flipper-z-f7-firmware-$SUFFIX.elf \
-              | grep "^$SECTION" | awk '{print $2}'
-          }
-          export BSS_SIZE="$(get_size ".bss")"
-          export TEXT_SIZE="$(get_size ".text")"
-          export RODATA_SIZE="$(get_size ".rodata")"
-          export DATA_SIZE="$(get_size ".data")"
-          export FREE_FLASH_SIZE="$(get_size ".free_flash")"
-          python3 -m pip install mariadb==1.1.4
-          python3 scripts/amap_mariadb_insert.py \
-            ${{ secrets.AMAP_MARIADB_USER }} \
-            ${{ secrets.AMAP_MARIADB_PASSWORD }} \
-            ${{ secrets.AMAP_MARIADB_HOST }} \
-            ${{ secrets.AMAP_MARIADB_PORT }} \
-            ${{ secrets.AMAP_MARIADB_DATABASE }} \
-            artifacts/flipper-z-f7-firmware-$SUFFIX.elf.map.all
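Note on the deleted workflow's get_size() helper: arm-none-eabi-size -A prints SysV-style output, one "name size address" row per section, and the shell function greps the row for the requested section and takes the second column. A minimal Python equivalent, for reference only (function name and error handling are mine, not part of this commit):

    import subprocess

    def get_size(elf_path: str, section: str) -> int:
        # Mirror the deleted shell helper: find the `arm-none-eabi-size -A`
        # row for `section` and return its size column, in bytes.
        out = subprocess.run(
            ["arm-none-eabi-size", "-A", elf_path],
            check=True, capture_output=True, text=True,
        ).stdout
        for line in out.splitlines():
            if line.startswith(section):
                return int(line.split()[1])
        raise ValueError(f"section {section!r} not found in {elf_path}")

    # Usage, matching the workflow's exports:
    # bss_size = get_size("artifacts/flipper-z-f7-firmware-local.elf", ".bss")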
.github/workflows/build.yml (vendored): 39 changes
@@ -30,11 +30,6 @@ jobs:
           fetch-depth: 0
           ref: ${{ github.event.pull_request.head.sha }}
 
-      - name: 'Make artifacts directory'
-        run: |
-          rm -rf artifacts
-          mkdir artifacts
-
       - name: 'Get commit details'
         id: names
         run: |
@@ -46,6 +41,15 @@ jobs:
             TYPE="other"
           fi
           python3 scripts/get_env.py "--event_file=${{ github.event_path }}" "--type=$TYPE"
+          echo random_hash=$(openssl rand -base64 40 | shasum -a 256 | awk '{print $1}') >> $GITHUB_OUTPUT
+          echo "event_type=$TYPE" >> $GITHUB_OUTPUT
 
+      - name: 'Make artifacts directory'
+        run: |
+          rm -rf artifacts
+          rm -rf map_analyser_files
+          mkdir artifacts
+          mkdir map_analyser_files
+
       - name: 'Bundle scripts'
         if: ${{ !github.event.pull_request.head.repo.fork }}
@@ -82,9 +86,30 @@ jobs:
         run: |
           cp build/core2_firmware.tgz "artifacts/flipper-z-any-core2_firmware-${SUFFIX}.tgz"
 
-      - name: 'Copy .map file'
+      - name: 'Copy map analyser files'
         run: |
-          cp build/f7-firmware-*/firmware.elf.map "artifacts/flipper-z-f7-firmware-${SUFFIX}.elf.map"
+          cp build/f7-firmware-*/firmware.elf.map map_analyser_files/firmware.elf.map
+          cp build/f7-firmware-*/firmware.elf map_analyser_files/firmware.elf
+          cp ${{ github.event_path }} map_analyser_files/event.json
+
+      - name: 'Upload map analyser files to storage'
+        uses: keithweaver/aws-s3-github-action@v1.0.0
+        with:
+          source: map_analyser_files/
+          destination: "s3://${{ secrets.MAP_REPORT_AWS_BUCKET }}/${{steps.names.outputs.random_hash}}"
+          aws_access_key_id: "${{ secrets.MAP_REPORT_AWS_ACCESS_KEY }}"
+          aws_secret_access_key: "${{ secrets.MAP_REPORT_AWS_SECRET_KEY }}"
+          aws_region: "${{ secrets.MAP_REPORT_AWS_REGION }}"
+          flags: --recursive
+
+      - name: 'Trigger map file reporter'
+        uses: peter-evans/repository-dispatch@v2
+        with:
+          repository: flipperdevices/flipper-map-reporter
+          token: ${{ secrets.REPOSITORY_DISPATCH_TOKEN }}
+          event-type: map-file-analyse
+          client-payload: '{"random_hash": "${{steps.names.outputs.random_hash}}", "event_type": "${{steps.names.outputs.event_type}}"}'
+
 
       - name: 'Upload artifacts to update server'
         if: ${{ !github.event.pull_request.head.repo.fork }}
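The two steps added above are what the commit title means: build.yml now uploads the .map file, the .elf, and the triggering event JSON to S3 under a random hash, then fires a repository_dispatch event at flipperdevices/flipper-map-reporter, so the analysis runs in that repo instead of here. The peter-evans/repository-dispatch action reduces to a single GitHub REST call; a rough Python equivalent, as a sketch (the token variable and hash value are illustrative stand-ins for the workflow's secrets and step outputs):

    import os
    import requests

    # Illustrative stand-ins; the workflow takes these from secrets and
    # from the `names` step outputs.
    token = os.environ["REPOSITORY_DISPATCH_TOKEN"]
    payload = {"random_hash": "deadbeef", "event_type": "pull"}

    # POST /repos/{owner}/{repo}/dispatches fires the `repository_dispatch`
    # trigger in the target repository.
    resp = requests.post(
        "https://api.github.com/repos/flipperdevices/flipper-map-reporter/dispatches",
        headers={
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {token}",
        },
        json={"event_type": "map-file-analyse", "client_payload": payload},
    )
    resp.raise_for_status()  # GitHub returns 204 No Content on success

The receiving workflow reads the hash back from github.event.client_payload.random_hash and can fetch the bundle from the matching S3 prefix.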
scripts/amap_mariadb_insert.py: 136 deletions
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-
-from datetime import datetime
-import argparse
-import mariadb
-import sys
-import os
-
-
-def parseArgs():
-    parser = argparse.ArgumentParser()
-    parser.add_argument("db_user", help="MariaDB user")
-    parser.add_argument("db_pass", help="MariaDB password")
-    parser.add_argument("db_host", help="MariaDB hostname")
-    parser.add_argument("db_port", type=int, help="MariaDB port")
-    parser.add_argument("db_name", help="MariaDB database")
-    parser.add_argument("report_file", help="Report file(.map.all)")
-    args = parser.parse_args()
-    return args
-
-
-def mariadbConnect(args):
-    try:
-        conn = mariadb.connect(
-            user=args.db_user,
-            password=args.db_pass,
-            host=args.db_host,
-            port=args.db_port,
-            database=args.db_name,
-        )
-    except mariadb.Error as e:
-        print(f"Error connecting to MariaDB: {e}")
-        sys.exit(1)
-    return conn
-
-
-def parseEnv():
-    outArr = []
-    outArr.append(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
-    outArr.append(os.getenv("COMMIT_HASH", default=None))
-    outArr.append(os.getenv("COMMIT_MSG", default=None))
-    outArr.append(os.getenv("BRANCH_NAME", default=None))
-    outArr.append(os.getenv("BSS_SIZE", default=None))
-    outArr.append(os.getenv("TEXT_SIZE", default=None))
-    outArr.append(os.getenv("RODATA_SIZE", default=None))
-    outArr.append(os.getenv("DATA_SIZE", default=None))
-    outArr.append(os.getenv("FREE_FLASH_SIZE", default=None))
-    outArr.append(os.getenv("PULL_ID", default=None))
-    outArr.append(os.getenv("PULL_NAME", default=None))
-    return outArr
-
-
-def createTables(cur, conn):
-    headerTable = "CREATE TABLE IF NOT EXISTS `header` ( \
-        `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
-        `datetime` datetime NOT NULL, \
-        `commit` varchar(40) NOT NULL, \
-        `commit_msg` text NOT NULL, \
-        `branch_name` text NOT NULL, \
-        `bss_size` int(10) unsigned NOT NULL, \
-        `text_size` int(10) unsigned NOT NULL, \
-        `rodata_size` int(10) unsigned NOT NULL, \
-        `data_size` int(10) unsigned NOT NULL, \
-        `free_flash_size` int(10) unsigned NOT NULL, \
-        `pullrequest_id` int(10) unsigned DEFAULT NULL, \
-        `pullrequest_name` text DEFAULT NULL, \
-        PRIMARY KEY (`id`), \
-        KEY `header_id_index` (`id`) )"
-    dataTable = "CREATE TABLE IF NOT EXISTS `data` ( \
-        `header_id` int(10) unsigned NOT NULL, \
-        `id` int(10) unsigned NOT NULL AUTO_INCREMENT, \
-        `section` text NOT NULL, \
-        `address` text NOT NULL, \
-        `size` int(10) unsigned NOT NULL, \
-        `name` text NOT NULL, \
-        `lib` text NOT NULL, \
-        `obj_name` text NOT NULL, \
-        PRIMARY KEY (`id`), \
-        KEY `data_id_index` (`id`), \
-        KEY `data_header_id_index` (`header_id`), \
-        CONSTRAINT `data_header_id_foreign` FOREIGN KEY (`header_id`) REFERENCES `header` (`id`) )"
-    cur.execute(headerTable)
-    cur.execute(dataTable)
-    conn.commit()
-
-
-def insertHeader(data, cur, conn):
-    query = "INSERT INTO `header` ( \
-        datetime, commit, commit_msg, branch_name, bss_size, text_size, \
-        rodata_size, data_size, free_flash_size, pullrequest_id, pullrequest_name) \
-        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
-    cur.execute(query, data)
-    conn.commit()
-    return cur.lastrowid
-
-
-def parseFile(fileObj, headerID):
-    arr = []
-    fileLines = fileObj.readlines()
-    for line in fileLines:
-        lineArr = []
-        tempLineArr = line.split("\t")
-        lineArr.append(headerID)
-        lineArr.append(tempLineArr[0])  # section
-        lineArr.append(int(tempLineArr[2], 16))  # address hex
-        lineArr.append(int(tempLineArr[3]))  # size
-        lineArr.append(tempLineArr[4])  # name
-        lineArr.append(tempLineArr[5])  # lib
-        lineArr.append(tempLineArr[6])  # obj_name
-        arr.append(tuple(lineArr))
-    return arr
-
-
-def insertData(data, cur, conn):
-    query = "INSERT INTO `data` ( \
-        header_id, section, address, size, \
-        name, lib, obj_name) \
-        VALUES (?, ?, ?, ?, ? ,?, ?)"
-    cur.executemany(query, data)
-    conn.commit()
-
-
-def main():
-    args = parseArgs()
-    dbConn = mariadbConnect(args)
-    reportFile = open(args.report_file)
-    dbCurs = dbConn.cursor()
-    createTables(dbCurs, dbConn)
-    headerID = insertHeader(parseEnv(), dbCurs, dbConn)
-    insertData(parseFile(reportFile, headerID), dbCurs, dbConn)
-    reportFile.close()
-    dbCurs.close()
-
-
-if __name__ == "__main__":
-    main()
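For reference, parseFile in the deleted script assumed a tab-separated .map.all report with at least seven columns, of which it used six (column 1 is skipped). A sketch with hypothetical values, showing the tuple built per line; the symbol, library, and object names are invented examples:

    # Hypothetical .map.all row; tab-separated, as parseFile expected.
    line = ".text\t-\t0x08008000\t1024\tfuri_thread_start\tlibfuri.a\tthread.o\n"

    cols = line.split("\t")
    row = (
        42,                # header_id returned by insertHeader()
        cols[0],           # section        -> '.text'
        int(cols[2], 16),  # address, hex   -> 134250496
        int(cols[3]),      # size, bytes    -> 1024
        cols[4],           # symbol name
        cols[5],           # library
        cols[6],           # object file (trailing newline kept, as in the script)
    )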