forked from espressif/arduino-esp32
Rework the sketch builder to match the IDE (#3146)
* Rework the sketch builder to match the IDE
* Link the board to the home folder
* Rename files for clarity
* Move CI files to own subfolder
* Update GitHub CI to use the new script locations
379
tools/ci/build-release.sh
Executable file
@@ -0,0 +1,379 @@
#!/bin/bash

if [ -z "$TRAVIS_TAG" ]; then
    echo "Skipping Packaging: Regular build"
    exit 0
fi

############################################################
# $1 - download link
# $2 - JSON output file
function downloadAndMergePackageJSON()
{
    echo
    echo " ---Package JSON definition merge BEGIN--->"

    jsonLink=$1
    jsonOut=$2
    curlAuthToken=$3
    outDirectory=$4

    echo " - remote package JSON: $jsonLink (source)"
    echo " - current package JSON: $jsonOut (target)"

    old_json=$outDirectory/oldJson.json
    merged_json=$outDirectory/mergedJson.json

    #DEBUG
    #echo " Local tmp for remote JSON: $old_json"
    #echo " Merge output JSON: $merged_json"

    echo " - downloading JSON package definition: $jsonLink ..."

    # Authentication through HTTP headers might fail on redirection due to bug in cURL (https://curl.haxx.se/docs/adv_2018-b3bf.html - headers are resent to the target location including the original authentication)
    # Notes:
    # - eg AmazonAWS fails with Bad Request due to having maximum 1 authentication mechanism per request, might be general issue
    # - it's a first-class credential leakage
    # - the fix is available in cURL 7.58.0+, however, TravisCI is not yet updated (May 29, 2018) - see https://docs.travis-ci.com/user/build-environment-updates
    # - TravisCI workaround: updating build environment through .travis.yml (ie install required version of cURL using apt-get, see https://docs.travis-ci.com/user/installing-dependencies/)
    # - previous point not used on purpose (build time increase, possible failure corrupts whole build, etc) but it's good to know there's a way out of hell
    # - local workaround: authentication through 'access_token' as GET parameter works smoothly, however, HTTP headers are preferred

    #curl --verbose -sH "Authorization: token $curlAuthToken" -L -o "$old_json" "$jsonLink"
    curl -L -o "$old_json" "$jsonLink?access_token=$curlAuthToken"
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi


    #curl -L -o "$old_json" "$jsonLink"

    echo " - merging $old_json into $jsonOut ..."

    echo
    set +e
    stdbuf -oL python package/merge_packages.py "$jsonOut" "$old_json" > "$merged_json"
    set -e #supposed to be ON by default
    echo

    set -v
    if [ ! -s $merged_json ]; then
        rm -f "$merged_json"
        echo " Done: nothing to merge ($merged_json empty) => $jsonOut remains unchanged"
    else
        rm -f "$jsonOut"
        mv "$merged_json" "$jsonOut"
        echo " Done: JSON data successfully merged to $jsonOut"
    fi

    rm -f "$old_json"
    set +v
    echo " <---Package JSON definition merge END---"
    echo
}
############################################################

#Cmdline options
# -a: GitHub API access token
# -d: output directory to store the (pre)release filedir set

set -e

echo
echo "==================================================================="
echo "RELEASE PACKAGE PUBLISHING ARRANGEMENTS (GitHub/Arduino compliance)"
echo "==================================================================="
echo
cmdLine="`basename $0` $@"
echo "Cmdline: ${cmdLine}"

# cURL authentication token
while getopts ":a:,:d:" opt; do
    case $opt in
        a)
            curlAuth=$OPTARG
            #echo " ACCESS TOKEN: $curlAuth" >&2
            ;;
        d)
            releaseDir=$OPTARG
            #echo " RELEASE OUTPUT DIRECTORY: $releaseDir" >&2
            ;;
        \?)
            echo "Error: invalid option -$OPTARG => aborting" >&2
            exit 1
            ;;
        :)
            echo "Error: option -$OPTARG requires an argument => aborting" >&2
            exit 1
            ;;
    esac
done

# where we at?
echo
echo "Prerequisite check:"
if [ -z "$TRAVIS_BUILD_DIR" ]; then
    echo " - non-TravisCI environment"
    cd "$( dirname ${BASH_SOURCE[0]} )"/../..
    bTravisRun=0
else
    echo " - TravisCI run"
    cd $TRAVIS_BUILD_DIR
    bTravisRun=1
fi

# no tag, no love
if [ -z "$TRAVIS_TAG" ] && [ $bTravisRun -eq 1 ]; then
    echo "Warning: non-tagged builds not supported in Travis CI environment => exiting"
    exit 0
fi

echo
echo "Package build settings:"
echo "======================="

# source directory
srcdir=`pwd`
echo "Current working directory: ${srcdir}"

# target directory for actual release fileset
if [ -z "$releaseDir" ]; then
    releaseDir=release
fi
echo "Release output directory: $releaseDir"

# Git versions, branch names, tags
branch_name=""
verx=""
extent=""

if [ -z "$TRAVIS_TAG" ]; then
    branch_name=`git rev-parse --abbrev-ref HEAD 2>/dev/null`
    ver=`sed -n -E 's/version=([0-9.]+)/\1/p' platform.txt`
else
    ver=$TRAVIS_TAG
fi
verx=`git rev-parse --short=8 HEAD 2>/dev/null`

# Package name resolving (case-insensitive):
# - unknown branch, master branch or branch in detached state (HEAD revision) use only the tag's name as version string (esp32-$TAG_NAME, eg 'esp32-1.0.0-RC1')
# - all other branches use long-version string (esp32-$BRANCH_NAME-$GITREV_NUMBER_SHORT, eg 'esp32-idf_update-cde668da')

shopt -s nocasematch

if [ ! -z "$branch_name" ] && [ "$branch_name" != "master" ] && [ "$branch_name" != "head" ]; then
    extent="-$branch_name-$verx"
fi

package_name=esp32-$ver$extent

shopt -u nocasematch

echo "Package version: $ver"
echo "Git branch name: $branch_name"
echo "Git revision number: $verx"
echo "Package name extension: $extent"
echo "Travis CI tag: $TRAVIS_TAG"
echo "Release package name: $package_name"

# Set REMOTE_URL environment variable to the address where the package will be
# available for download. This gets written into package json file.

if [ -z "$REMOTE_URL" ]; then
    REMOTE_URL="http://localhost:8000"
    remoteEchoOut="${REMOTE_URL} (REMOTE_URL variable not defined, using default)"
else
    remoteEchoOut="${REMOTE_URL}"
fi
echo "Target URL for download (JSON incl): ${remoteEchoOut}"

# Create directory for the package
outdir=$releaseDir/$package_name
echo "Local temp directory: $outdir"

rm -rf $releaseDir
mkdir -p $outdir

# Copy files required for the package release:
echo
echo "Package build processing:"
echo "========================="
echo
echo "Prepare files for the package main archive:"
echo " - copying necessary files from current Git repository..."

# <PACKAGE ROOT>
cp -f $srcdir/boards.txt $outdir/
cp -f $srcdir/platform.txt $outdir/
cp -f $srcdir/programmers.txt $outdir/

# <COMPLETE DIRS>
# cores/
# libraries/
# variants/
# tools/partitions/
cp -Rf $srcdir/cores $outdir/
cp -Rf $srcdir/libraries $outdir/
cp -Rf $srcdir/variants $outdir/
mkdir -p $outdir/tools
cp -Rf $srcdir/tools/partitions $outdir/tools/

# <DIR & FILES>
# tools/sdk/
cp -Rf $srcdir/tools/sdk $outdir/tools/

# tools/
cp -f $srcdir/tools/espota.exe $outdir/tools/
cp -f $srcdir/tools/espota.py $outdir/tools/
cp -f $srcdir/tools/esptool.py $outdir/tools/
cp -f $srcdir/tools/gen_esp32part.py $outdir/tools/
cp -f $srcdir/tools/gen_esp32part.exe $outdir/tools/

echo " - cleaning *.DS_Store files..."
find $outdir -name '*.DS_Store' -exec rm -f {} \;

# Do some replacements in platform.txt file, which are required because IDE
# handles tool paths differently when package is installed in hardware folder
echo " - updating platform.txt..."
cat $srcdir/platform.txt | \
    sed "s/version=.*/version=$ver$extent/g" | \
    sed 's/runtime.tools.xtensa-esp32-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32-elf//g' | \
    sed 's/tools.esptool_py.path={runtime.platform.path}\/tools\/esptool/tools.esptool_py.path=\{runtime.tools.esptool_py.path\}/g' \
    > $outdir/platform.txt

# Put core version and short hash of git version into core_version.h
ver_define=`echo $ver | tr "[:lower:].\055" "[:upper:]_"`
echo " - generating C/C++ header defines ($ver_define -> /cores/esp32/core_version.h)..."

echo \#define ARDUINO_ESP32_GIT_VER 0x$verx >$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_GIT_DESC `git describe --tags 2>/dev/null` >>$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_RELEASE_$ver_define >>$outdir/cores/esp32/core_version.h
echo \#define ARDUINO_ESP32_RELEASE \"$ver_define\" >>$outdir/cores/esp32/core_version.h

# Store submodules' current versions
echo " - getting submodule list (${releaseDir}/submodules.txt)..."
git submodule status > $releaseDir/submodules.txt

# remove all .git* files
echo " - removing *.git files possibly fetched to package tempdir..."
find $outdir -name '*.git*' -type f -delete

# Zip the package
package_name_zip=$package_name.zip
echo " - creating package ZIP archive (${package_name_zip})..."

pushd $releaseDir >/dev/null

zip -qr $package_name_zip $package_name
if [ $? -ne 0 ]; then echo " !error: failed to create ${package_name_zip} (ZIP errno: $?) => aborting"; exit 1; fi

# Calculate SHA sum and size of ZIP archive
sha=`shasum -a 256 $package_name_zip | cut -f 1 -d ' '`
size=`/bin/ls -l $package_name_zip | awk '{print $5}'`
echo " ${package_name_zip} creation OK (size: $size, sha2: $sha)"
echo

echo "Making $package_name JSON definition file(s):"

popd >/dev/null

PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json"

# figure out the package type (release / pre-release)
shopt -s nocasematch
if [[ $TRAVIS_TAG == *-RC* ]]; then
    bIsPrerelease=1
    package_name_json=$PACKAGE_JSON_DEV
    echo " - package type: PRE-RELEASE, JSON def.file: $PACKAGE_JSON_DEV"
else
    bIsPrerelease=0
    package_name_json=$PACKAGE_JSON_REL
    echo " - package type: RELEASE, JSON def.files: $PACKAGE_JSON_REL, $PACKAGE_JSON_DEV"
fi
shopt -u nocasematch

# Cleanup temporary work dir
rm -rf $outdir

# Get all previously released versions
echo " - fetching previous (pre)release versions from GitHub..."

set +e

releasesJson=$releaseDir/releases.json
curl -sH "Authorization: token $curlAuth" https://api.github.com/repos/$TRAVIS_REPO_SLUG/releases > $releasesJson
if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

prev_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})
prev_any_release=$(jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})
prev_pre_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == true)) | sort_by(.created_at | - fromdateiso8601) | .[0].tag_name' ${releasesJson})

shopt -s nocasematch
if [ "$prev_any_release" == "$TRAVIS_TAG" ]; then
    prev_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
    prev_any_release=$(jq -e -r '. | map(select(.draft == false)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
    prev_pre_release=$(jq -e -r '. | map(select(.draft == false and .prerelease == true)) | sort_by(.created_at | - fromdateiso8601) | .[1].tag_name' ${releasesJson})
fi
shopt -u nocasematch

set -e

rm -f "$releasesJson"

echo " previous Release: $prev_release"
echo " previous Pre-release: $prev_pre_release"
echo " previous (any)release: $prev_any_release"

# add generated items to JSON package-definition contents
jq_arg=".packages[0].platforms[0].version = \"$ver\" | \
    .packages[0].platforms[0].url = \"$REMOTE_URL/$package_name_zip\" |\
    .packages[0].platforms[0].archiveFileName = \"$package_name_zip\""

jq_arg="$jq_arg |\
    .packages[0].platforms[0].size = \"$size\" |\
    .packages[0].platforms[0].checksum = \"SHA-256:$sha\""

# always get DEV version of JSON (included in both RC/REL)
pkgJsonDev=$releaseDir/$PACKAGE_JSON_DEV
echo " - generating/merging _DEV_ JSON file (${pkgJsonDev})..."

cat $srcdir/package/package_esp32_index.template.json | jq "$jq_arg" > $pkgJsonDev
cd $srcdir
if [ ! -z "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
    downloadAndMergePackageJSON "https://github.com/$TRAVIS_REPO_SLUG/releases/download/${prev_any_release}/${PACKAGE_JSON_DEV}" "${pkgJsonDev}" "${curlAuth}" "$releaseDir"

    # Release notes: GIT log comments (prev_any_release, current_release]
    echo " - executing: git log --oneline $prev_any_release.."
    git log --oneline $prev_any_release.. > $releaseDir/commits.txt
fi

# for RELEASE run update REL JSON as well
if [ $bIsPrerelease -eq 0 ]; then

    pkgJsonRel=$releaseDir/$PACKAGE_JSON_REL
    echo " - generating/merging _REL_ JSON file (${pkgJsonRel})..."

    cat $srcdir/package/package_esp32_index.template.json | jq "$jq_arg" > $pkgJsonRel
    if [ ! -z "$prev_release" ] && [ "$prev_release" != "null" ]; then
        downloadAndMergePackageJSON "https://github.com/$TRAVIS_REPO_SLUG/releases/download/${prev_release}/${PACKAGE_JSON_REL}" "${pkgJsonRel}" "${curlAuth}" "$releaseDir"

        # Release notes: GIT log comments (prev_release, current_release]
        echo " - executing: git log --oneline $prev_release.."
        git log --oneline $prev_release.. > $releaseDir/commits.txt
    fi
fi

echo
echo "JSON definition file(s) creation OK"

echo
echo "==================================================================="
echo "Package preparation done ('$releaseDir' contents):"
fileset=`ls -1 $releaseDir`
echo -e $fileset

echo
echo "==================================================================="
echo "==================================================================="
echo "'$package_name' ready for publishing, processing completed."
echo "==================================================================="
echo
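Note: the jq_arg filter above only rewrites five fields of the first platform entry in the package template. A minimal sketch of that transformation, run against an invented, stripped-down template with placeholder values (illustrative only, not the real package_esp32_index.template.json):

echo '{"packages":[{"platforms":[{"version":"","url":"","archiveFileName":"","size":"","checksum":""}]}]}' | \
jq '.packages[0].platforms[0].version = "1.0.4" |
    .packages[0].platforms[0].url = "http://localhost:8000/esp32-1.0.4.zip" |
    .packages[0].platforms[0].archiveFileName = "esp32-1.0.4.zip" |
    .packages[0].platforms[0].size = "12345678" |
    .packages[0].platforms[0].checksum = "SHA-256:<sha256 of the zip>"'
# -> prints the same JSON with the five fields filled in; merge_packages.py then
#    folds in the platform entries from the previously published index.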
141
tools/ci/build-sketch.py
Executable file
@@ -0,0 +1,141 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# build-sketch.py - build a sketch using arduino-builder
#
# Wrapper script around arduino-builder which accepts some ESP32-specific
# options and translates them into FQBN
#
# Copyright © 2016 Ivan Grokhotkov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
#


from __future__ import print_function
import sys
import os
import argparse
import subprocess
import tempfile
import shutil

def compile(tmp_dir, cache_dir, sketch, ide_path, f, args):
    cmd = ide_path + '/arduino-builder '
    cmd += '-compile -logger=human '
    cmd += '-hardware "' + ide_path + '/hardware" '
    if args.usr_path:
        cmd += '-hardware "' + args.usr_path + '/hardware" '
    if args.hardware_path:
        for hw_dir in args.hardware_path:
            cmd += '-hardware "' + hw_dir + '" '
    cmd += '-tools "' + ide_path + '/tools-builder" '
    if args.tools_path:
        for tools_dir in args.tools_path:
            cmd += '-tools "' + tools_dir + '" '
    cmd += '-built-in-libraries "' + ide_path + '/libraries" '
    if args.usr_path:
        cmd += '-libraries "' + args.usr_path + '/libraries" '
    if args.library_path:
        for lib_dir in args.library_path:
            cmd += '-libraries "' + lib_dir + '" '
    cmd += '-fqbn={fqbn} '.format(**vars(args))
    cmd += '-ide-version=10810 '
    cmd += '-build-path "' + tmp_dir + '" '
    cmd += '-warnings={warnings} '.format(**vars(args))
    cmd += '-build-cache "' + cache_dir + '" '
    if args.verbose:
        cmd += '-verbose '
    cmd += sketch

    if args.verbose:
        print('Building: ' + cmd, file=f)

    cmds = cmd.split(' ')
    p = subprocess.Popen(cmds, stdout=f, stderr=subprocess.STDOUT)
    p.wait()
    return p.returncode

def parse_args():
    parser = argparse.ArgumentParser(description='Sketch build helper')
    parser.add_argument('-v', '--verbose', help='Enable verbose output', action='store_true')
    parser.add_argument('-i', '--ide_path', help='Arduino IDE path')
    parser.add_argument('-b', '--build_path', help='Build directory')
    parser.add_argument('-c', '--build_cache', help='Core Cache directory')
    parser.add_argument('-u', '--usr_path', help='Arduino Home directory (holds your sketches, libraries and hardware)')
    parser.add_argument('-f', '--fqbn', help='Arduino Board FQBN')
    parser.add_argument('-l', '--library_path', help='Additional library path', action='append')
    parser.add_argument('-d', '--hardware_path', help='Additional hardware path', action='append')
    parser.add_argument('-t', '--tools_path', help='Additional tools path', action='append')
    parser.add_argument('-w', '--warnings', help='Compilation warnings level', default='none', choices=['none', 'all', 'more', 'default'])
    parser.add_argument('-o', '--output_binary', help='File name for output binary')
    parser.add_argument('-k', '--keep', action='store_true', help='Don\'t delete temporary build directory')
    parser.add_argument('sketch_path', help='Sketch file path')
    return parser.parse_args()

def main():
    args = parse_args()

    ide_path = args.ide_path
    if not ide_path:
        ide_path = os.environ.get('ARDUINO_IDE_PATH')
        if not ide_path:
            print("Please specify Arduino IDE path via --ide_path option "
                  "or ARDUINO_IDE_PATH environment variable.", file=sys.stderr)
            return 2

    if not args.fqbn:
        print("Please specify Arduino Board FQBN using the --fqbn option", file=sys.stderr)
        return 3

    sketch_path = args.sketch_path

    tmp_dir = args.build_path
    created_tmp_dir = False
    if not tmp_dir:
        tmp_dir = tempfile.mkdtemp()
        created_tmp_dir = True

    cache_dir = args.build_cache
    created_cache_dir = False
    if not cache_dir:
        cache_dir = tempfile.mkdtemp()
        created_cache_dir = True

    output_name = tmp_dir + '/' + os.path.basename(sketch_path) + '.bin'

    if args.verbose:
        print("Sketch: ", sketch_path)
        print("Build dir: ", tmp_dir)
        print("Cache dir: ", cache_dir)
        print("Output: ", output_name)

    if args.verbose:
        f = sys.stdout
    else:
        f = open(tmp_dir + '/build.log', 'w')

    res = compile(tmp_dir, cache_dir, sketch_path, ide_path, f, args)
    if res != 0:
        return res

    if args.output_binary is not None:
        shutil.copy(output_name, args.output_binary)

    if created_tmp_dir and not args.keep:
        shutil.rmtree(tmp_dir, ignore_errors=True)

    if created_cache_dir and not args.keep:
        shutil.rmtree(cache_dir, ignore_errors=True)

if __name__ == '__main__':
    sys.exit(main())
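For local use outside Travis, a minimal invocation could look like the following; the paths and the trimmed-down FQBN are examples only (they mirror the defaults exported by tools/ci/test-arduino-ide.sh), not values required by the script:

# Assumes the Arduino IDE is unpacked at $HOME/arduino_ide and this core is
# linked under $HOME/Arduino/hardware/espressif/esp32 (see prep-arduino-ide.sh).
export ARDUINO_IDE_PATH=$HOME/arduino_ide
python tools/ci/build-sketch.py -v -k \
    -b $HOME/build.tmp -c $HOME/cache.tmp \
    -u $HOME/Arduino \
    -f espressif:esp32:esp32:PartitionScheme=huge_app \
    libraries/WiFi/examples/WiFiClient/WiFiClient.ino
# The binary lands in the build dir as WiFiClient.ino.bin; add -o <file> to copy it elsewhere.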
64
tools/ci/build-tests.sh
Executable file
@@ -0,0 +1,64 @@
#!/bin/bash

if [ ! -z "$TRAVIS_TAG" ]; then
    echo "Skipping Test: Tagged build"
    exit 0
fi

CHUNK_INDEX=$1
CHUNKS_CNT=$2
BUILD_PIO=0
if [ "$#" -lt 2 ]; then
    echo "Building all sketches"
    CHUNK_INDEX=0
    CHUNKS_CNT=1
    BUILD_PIO=1
fi
if [ "$CHUNKS_CNT" -le 0 ]; then
    CHUNK_INDEX=0
    CHUNKS_CNT=1
    BUILD_PIO=1
fi
if [ "$CHUNK_INDEX" -gt "$CHUNKS_CNT" ]; then
    CHUNK_INDEX=$CHUNKS_CNT
fi
if [ "$CHUNK_INDEX" -eq "$CHUNKS_CNT" ]; then
    BUILD_PIO=1
fi

# CMake Test
if [ "$CHUNK_INDEX" -eq 0 ]; then
    echo -e "travis_fold:start:check_cmakelists"
    tools/ci/check-cmakelists.sh
    if [ $? -ne 0 ]; then exit 1; fi
    echo -e "travis_fold:end:check_cmakelists"
fi

if [ "$BUILD_PIO" -eq 0 ]; then
    # ArduinoIDE Test
    echo -e "travis_fold:start:prep_arduino_ide"
    tools/ci/prep-arduino-ide.sh
    if [ $? -ne 0 ]; then exit 1; fi
    echo -e "travis_fold:end:prep_arduino_ide"

    echo -e "travis_fold:start:test_arduino_ide"
    tools/ci/test-arduino-ide.sh $CHUNK_INDEX $CHUNKS_CNT
    if [ $? -ne 0 ]; then exit 1; fi
    echo -e "travis_fold:end:test_arduino_ide"

    echo -e "travis_fold:start:size_report"
    cat size.log
    echo -e "travis_fold:end:size_report"
else
    # PlatformIO Test
    echo -e "travis_fold:start:prep_platformio"
    cd tools && python get.py && cd ..
    tools/ci/prep-platformio.sh
    if [ $? -ne 0 ]; then exit 1; fi
    echo -e "travis_fold:end:prep_platformio"

    echo -e "travis_fold:start:test_platformio"
    tools/ci/test-platformio.sh
    if [ $? -ne 0 ]; then exit 1; fi
    echo -e "travis_fold:end:test_platformio"
fi
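The two positional arguments are meant to be fed from the CI job matrix; some illustrative invocations (the chunk numbers are made up):

tools/ci/build-tests.sh 0 3   # chunk 0 of 3: CMakeLists check plus the first third of the IDE example sketches
tools/ci/build-tests.sh 1 3   # chunk 1 of 3: the second third of the IDE example sketches
tools/ci/build-tests.sh 3 3   # chunk index == chunk count: runs the PlatformIO builds instead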
29
tools/ci/check-cmakelists.sh
Executable file
@@ -0,0 +1,29 @@
#!/bin/bash
#
# This script is for Travis. It checks that all non-examples source files in libraries/ and cores/ are listed in
# CMakeLists.txt for the cmake-based IDF component
#
# If you see an error running this script, edit CMakeLists.txt and add any new source files into your PR
#

set -e

cd "`dirname $0`/../.." # cd to arduino-esp32 root

# pull all submodules
git submodule update --init --recursive

# find all source files in repo
REPO_SRCS=`find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort`

# find all source files named in CMakeLists.txt COMPONENT_SRCS
CMAKE_SRCS=`cmake --trace-expand -C CMakeLists.txt 2>&1 | grep COMPONENT_SRCS | sed 's/.\+COMPONENT_SRCS //' | sed 's/ )//' | tr ' ;' '\n' | sort`

if ! diff -u0 --label "Repo Files" --label "COMPONENT_SRCS" <(echo "$REPO_SRCS") <(echo "$CMAKE_SRCS"); then
    echo "Source files in repo (-) and source files in CMakeLists.txt (+) don't match"
    echo "Edit CMakeLists.txt as appropriate to add/remove source files from COMPONENT_SRCS"
    exit 1
fi

echo "CMakeLists.txt and repo source files match"
exit 0
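When the two lists drift apart, the unified diff prints repo-only files with a leading '-' and CMakeLists-only entries with a leading '+'. A hypothetical failure might look like this (the file name is invented):

# --- Repo Files
# +++ COMPONENT_SRCS
# @@ -42 +41,0 @@
# -libraries/NewLib/src/NewLib.cpp
#
# i.e. NewLib.cpp exists in the repo but is missing from COMPONENT_SRCS in CMakeLists.txt.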
248
tools/ci/deploy-release.sh
Normal file
@@ -0,0 +1,248 @@
#!/bin/bash


json_escape () {
    printf '%s' "$1" | python -c 'import json,sys; print(json.dumps(sys.stdin.read()))'
    #printf '%s' "$1" | php -r 'echo json_encode(file_get_contents("php://stdin"));'
}

set -e

#Cmdline options
# -t: tag (*-RC* determines prerelease version, can be overridden by -p)
# -a: GitHub API access token
# -s: GitHub repository slug (user/repo)
# -p: prerelease true/false
# -f: files to upload (ie assets. delim = ';', must come quoted)
# -d: directory to upload (by adding dir contents to assets)
while getopts ":t:,:a:,:s:,:p:,:f:,:d:" opt; do
    case $opt in
        t)
            varTagName=$OPTARG
            echo "TAG: $varTagName" >&2
            ;;
        a)
            varAccessToken=$OPTARG
            echo "ACCESS TOKEN: $varAccessToken" >&2
            ;;
        s)
            varRepoSlug=$OPTARG
            echo "REPO SLUG: $varRepoSlug" >&2
            ;;
        p)
            varPrerelease=$OPTARG
            echo "PRERELEASE: $varPrerelease" >&2
            ;;
        f)
            varAssets=$OPTARG
            echo "ASSETS: $varAssets" >&2
            ;;
        d)
            varAssetsDir=$OPTARG
            echo "ASSETS DIR: $varAssetsDir" >&2
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            exit 1
            ;;
    esac
done

# use TravisCI env as default, if available
if [ -z $varTagName ] && [ ! -z $TRAVIS_TAG ]; then
    varTagName=$TRAVIS_TAG
fi

if [ -z $varTagName ]; then
    echo "No tag name available => aborting"
    exit 1
fi

#Check tag name for release/prerelease (prerelease tag contains '-RC' as for release-candidate. case-insensitive)
shopt -s nocasematch
if [ -z $varPrerelease ]; then
    if [[ $varTagName == *-RC* ]]; then
        varPrerelease=true
    else
        varPrerelease=false
    fi
fi
shopt -u nocasematch

#
# Prepare Markdown release notes:
#################################
# - annotated tags only, lightweight tags just display message of referred commit
# - tag's description conversion to relnotes:
#     first 3 lines (tagname, committer, blank): ignored
#     4th line: relnotes heading
#     remaining lines: each converted to bullet-list item
#     empty lines ignored
#     if '* ' found as a first char pair, it's converted to '- ' to keep bulleting unified
echo
echo Preparing release notes
echo -----------------------
echo "Tag's message:"

relNotesRaw=`git show -s --format=%b $varTagName`
readarray -t msgArray <<<"$relNotesRaw"
arrLen=${#msgArray[@]}

#process annotated tags only
if [ $arrLen -gt 3 ] && [ "${msgArray[0]:0:3}" == "tag" ]; then
    ind=3
    while [ $ind -lt $arrLen ]; do
        if [ $ind -eq 3 ]; then
            releaseNotes="#### ${msgArray[ind]}"
            releaseNotes+=$'\r\n'
        else
            oneLine="$(echo -e "${msgArray[ind]}" | sed -e 's/^[[:space:]]*//')"

            if [ ${#oneLine} -gt 0 ]; then
                if [ "${oneLine:0:2}" == "* " ]; then oneLine=$(echo ${oneLine/\*/-}); fi
                if [ "${oneLine:0:2}" != "- " ]; then releaseNotes+="- "; fi
                releaseNotes+="$oneLine"
                releaseNotes+=$'\r\n'

                #debug output
                echo " ${oneLine}"
            fi
        fi
        let ind=$ind+1
    done
fi

echo "$releaseNotes"

# - list of commits (commits.txt must exist in the output dir)
commitFile=$varAssetsDir/commits.txt
if [ -s "$commitFile" ]; then

    releaseNotes+=$'\r\n##### Commits\r\n'

    echo
    echo "Commits:"

    IFS=$'\n'
    for next in `cat $commitFile`
    do
        IFS=' ' read -r commitId commitMsg <<< "$next"
        commitLine="- [$commitId](https://github.com/$varRepoSlug/commit/$commitId) $commitMsg"
        echo " $commitLine"

        releaseNotes+="$commitLine"
        releaseNotes+=$'\r\n'
    done
    rm -f $commitFile
fi

# Check possibly existing release for current tag
echo
echo "Processing GitHub release record for $varTagName:"
echo "-------------------------------------------------"

echo " - check $varTagName possible existence..."

# (eg build invoked by Create New Release GHUI button -> GH default release pack created immediately including default assets)
HTTP_RESPONSE=$(curl -L --silent --write-out "HTTPSTATUS:%{http_code}" https://api.github.com/repos/$varRepoSlug/releases/tags/$varTagName?access_token=$varAccessToken)
if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

HTTP_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')
HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
echo " ---> GitHub server HTTP response: $HTTP_STATUS"

# if the release exists, append/update recent files to its assets vector
if [ $HTTP_STATUS -eq 200 ]; then
    releaseId=$(echo $HTTP_BODY | jq -r '.id')
    echo " - $varTagName release found (id $releaseId)"

    #Merge release notes and overwrite pre-release flag. all other attributes remain unchanged:

    # 1. take existing notes from server (added by release creator)
    releaseNotesGH=$(echo $HTTP_BODY | jq -r '.body')

    # - strip possibly trailing CR
    if [ "${releaseNotesGH: -1}" == $'\r' ]; then
        releaseNotesTemp="${releaseNotesGH:0:-1}"
    else
        releaseNotesTemp="$releaseNotesGH"
    fi
    # - add CRLF to make relnotes consistent for JSON encoding
    releaseNotesTemp+=$'\r\n'

    # 2. append generated relnotes (usually commit oneliners)
    releaseNotes="$releaseNotesTemp$releaseNotes"

    # 3. JSON-encode whole string for GH API transfer
    releaseNotes=$(json_escape "$releaseNotes")

    # 4. remove extra quotes returned by python (dummy but whatever)
    releaseNotes=${releaseNotes:1:-1}

    #Update current GH release record
    echo " - updating release notes and pre-release flag:"

    curlData="{\"body\": \"$releaseNotes\",\"prerelease\": $varPrerelease}"
    echo " <data.begin>$curlData<data.end>"
    echo
    #echo "DEBUG: curl --data \"$curlData\" https://api.github.com/repos/$varRepoSlug/releases/$releaseId?access_token=$varAccessToken"

    curl --data "$curlData" https://api.github.com/repos/$varRepoSlug/releases/$releaseId?access_token=$varAccessToken
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

    echo " - $varTagName release record successfully updated"

#... or create a new release record
else
    releaseNotes=$(json_escape "$releaseNotes")
    releaseNotes=${releaseNotes:1:-1}

    echo " - release $varTagName not found, creating a new record:"

    curlData="{\"tag_name\": \"$varTagName\",\"target_commitish\": \"master\",\"name\": \"v$varTagName\",\"body\": \"$releaseNotes\",\"draft\": false,\"prerelease\": $varPrerelease}"
    echo " <data.begin>$curlData<data.end>"

    #echo "DEBUG: curl --data \"${curlData}\" https://api.github.com/repos/${varRepoSlug}/releases?access_token=$varAccessToken | jq -r '.id'"
    releaseId=$(curl --data "$curlData" https://api.github.com/repos/$varRepoSlug/releases?access_token=$varAccessToken | jq -r '.id')
    if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

    echo " - $varTagName release record successfully created (id $releaseId)"
fi

# Assets defined by dir contents
if [ ! -z $varAssetsDir ]; then
    varAssetsTemp=$(ls -p $varAssetsDir | grep -v / | tr '\n' ';')
    for item in $(echo $varAssetsTemp | tr ";" "\n")
    do
        varAssets+=$varAssetsDir/$item;
        varAssets+=';'
    done
fi

#Upload additional assets
if [ ! -z $varAssets ]; then
    echo
    echo "Uploading assets:"
    echo "-----------------"
    echo " Files to upload:"
    echo " $varAssets"
    echo

    curlAuth="Authorization: token $varAccessToken"
    for filename in $(echo $varAssets | tr ";" "\n")
    do
        echo " - ${filename}:"
        curl -X POST -sH "$curlAuth" -H "Content-Type: application/octet-stream" --data-binary @"$filename" https://uploads.github.com/repos/$varRepoSlug/releases/$releaseId/assets?name=$(basename $filename)

        if [ $? -ne 0 ]; then echo "FAILED: $? => aborting"; exit 1; fi

        echo
        echo "OK"
        echo

    done
fi
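As a worked example of the conversion rules above, with an invented annotated tag laid out the way the comment block assumes (tag name, tagger, blank line, then the description):

# git show -s --format=%b 1.0.4-RC1 might print:
#   tag 1.0.4-RC1
#   Tagger: Jane Doe <jane@example.com>
#
#   Release candidate 1
#   * BLE notification fixes
#   Faster WiFi reconnect
#
# The loop skips the first three lines, turns the 4th into the heading and
# bullets the rest, yielding (with CRLF endings):
#   #### Release candidate 1
#   - BLE notification fixes
#   - Faster WiFi reconnect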
13
tools/ci/prep-arduino-ide.sh
Executable file
@@ -0,0 +1,13 @@
#!/bin/bash

pip install pyserial
wget -O arduino.tar.xz https://www.arduino.cc/download.php?f=/arduino-nightly-linux64.tar.xz
tar xf arduino.tar.xz
mv arduino-nightly $HOME/arduino_ide
mkdir -p $HOME/Arduino/libraries
mkdir -p $HOME/Arduino/hardware/espressif
cd $HOME/Arduino/hardware/espressif

ln -s $TRAVIS_BUILD_DIR esp32
cd esp32/tools
python get.py
6
tools/ci/prep-platformio.sh
Executable file
@@ -0,0 +1,6 @@
#!/bin/bash

pip install -U https://github.com/platformio/platformio/archive/develop.zip && \
python -m platformio platform install https://github.com/platformio/platform-espressif32.git#feature/stage && \
sed -i 's/https:\/\/github\.com\/espressif\/arduino-esp32\.git/*/' ~/.platformio/platforms/espressif32/platform.json && \
ln -s $TRAVIS_BUILD_DIR ~/.platformio/packages/framework-arduinoespressif32
170
tools/ci/test-arduino-ide.sh
Executable file
@@ -0,0 +1,170 @@
#!/bin/bash

CHUNK_INDEX=$1
CHUNKS_CNT=$2
if [ "$#" -lt 2 ]; then
    echo "Building all sketches"
    CHUNK_INDEX=0
    CHUNKS_CNT=1
fi
if [ "$CHUNKS_CNT" -le 0 ]; then
    echo "Chunks count must be positive number"
    exit 1
fi
if [ "$CHUNK_INDEX" -ge "$CHUNKS_CNT" ]; then
    echo "Chunk index must be less than chunks count"
    exit 1
fi

export ARDUINO_BOARD_FQBN="espressif:esp32:esp32:PSRAM=enabled,PartitionScheme=huge_app,CPUFreq=240,FlashMode=qio,FlashFreq=80,FlashSize=4M,UploadSpeed=921600,DebugLevel=none"

export ARDUINO_IDE_PATH=$HOME/arduino_ide
export ARDUINO_USR_PATH=$HOME/Arduino

export EXAMPLES_PATH=$TRAVIS_BUILD_DIR/libraries
export EXAMPLES_BUILD_DIR=$HOME/build.tmp
export EXAMPLES_CACHE_DIR=$HOME/cache.tmp
export EXAMPLES_BUILD_CMD="python $TRAVIS_BUILD_DIR/tools/ci/build-sketch.py -v -k -b $EXAMPLES_BUILD_DIR -c $EXAMPLES_CACHE_DIR -u $ARDUINO_USR_PATH -f $ARDUINO_BOARD_FQBN "
export EXAMPLES_SIZE_BIN=$TRAVIS_BUILD_DIR/tools/xtensa-esp32-elf/bin/xtensa-esp32-elf-size

function print_size_info()
{
    elf_file=$1

    if [ -z "$elf_file" ]; then
        printf "sketch iram0.text flash.text flash.rodata dram0.data dram0.bss dram flash\n"
        return 0
    fi

    elf_name=$(basename $elf_file)
    sketch_name="${elf_name%.*}"
    # echo $sketch_name
    declare -A segments
    while read -a tokens; do
        seg=${tokens[0]}
        seg=${seg//./}
        size=${tokens[1]}
        addr=${tokens[2]}
        if [ "$addr" -eq "$addr" -a "$addr" -ne "0" ] 2>/dev/null; then
            segments[$seg]=$size
        fi
    done < <($EXAMPLES_SIZE_BIN --format=sysv $elf_file)

    total_ram=$((${segments[dram0data]} + ${segments[dram0bss]}))
    total_flash=$((${segments[iram0text]} + ${segments[flashtext]} + ${segments[dram0data]} + ${segments[flashrodata]}))
    printf "%-32s %-8d %-8d %-8d %-8d %-8d %-8d %-8d\n" $sketch_name ${segments[iram0text]} ${segments[flashtext]} ${segments[flashrodata]} ${segments[dram0data]} ${segments[dram0bss]} $total_ram $total_flash
    return 0
}

function build_sketch()
{
    local sketch=$1
    echo -e "\n------------ Building $sketch ------------\n";
    rm -rf $EXAMPLES_BUILD_DIR/*
    time ($EXAMPLES_BUILD_CMD $sketch >build.log)
    local result=$?
    if [ $result -ne 0 ]; then
        echo "Build failed ($1)"
        echo "Build log:"
        cat build.log
        return $result
    fi
    rm build.log
    return 0
}

function count_sketches()
{
    local sketches=$(find $EXAMPLES_PATH -name '*.ino')
    local sketchnum=0
    rm -rf sketches.txt
    for sketch in $sketches; do
        local sketchdir=$(dirname $sketch)
        local sketchdirname=$(basename $sketchdir)
        local sketchname=$(basename $sketch)
        if [[ "${sketchdirname}.ino" != "$sketchname" ]]; then
            continue
        fi;
        if [[ -f "$sketchdir/.test.skip" ]]; then
            continue
        fi
        echo $sketch >> sketches.txt
        sketchnum=$(($sketchnum + 1))
    done
    return $sketchnum
}

function build_sketches()
{
    mkdir -p $EXAMPLES_BUILD_DIR
    mkdir -p $EXAMPLES_CACHE_DIR
    mkdir -p $ARDUINO_USR_PATH/libraries
    mkdir -p $ARDUINO_USR_PATH/hardware

    local chunk_index=$1
    local chunks_num=$2
    count_sketches
    local sketchcount=$?
    local sketches=$(cat sketches.txt)

    local chunk_size=$(( $sketchcount / $chunks_num ))
    local all_chunks=$(( $chunks_num * $chunk_size ))
    if [ "$all_chunks" -lt "$sketchcount" ]; then
        chunk_size=$(( $chunk_size + 1 ))
    fi

    local start_index=$(( $chunk_index * $chunk_size ))
    if [ "$sketchcount" -le "$start_index" ]; then
        echo "Skipping job"
        return 0
    fi

    local end_index=$(( $(( $chunk_index + 1 )) * $chunk_size ))
    if [ "$end_index" -gt "$sketchcount" ]; then
        end_index=$sketchcount
    fi

    local start_num=$(( $start_index + 1 ))
    #echo -e "Sketches: \n$sketches\n"
    echo "Found $sketchcount Sketches";
    echo "Chunk Count : $chunks_num"
    echo "Chunk Size : $chunk_size"
    echo "Start Sketch: $start_num"
    echo "End Sketch : $end_index"

    local sketchnum=0
    print_size_info >size.log
    for sketch in $sketches; do
        local sketchdir=$(dirname $sketch)
        local sketchdirname=$(basename $sketchdir)
        local sketchname=$(basename $sketch)
        if [[ "${sketchdirname}.ino" != "$sketchname" ]]; then
            #echo "Skipping $sketch, because it is not the main sketch file";
            continue
        fi;
        if [[ -f "$sketchdir/.test.skip" ]]; then
            #echo "Skipping $sketch marked";
            continue
        fi
        sketchnum=$(($sketchnum + 1))
        if [ "$sketchnum" -le "$start_index" ]; then
            #echo "Skipping $sketch index low"
            continue
        fi
        if [ "$sketchnum" -gt "$end_index" ]; then
            #echo "Skipping $sketch index high"
            continue
        fi
        build_sketch $sketch
        local result=$?
        if [ $result -ne 0 ]; then
            return $result
        fi
        print_size_info $EXAMPLES_BUILD_DIR/*.elf >>size.log
    done
    return 0
}

build_sketches $CHUNK_INDEX $CHUNKS_CNT

if [ $? -ne 0 ]; then exit 1; fi
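To make the chunking arithmetic in build_sketches() concrete, a worked example with invented numbers:

# 100 sketches split across 3 chunks:
#   chunk_size = 100 / 3 = 33; 33 * 3 = 99 < 100, so chunk_size is bumped to 34
#   chunk 0 builds sketches  1..34   (start_index 0,  end_index 34)
#   chunk 1 builds sketches 35..68   (start_index 34, end_index 68)
#   chunk 2 builds sketches 69..100  (start_index 68, end_index capped at 100)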
9
tools/ci/test-platformio.sh
Executable file
@@ -0,0 +1,9 @@
#!/bin/bash

python -m platformio ci --board esp32dev libraries/WiFi/examples/WiFiClient && \
python -m platformio ci --board esp32dev libraries/WiFiClientSecure/examples/WiFiClientSecure && \
python -m platformio ci --board esp32dev libraries/BluetoothSerial/examples/SerialToSerialBT && \
python -m platformio ci --board esp32dev libraries/BLE/examples/BLE_server && \
python -m platformio ci --board esp32dev libraries/AzureIoT/examples/GetStarted && \
python -m platformio ci --board esp32dev libraries/ESP32/examples/Camera/CameraWebServer --project-option="board_build.partitions = huge_app.csv"
if [ $? -ne 0 ]; then exit 1; fi