mirror of https://github.com/platformio/platformio-core.git (synced 2025-12-24 15:48:07 +01:00)

Compare commits: develop ... feature/is (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | b5bc5712b1 |  |
21  .appveyor.yml  Normal file
@@ -0,0 +1,21 @@
build: off
environment:

  matrix:
    - TOXENV: "py27"

install:
  - cmd: git submodule update --init --recursive
  - cmd: SET PATH=%PATH%;C:\Python27\Scripts;C:\MinGW\bin
  - cmd: pip install tox

test_script:
  - cmd: tox

notifications:
  - provider: Slack
    incoming_webhook:
      secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
    on_build_success: false
    on_build_failure: true
    on_build_status_changed: true
@@ -12,7 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=unused-import
from platformio.device.monitor.filters.base import (
    DeviceMonitorFilterBase as DeviceMonitorFilter,
)

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    pragma: no cover
    def __repr__
    raise AssertionError
    raise NotImplementedError
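The [report] / exclude_lines block above is coverage.py report configuration: any source line matching one of those regexes is left out of the coverage report. A small illustrative snippet (not part of this diff; the class and function names are made up):

# Illustrative only: with the exclude_lines patterns above, coverage.py
# skips these lines (and the blocks they introduce) when reporting misses.
class SerialPort:
    def __repr__(self):  # matched by the "def __repr__" pattern
        return "SerialPort()"

    def read(self):
        raise NotImplementedError  # matched by "raise NotImplementedError"


def win32_only():  # pragma: no cover
    pass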
1  .github/FUNDING.yml  vendored
@@ -1 +0,0 @@
custom: https://platformio.org/donate

27  .github/ISSUE_TEMPLATE.md  vendored
@@ -1,27 +1,22 @@
What kind of issue is this?

- [ ] **Question**.
  This issue tracker is not the place for questions. If you want to ask how to do something,
  or to understand why something isn't working the way you expect it to,
  use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)
- [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
  something, or to understand why something isn't working the way you expect it to, use
  our Community Forums https://community.platformio.org

- [ ] **PlatformIO IDE**.
  All issues related to PlatformIO IDE should be reported to the
  [PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues) repository
- [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
  https://github.com/platformio/platformio-atom-ide/issues

- [ ] **Development Platform or Board**.
  All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
  should be reported to appropriate repository related to your hardware
  https://github.com/topics/platformio-platform
- [ ] Development Platform or Board. All issues related to Development Platforms or Embedded Boards
  should be reported to appropriate repository.
  See full list with repositories and search for "platform-xxx" repository related to your hardware
  https://github.com/platformio?query=platform-

- [ ] **Feature Request**.
  Start by telling us what problem you’re trying to solve. Often a solution
- [ ] Feature Request. Start by telling us what problem you’re trying to solve. Often a solution
  already exists! Don’t send pull requests to implement new features without first getting our
  support. Sometimes we leave features out on purpose to keep the project small.

- [ ] **PlatformIO Core**.
  If you’ve found a bug, please provide an information below.

- [ ] PlatformIO Core. If you’ve found a bug, please provide an information below.

*You can erase any parts of this template not applicable to your Issue.*
53  .github/workflows/core.yml  vendored
@@ -1,53 +0,0 @@
|
||||
name: Core
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
python-version: ["3.11", "3.12", "3.13", "3.14"]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox
|
||||
|
||||
- name: Run "codespell" on Linux
|
||||
if: startsWith(matrix.os, 'ubuntu')
|
||||
run: |
|
||||
python -m pip install codespell
|
||||
make codespell
|
||||
|
||||
- name: Core System Info
|
||||
run: |
|
||||
tox -e py
|
||||
|
||||
- name: Integration Tests
|
||||
if: ${{ matrix.python-version == '3.11' }}
|
||||
run: |
|
||||
tox -e testcore
|
||||
|
||||
- name: Slack Notification
|
||||
uses: homoluctus/slatify@master
|
||||
if: failure()
|
||||
with:
|
||||
type: ${{ job.status }}
|
||||
job_name: '*Core*'
|
||||
commit: true
|
||||
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
|
||||
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
|
||||
45  .github/workflows/deployment.yml  vendored
@@ -1,45 +0,0 @@
|
||||
name: Deployment
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "release/**"
|
||||
|
||||
jobs:
|
||||
deployment:
|
||||
runs-on: ubuntu-latest
|
||||
environment: production
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox build
|
||||
|
||||
- name: Deployment Tests
|
||||
env:
|
||||
TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
|
||||
TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
|
||||
TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
|
||||
run: |
|
||||
tox -e testcore
|
||||
|
||||
- name: Build Python distributions
|
||||
run: python -m build
|
||||
|
||||
- name: Publish package to PyPI
|
||||
if: ${{ github.ref == 'refs/heads/master' }}
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||
109  .github/workflows/docs.yml  vendored
@@ -1,109 +0,0 @@
|
||||
name: Docs
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Docs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox
|
||||
|
||||
- name: Build docs
|
||||
run: |
|
||||
tox -e docs
|
||||
|
||||
- name: Slack Notification
|
||||
uses: homoluctus/slatify@master
|
||||
if: failure()
|
||||
with:
|
||||
type: ${{ job.status }}
|
||||
job_name: '*Docs*'
|
||||
commit: true
|
||||
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
|
||||
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
|
||||
|
||||
- name: Preserve Docs
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
run: |
|
||||
tar -czvf docs.tar.gz -C docs/_build html rtdpage
|
||||
|
||||
- name: Save artifact
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: docs
|
||||
path: ./docs.tar.gz
|
||||
|
||||
deploy:
|
||||
name: Deploy Docs
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCS_REPO: platformio/platformio-docs
|
||||
DOCS_DIR: platformio-docs
|
||||
LATEST_DOCS_DIR: latest-docs
|
||||
RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
steps:
|
||||
- name: Download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: docs
|
||||
- name: Unpack artifact
|
||||
run: |
|
||||
mkdir ./${{ env.LATEST_DOCS_DIR }}
|
||||
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
|
||||
- name: Delete Artifact
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: docs
|
||||
- name: Select Docs type
|
||||
id: get-destination-dir
|
||||
run: |
|
||||
if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
|
||||
echo "::set-output name=dst_dir::stable"
|
||||
else
|
||||
echo "::set-output name=dst_dir::latest"
|
||||
fi
|
||||
- name: Checkout latest Docs
|
||||
continue-on-error: true
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: ${{ env.DOCS_REPO }}
|
||||
path: ${{ env.DOCS_DIR }}
|
||||
ref: gh-pages
|
||||
- name: Synchronize Docs
|
||||
run: |
|
||||
rm -rf ${{ env.DOCS_DIR }}/.git
|
||||
rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
|
||||
mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
|
||||
cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
|
||||
if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
|
||||
rm -rf ${{ env.DOCS_DIR }}/page
|
||||
mkdir -p ${{ env.DOCS_DIR }}/page
|
||||
cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
|
||||
fi
|
||||
- name: Validate Docs
|
||||
run: |
|
||||
if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
|
||||
echo "Docs folder is empty. Aborting!"
|
||||
exit 1
|
||||
fi
|
||||
- name: Deploy to Github Pages
|
||||
uses: peaceiris/actions-gh-pages@v4
|
||||
with:
|
||||
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
|
||||
external_repository: ${{ env.DOCS_REPO }}
|
||||
publish_dir: ./${{ env.DOCS_DIR }}
|
||||
commit_message: Sync Docs
|
||||
73  .github/workflows/examples.yml  vendored
@@ -1,73 +0,0 @@
|
||||
name: Examples
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
PIO_INSTALL_DEVPLATFORM_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"
|
||||
|
||||
steps:
|
||||
- name: Free Disk Space
|
||||
uses: endersonmenezes/free-disk-space@v3
|
||||
with:
|
||||
remove_android: true
|
||||
remove_dotnet: true
|
||||
remove_haskell: true
|
||||
# Faster cleanup
|
||||
remove_packages_one_command: true
|
||||
rm_cmd: "rmz"
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox
|
||||
|
||||
- name: Run on Linux
|
||||
if: startsWith(matrix.os, 'ubuntu')
|
||||
run: |
|
||||
# Free space
|
||||
sudo apt clean
|
||||
# docker rmi $(docker image ls -aq)
|
||||
df -h
|
||||
tox -e testexamples
|
||||
|
||||
- name: Run on macOS
|
||||
if: startsWith(matrix.os, 'macos')
|
||||
run: |
|
||||
df -h
|
||||
tox -e testexamples
|
||||
|
||||
- name: Run on Windows
|
||||
if: startsWith(matrix.os, 'windows')
|
||||
env:
|
||||
PLATFORMIO_CORE_DIR: C:/pio
|
||||
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
|
||||
run: |
|
||||
tox -e testexamples
|
||||
|
||||
- name: Slack Notification
|
||||
uses: homoluctus/slatify@master
|
||||
if: failure()
|
||||
with:
|
||||
type: ${{ job.status }}
|
||||
job_name: '*Examples*'
|
||||
commit: true
|
||||
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
|
||||
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
|
||||
56  .github/workflows/projects.yml  vendored
@@ -1,56 +0,0 @@
|
||||
name: Projects
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
project:
|
||||
- marlin:
|
||||
repository: "MarlinFirmware/Marlin"
|
||||
folder: "Marlin"
|
||||
config_dir: "Marlin"
|
||||
env_name: "mega2560"
|
||||
- smartknob:
|
||||
repository: "scottbez1/smartknob"
|
||||
folder: "smartknob"
|
||||
config_dir: "smartknob"
|
||||
env_name: "view"
|
||||
- espurna:
|
||||
repository: "xoseperez/espurna"
|
||||
folder: "espurna"
|
||||
config_dir: "espurna/code"
|
||||
env_name: "nodemcu-lolin"
|
||||
- OpenMQTTGateway:
|
||||
repository: "1technophile/OpenMQTTGateway"
|
||||
folder: "OpenMQTTGateway"
|
||||
config_dir: "OpenMQTTGateway"
|
||||
env_name: "esp32-m5atom-lite"
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.11
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: pip install -U .
|
||||
|
||||
- name: Check out ${{ matrix.project.repository }}
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
repository: ${{ matrix.project.repository }}
|
||||
path: ${{ matrix.project.folder }}
|
||||
|
||||
- name: Compile ${{ matrix.project.repository }}
|
||||
run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
|
||||
|
||||
3  .gitignore  vendored
@@ -1,6 +1,6 @@
*.egg-info
*.pyc
__pycache__
.pioenvs
.tox
docs/_build
dist
@@ -9,4 +9,3 @@ build
coverage.xml
.coverage
htmlcov
.pytest_cache
3  .isort.cfg  Normal file
@@ -0,0 +1,3 @@
[settings]
line_length=79
known_third_party=arrow,bottle,click,configobj,lockfile,pytest,requests,SCons,semantic_version,serial
33  .pylintrc
@@ -1,12 +1,23 @@
|
||||
[REPORTS]
|
||||
output-format=colorized
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
disable=
|
||||
missing-docstring,
|
||||
duplicate-code,
|
||||
invalid-name,
|
||||
too-few-public-methods,
|
||||
consider-using-f-string,
|
||||
cyclic-import,
|
||||
use-dict-literal
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
||||
confidence=
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time. See also the "--disable" option for examples.
|
||||
#enable=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating
|
||||
|
||||
disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import
|
||||
|
||||
3  .style.yapf  Normal file
@@ -0,0 +1,3 @@
[style]
blank_line_before_nested_class_or_def = true
allow_multiline_lambdas = true

42  .travis.yml  Normal file
@@ -0,0 +1,42 @@
|
||||
language: python
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
sudo: false
|
||||
python: 2.7
|
||||
env: TOX_ENV=docs
|
||||
- os: linux
|
||||
sudo: false
|
||||
python: 2.7
|
||||
env: TOX_ENV=lint
|
||||
- os: linux
|
||||
sudo: required
|
||||
python: 2.7
|
||||
env: TOX_ENV=py27
|
||||
- os: osx
|
||||
language: generic
|
||||
env: TOX_ENV=skipexamples
|
||||
|
||||
install:
|
||||
- git submodule update --init --recursive
|
||||
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install -U tox; else pip install -U tox; fi
|
||||
|
||||
# ChipKIT issue: install 32-bit support for GCC PIC32
|
||||
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
|
||||
|
||||
script:
|
||||
- tox -e $TOX_ENV
|
||||
|
||||
after_success:
|
||||
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
|
||||
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
|
||||
slack:
|
||||
rooms:
|
||||
secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
|
||||
on_failure: always
|
||||
on_success: change
|
||||
15  .vscode/settings.json  vendored  Normal file
@@ -0,0 +1,15 @@
{
    "python.pythonPath": "${workspaceRoot}/.tox/develop/bin/python",
    "python.formatting.provider": "yapf",
    "files.exclude": {
        "**/*.pyc": true,
        "*.egg-info": true,
        ".cache": true,
        "build": true,
        "dist": true
    },
    "editor.rulers": [79],
    "restructuredtext.builtDocumentationPath": "${workspaceRoot}/docs/_build/html",
    "restructuredtext.confPath": "${workspaceRoot}/docs",
    "restructuredtext.linter.executablePath": "${workspaceRoot}/.tox/docs/bin/restructuredtext-lint"
}
@@ -1,3 +0,0 @@
# Code of Conduct

See https://piolabs.com/legal/code-of-conduct.html
@@ -1,22 +1,21 @@
Contributing
------------

To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.
To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.

1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of the PlatformIO Core project where `tox.ini` is located (``cd platformio-core``) and run `tox -e py39`.
   You can replace `py39` with your own Python version. For example, `py311` means Python 3.11.
1. Fork the repository on GitHub.
2. Make a branch off of ``develop``
3. Run ``pip install tox``
4. Go to the root of project where is located ``tox.ini`` and run ``tox -e develop``
5. Activate current development environment:

   * Windows: `.tox\py39\Scripts\activate`
   * Bash/ZSH: `source .tox/py39/bin/activate`
   * Fish: `source .tox/py39/bin/activate.fish`
   * Windows: ``.tox\develop\Scripts\activate``
   * Bash/ZSH: ``source .tox/develop/bin/activate``
   * Fish: ``source .tox/bin/activate.fish``

6. Make changes to code, documentation, etc.
7. Lint source code `make before-commit`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
7. Lint source code ``tox -e lint``
8. Run the tests ``tox -e py27``
9. Build documentation ``tox -e docs`` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub
11. Submit a Pull Request on GitHub.
1729  HISTORY.rst  (file diff suppressed because it is too large)

32  Makefile
@@ -1,22 +1,15 @@

lint:
	pylint --rcfile=./.pylintrc ./tests
	pylint --rcfile=./.pylintrc ./platformio

isort:
	isort ./platformio
	isort ./tests
	isort -rc ./platformio
	isort -rc ./tests

format:
	black ./platformio
	black ./tests
yapf:
	yapf --recursive --in-place platformio/

codespell:
	codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

test:
	pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: codespell isort format lint
before-commit: isort yapf lint

clean-docs:
	rm -rf docs/_build
@@ -27,15 +20,4 @@ clean: clean-docs
	rm -rf .cache
	rm -rf build
	rm -rf htmlcov
	rm -f .coverage

profile:
	# Usage $ > make PIOARGS="boards" profile
	python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
	snakeviz .tox/.tmp/cprofile.prof

pack:
	python setup.py sdist

publish:
	python setup.py sdist upload
	rm -f .coverage
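The profile target above writes cProfile data to .tox/.tmp/cprofile.prof and opens it with snakeviz. As a rough alternative sketch (not part of this diff), the same dump can be inspected with the standard-library pstats module:

import pstats

# Load the dump written by "make profile" and print the 20 most
# expensive calls sorted by cumulative time.
stats = pstats.Stats(".tox/.tmp/cprofile.prof")
stats.sort_stats("cumulative").print_stats(20)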
165  README.rst
@@ -1,95 +1,124 @@
|
||||
PlatformIO Core
|
||||
===============
|
||||
PlatformIO
|
||||
==========
|
||||
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
|
||||
:target: https://docs.platformio.org/en/latest/core/index.html
|
||||
:alt: CI Build for PlatformIO Core
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
|
||||
:target: https://docs.platformio.org?utm_source=github&utm_medium=core
|
||||
:alt: CI Build for Docs
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
|
||||
:target: https://github.com/platformio/platformio-examples
|
||||
:alt: CI Build for dev-platform examples
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Projects/badge.svg
|
||||
:target: https://docs.platformio.org/en/latest/tutorials/index.html#projects
|
||||
:alt: CI Build for the Community Projects
|
||||
.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
|
||||
:target: https://travis-ci.org/platformio/platformio-core
|
||||
:alt: Travis.CI Build Status
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
|
||||
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
|
||||
:alt: AppVeyor.CI Build Status
|
||||
.. image:: https://requires.io/github/platformio/platformio-core/requirements.svg?branch=develop
|
||||
:target: https://requires.io/github/platformio/platformio-core/requirements/?branch=develop
|
||||
:alt: Requirements Status
|
||||
.. image:: https://img.shields.io/pypi/v/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: Latest Version
|
||||
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
|
||||
:alt: PlatformIO Labs
|
||||
:target: https://piolabs.com/?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/pypi/l/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: License
|
||||
.. image:: https://img.shields.io/PlatformIO/Community.png
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org
|
||||
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
|
||||
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://pioplus.com
|
||||
|
||||
**Quick Links:** `Homepage <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Registry <https://registry.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
|
||||
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
|
||||
`Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_
|
||||
**Quick Links:** `Home Page <http://platformio.org>`_ |
|
||||
`PlatformIO Plus <https://pioplus.com>`_ |
|
||||
`PlatformIO IDE <http://platformio.org/platformio-ide>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
|
||||
`Docs <http://docs.platformio.org>`_ |
|
||||
`Donate <http://platformio.org/donate>`_ |
|
||||
`Contact Us <https://pioplus.com/contact.html>`_
|
||||
|
||||
**Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
`Community <https://community.platformio.org>`_
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
|
||||
:target: https://platformio.org?utm_source=github&utm_medium=core
|
||||
:target: http://platformio.org
|
||||
|
||||
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.
|
||||
|
||||
Unlock the true potential of embedded software development with
|
||||
PlatformIO's collaborative ecosystem, embracing declarative principles,
|
||||
test-driven methodologies, and modern toolchains for unrivaled success.
|
||||
|
||||
* Open source, maximum permissive Apache 2.0 license
|
||||
* Cross-platform IDE and Unified Debugger
|
||||
* Static Code Analyzer and Remote Unit Testing
|
||||
* Multi-platform and Multi-architecture Build System
|
||||
* Firmware File Explorer and Memory Inspection
|
||||
`PlatformIO <http://platformio.org>`_ is an open source ecosystem for IoT
|
||||
development. Cross-platform IDE and unified debugger. Remote unit testing and
|
||||
firmware updates.
|
||||
|
||||
Get Started
|
||||
-----------
|
||||
|
||||
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
|
||||
* `What is PlatformIO? <http://docs.platformio.org/page/what-is-platformio.html>`_
|
||||
|
||||
Solutions
|
||||
---------
|
||||
Products
|
||||
--------
|
||||
|
||||
* `Library Management <https://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <https://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Advanced**
|
||||
|
||||
* `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Static Code Analysis <https://docs.platformio.org/en/latest/plus/pio-check.html?utm_source=github&utm_medium=core>`_
|
||||
* `Remote Development <https://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO IDE <http://platformio.org/platformio-ide>`_
|
||||
* `PlatformIO Core <http://docs.platformio.org/page/core.html>`_
|
||||
* `PIO Remote™ <http://docs.platformio.org/page/plus/pio-remote.html>`_
|
||||
* `PIO Unified Debugger <http://docs.platformio.org/page/plus/debugging.html>`_
|
||||
* `PIO Unit Testing <http://docs.platformio.org/page/plus/unit-testing.html>`_
|
||||
* `PIO Delivery™ <http://platformio.org/pricing#solution-pio-delivery>`_
|
||||
* `Cloud Builder <http://platformio.org/pricing#solution-cloud-builder>`_
|
||||
|
||||
Registry
|
||||
--------
|
||||
|
||||
* `Libraries <https://registry.platformio.org/search?t=library&utm_source=github&utm_medium=core>`_
|
||||
* `Development Platforms <https://registry.platformio.org/search?t=platform&utm_source=github&utm_medium=core>`_
|
||||
* `Development Tools <https://registry.platformio.org/search?t=tool&utm_source=github&utm_medium=core>`_
|
||||
* `Libraries <http://platformio.org/lib>`_
|
||||
* `Development Platforms <http://platformio.org/platforms>`_
|
||||
* `Frameworks <http://platformio.org/frameworks>`_
|
||||
* `Embedded Boards <http://platformio.org/boards>`_
|
||||
|
||||
Solutions
|
||||
---------
|
||||
|
||||
* `Library Manager <http://docs.platformio.org/page/librarymanager/index.html>`_
|
||||
* `Cloud IDEs Integration <http://platformio.org/pricing#solution-cloud-ide>`_
|
||||
* `Standalone IDEs Integration <http://docs.platformio.org/page/ide.html#other-ide>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/page/ci/index.html>`_
|
||||
|
||||
Development Platforms
|
||||
---------------------
|
||||
|
||||
* `Atmel AVR <http://platformio.org/platforms/atmelavr>`_
|
||||
* `Atmel SAM <http://platformio.org/platforms/atmelsam>`_
|
||||
* `Espressif 32 <http://platformio.org/platforms/espressif32>`_
|
||||
* `Espressif 8266 <http://platformio.org/platforms/espressif8266>`_
|
||||
* `Freescale Kinetis <http://platformio.org/platforms/freescalekinetis>`_
|
||||
* `Intel ARC32 <http://platformio.org/platforms/intel_arc32>`_
|
||||
* `Lattice iCE40 <http://platformio.org/platforms/lattice_ice40>`_
|
||||
* `Maxim 32 <http://platformio.org/platforms/maxim32>`_
|
||||
* `Microchip PIC32 <http://platformio.org/platforms/microchippic32>`_
|
||||
* `Nordic nRF51 <http://platformio.org/platforms/nordicnrf51>`_
|
||||
* `Nordic nRF52 <http://platformio.org/platforms/nordicnrf52>`_
|
||||
* `NXP LPC <http://platformio.org/platforms/nxplpc>`_
|
||||
* `Silicon Labs EFM32 <http://platformio.org/platforms/siliconlabsefm32>`_
|
||||
* `ST STM32 <http://platformio.org/platforms/ststm32>`_
|
||||
* `Teensy <http://platformio.org/platforms/teensy>`_
|
||||
* `TI MSP430 <http://platformio.org/platforms/timsp430>`_
|
||||
* `TI Tiva <http://platformio.org/platforms/titiva>`_
|
||||
* `WIZNet W7500 <http://platformio.org/platforms/wiznet7500>`_
|
||||
|
||||
Frameworks
|
||||
----------
|
||||
|
||||
* `Arduino <http://platformio.org/frameworks/arduino>`_
|
||||
* `ARTIK SDK <http://platformio.org/frameworks/artik-sdk>`_
|
||||
* `CMSIS <http://platformio.org/frameworks/cmsis>`_
|
||||
* `Energia <http://platformio.org/frameworks/energia>`_
|
||||
* `ESP-IDF <http://platformio.org/frameworks/espidf>`_
|
||||
* `libOpenCM3 <http://platformio.org/frameworks/libopencm3>`_
|
||||
* `mbed <http://platformio.org/frameworks/mbed>`_
|
||||
* `Pumbaa <http://platformio.org/frameworks/pumbaa>`_
|
||||
* `Simba <http://platformio.org/frameworks/simba>`_
|
||||
* `SPL <http://platformio.org/frameworks/spl>`_
|
||||
* `STM32Cube <http://platformio.org/frameworks/stm32cube>`_
|
||||
* `WiringPi <http://platformio.org/frameworks/wiringpi>`_
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
See `contributing guidelines <https://github.com/platformio/platformio/blob/develop/CONTRIBUTING.md>`_.
|
||||
|
||||
Telemetry / Privacy Policy
|
||||
--------------------------
|
||||
|
||||
Share minimal diagnostics and usage information to help us make PlatformIO better.
|
||||
It is enabled by default. For more information see:
|
||||
|
||||
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
@@ -97,7 +126,3 @@ Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
|
||||
The PlatformIO is licensed under the permissive Apache 2.0 license,
|
||||
so you can use it in both commercial and personal projects with confidence.
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
|
||||
:target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
|
||||
:alt: SWUbanner
|
||||
34  SECURITY.md
@@ -1,34 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We are committed to ensuring the security and protection of PlatformIO Core.
|
||||
To this end, we support only the following versions:
|
||||
|
||||
| Version | Supported |
|
||||
| ------- | ------------------ |
|
||||
| 6.1.x | :white_check_mark: |
|
||||
| < 6.1 | :x: |
|
||||
|
||||
Unsupported versions of the PlatformIO Core may have known vulnerabilities or security issues that could compromise the security of our organization's systems and data.
|
||||
Therefore, it is important that all developers use only supported versions of the PlatformIO Core.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We take the security of our systems and data very seriously. We encourage responsible disclosure of any vulnerabilities or security issues that you may find in our systems or applications. If you believe you have discovered a vulnerability, please report it to us immediately.
|
||||
|
||||
To report a vulnerability, please send an email to our security team at contact@piolabs.com. Please include as much information as possible, including:
|
||||
|
||||
- A description of the vulnerability and how it can be exploited
|
||||
- Steps to reproduce the vulnerability
|
||||
- Any additional information that can help us understand and reproduce the vulnerability
|
||||
|
||||
Once we receive your report, our security team will acknowledge receipt within 24 hours and will work to validate the reported vulnerability. We will provide periodic updates on the progress of the vulnerability assessment, and will notify you once a fix has been deployed.
|
||||
|
||||
If the vulnerability is accepted, we will work to remediate the issue as quickly as possible. We may also provide credit or recognition to the individual who reported the vulnerability, at our discretion.
|
||||
|
||||
If the vulnerability is declined, we will provide a justification for our decision and may offer guidance on how to improve the report or how to test the system more effectively.
|
||||
|
||||
Please note that we will not take any legal action against individuals who report vulnerabilities in good faith and in accordance with this policy.
|
||||
|
||||
Thank you for helping us keep our systems and data secure.
|
||||
2  docs
Submodule docs updated: 23ef0f85ca...dafaa45eb9

2  examples
Submodule examples updated: 0409a90a01...de48488cd2
@@ -12,34 +12,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.

VERSION = (6, 1, "19a2")
import sys

VERSION = (3, 5, "0a9")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
__description__ = (
    "Your Gateway to Embedded Software Development Excellence. "
    "Unlock the true potential of embedded software development "
    "with PlatformIO's collaborative ecosystem, embracing "
    "declarative principles, test-driven methodologies, and "
    "modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"
__description__ = ("An open source ecosystem for IoT development. "
                   "Cross-platform build system and library manager. "
                   "Continuous and IDE integration. "
                   "Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"

__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"

__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__copyright__ = "Copyright 2014-present PlatformIO"

__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
    "registry.platformio.org",
    "registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__apiurl__ = "https://api.platformio.org"

__check_internet_hosts__ = [
    "185.199.110.153", # Github.com
    "88.198.170.159", # platformio.org
    "github.com",
] + __registry_mirror_hosts__
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
    msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
           "Minimum supported version is 2.7, please upgrade Python.\n"
           "Python 3 is not yet supported.\n")
    sys.stderr.write(msg % (__version__, sys.version.split()[0]))
    sys.exit(1)
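Both sides of this diff build __version__ the same way: the VERSION tuple mixes ints and a string, so each element goes through str() before joining. A quick illustrative snippet (not part of the diff):

VERSION = (6, 1, "19a2")  # the older branch uses (3, 5, "0a9")
__version__ = ".".join([str(s) for s in VERSION])
print(__version__)  # -> "6.1.19a2"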
@@ -14,67 +14,92 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from os.path import join
|
||||
from platform import system
|
||||
from traceback import format_exc
|
||||
|
||||
import click
|
||||
|
||||
from platformio import __version__, exception, maintenance
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.compat import IS_CYGWIN, ensure_python3
|
||||
from platformio.util import get_source_dir
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand): # pylint: disable=R0904
|
||||
|
||||
def list_commands(self, ctx):
|
||||
cmds = []
|
||||
for filename in os.listdir(join(get_source_dir(), "commands")):
|
||||
if filename.startswith("__init__"):
|
||||
continue
|
||||
if filename.endswith(".py"):
|
||||
cmds.append(filename[:-3])
|
||||
cmds.sort()
|
||||
return cmds
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
mod = None
|
||||
try:
|
||||
mod = __import__("platformio.commands." + cmd_name, None, None,
|
||||
["cli"])
|
||||
except ImportError:
|
||||
try:
|
||||
return self._handle_obsolate_command(cmd_name)
|
||||
except AttributeError:
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
|
||||
return mod.cli
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(name):
|
||||
if name == "platforms":
|
||||
from platformio.commands import platform
|
||||
return platform.cli
|
||||
elif name == "serialports":
|
||||
from platformio.commands import device
|
||||
return device.cli
|
||||
raise AttributeError()
|
||||
|
||||
|
||||
@click.command(
|
||||
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
|
||||
)
|
||||
@click.version_option(__version__, prog_name="PlatformIO Core")
|
||||
@click.option("--force", "-f", is_flag=True, help="DEPRECATED", hidden=True)
|
||||
@click.option("--caller", "-c", help="Caller ID (service)")
|
||||
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
|
||||
cls=PlatformioCLI,
|
||||
context_settings=dict(help_option_names=["-h", "--help"]))
|
||||
@click.version_option(__version__, prog_name="PlatformIO")
|
||||
@click.option(
|
||||
"--force",
|
||||
"-f",
|
||||
is_flag=True,
|
||||
help="Force to accept any confirmation prompts.")
|
||||
@click.option("--caller", "-c", help="Caller ID (service).")
|
||||
@click.pass_context
|
||||
def cli(ctx, force, caller, no_ansi): # pylint: disable=unused-argument
|
||||
try:
|
||||
if (
|
||||
no_ansi
|
||||
or str(
|
||||
os.getenv("PLATFORMIO_NO_ANSI", os.getenv("PLATFORMIO_DISABLE_COLOR"))
|
||||
).lower()
|
||||
== "true"
|
||||
):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: False
|
||||
elif (
|
||||
str(
|
||||
os.getenv("PLATFORMIO_FORCE_ANSI", os.getenv("PLATFORMIO_FORCE_COLOR"))
|
||||
).lower()
|
||||
== "true"
|
||||
):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
maintenance.on_cmd_start(ctx, caller)
|
||||
def cli(ctx, force, caller):
|
||||
maintenance.on_platformio_start(ctx, force, caller)
|
||||
|
||||
|
||||
@cli.result_callback()
|
||||
@cli.resultcallback()
|
||||
@click.pass_context
|
||||
def process_result(*_, **__):
|
||||
maintenance.on_cmd_end()
|
||||
def process_result(ctx, result, force, caller): # pylint: disable=W0613
|
||||
maintenance.on_platformio_end(ctx, result)
|
||||
|
||||
|
||||
def configure():
|
||||
if IS_CYGWIN:
|
||||
if "cygwin" in system().lower():
|
||||
raise exception.CygwinEnvDetected()
|
||||
|
||||
# https://urllib3.readthedocs.org
|
||||
# /en/latest/security.html#insecureplatformwarning
|
||||
try:
|
||||
import urllib3 # pylint: disable=import-outside-toplevel
|
||||
|
||||
import urllib3
|
||||
urllib3.disable_warnings()
|
||||
except (AttributeError, ImportError):
|
||||
pass
|
||||
|
||||
# handle PLATFORMIO_FORCE_COLOR
|
||||
if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
|
||||
try:
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# Handle IOError issue with VSCode's Terminal (Windows)
|
||||
click_echo_origin = [click.echo, click.secho]
|
||||
|
||||
@@ -82,46 +107,35 @@ def configure():
|
||||
try:
|
||||
click_echo_origin[origin](*args, **kwargs)
|
||||
except IOError:
|
||||
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
|
||||
"%s\n" % (args[0] if args else "")
|
||||
)
|
||||
(sys.stderr.write if kwargs.get("err") else
|
||||
sys.stdout.write)("%s\n" % (args[0] if args else ""))
|
||||
|
||||
click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
|
||||
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
exit_code = 0
|
||||
prev_sys_argv = sys.argv[:]
|
||||
if argv:
|
||||
assert isinstance(argv, list)
|
||||
sys.argv = argv
|
||||
|
||||
def main():
|
||||
try:
|
||||
ensure_python3(raise_exception=True)
|
||||
configure()
|
||||
cli() # pylint: disable=no-value-for-parameter
|
||||
except SystemExit as exc:
|
||||
if exc.code and str(exc.code).isdigit():
|
||||
exit_code = int(exc.code)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
if not isinstance(exc, exception.ReturnErrorCode):
|
||||
maintenance.on_platformio_exception(exc)
|
||||
error_str = f"{exc.__class__.__name__}: "
|
||||
if isinstance(exc, exception.PlatformioException):
|
||||
error_str += str(exc)
|
||||
cli(None, None, None)
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
if not isinstance(e, exception.ReturnErrorCode):
|
||||
maintenance.on_platformio_exception(e)
|
||||
error_str = "Error: "
|
||||
if isinstance(e, exception.PlatformioException):
|
||||
error_str += str(e)
|
||||
else:
|
||||
error_str += traceback.format_exc()
|
||||
error_str += format_exc()
|
||||
error_str += """
|
||||
============================================================
|
||||
|
||||
An unexpected error occurred. Further steps:
|
||||
|
||||
* Verify that you have the latest version of PlatformIO using
|
||||
`python -m pip install -U platformio` command
|
||||
`pip install -U platformio` command
|
||||
|
||||
* Try to find answer in FAQ Troubleshooting section
|
||||
https://docs.platformio.org/page/faq/index.html
|
||||
http://docs.platformio.org/page/faq.html
|
||||
|
||||
* Report this problem to the developers
|
||||
https://github.com/platformio/platformio-core/issues
|
||||
@@ -129,15 +143,13 @@ An unexpected error occurred. Further steps:
|
||||
============================================================
|
||||
"""
|
||||
click.secho(error_str, fg="red", err=True)
|
||||
exit_code = int(str(exc)) if str(exc).isdigit() else 1
|
||||
|
||||
maintenance.on_platformio_exit()
|
||||
sys.argv = prev_sys_argv
|
||||
return exit_code
|
||||
return int(str(e)) if str(e).isdigit() else 1
|
||||
return 0
|
||||
|
||||
|
||||
def debug_gdb_main():
|
||||
return main([sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:])
|
||||
sys.argv = [sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:]
|
||||
return main()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
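The refactored entry point above accepts an explicit argument list (main(argv=None)), temporarily swaps it into sys.argv, and returns an exit code rather than printing and exiting directly; debug_gdb_main() reuses it by prepending the debug arguments. A minimal usage sketch (illustrative only; it assumes this module is platformio/__main__.py as in the upstream layout, and "run" is just an example command):

import sys

from platformio.__main__ import main

if __name__ == "__main__":
    # Equivalent to running "platformio run" from the shell: argv[0] is the
    # program name, the remaining items are CLI arguments.
    exit_code = main([sys.argv[0], "run"])
    sys.exit(exit_code)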
@@ -1,44 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.commands.destroy import account_destroy_cmd
|
||||
from platformio.account.commands.forgot import account_forgot_cmd
|
||||
from platformio.account.commands.login import account_login_cmd
|
||||
from platformio.account.commands.logout import account_logout_cmd
|
||||
from platformio.account.commands.password import account_password_cmd
|
||||
from platformio.account.commands.register import account_register_cmd
|
||||
from platformio.account.commands.show import account_show_cmd
|
||||
from platformio.account.commands.token import account_token_cmd
|
||||
from platformio.account.commands.update import account_update_cmd
|
||||
|
||||
|
||||
@click.group(
|
||||
"account",
|
||||
commands=[
|
||||
account_destroy_cmd,
|
||||
account_forgot_cmd,
|
||||
account_login_cmd,
|
||||
account_logout_cmd,
|
||||
account_password_cmd,
|
||||
account_register_cmd,
|
||||
account_show_cmd,
|
||||
account_token_cmd,
|
||||
account_update_cmd,
|
||||
],
|
||||
short_help="Manage PlatformIO account",
|
||||
)
|
||||
def cli():
|
||||
pass
|
||||
@@ -1,352 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
from platformio import __accounts_api__, app
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
from platformio.http import HTTPClient, HTTPClientError
|
||||
|
||||
|
||||
class AccountError(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class AccountNotAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
|
||||
|
||||
|
||||
class AccountAlreadyAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are already authorized with {0} account."
|
||||
|
||||
|
||||
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(__accounts_api__)
|
||||
|
||||
@staticmethod
|
||||
def get_refresh_token():
|
||||
try:
|
||||
return app.get_state_item("account").get("auth").get("refresh_token")
|
||||
except Exception as exc:
|
||||
raise AccountNotAuthorized() from exc
|
||||
|
||||
@staticmethod
|
||||
def delete_local_session():
|
||||
app.delete_state_item("account")
|
||||
|
||||
@staticmethod
|
||||
def delete_local_state(key):
|
||||
account = app.get_state_item("account")
|
||||
if not account or key not in account:
|
||||
return
|
||||
del account[key]
|
||||
app.set_state_item("account", account)
|
||||
|
||||
def fetch_json_data(self, *args, **kwargs):
|
||||
try:
|
||||
return super().fetch_json_data(*args, **kwargs)
|
||||
except HTTPClientError as exc:
|
||||
raise AccountError(exc) from exc
|
||||
|
||||
def fetch_authentication_token(self):
|
||||
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
|
||||
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
|
||||
auth = app.get_state_item("account", {}).get("auth", {})
|
||||
if auth.get("access_token") and auth.get("access_token_expire"):
|
||||
if auth.get("access_token_expire") > time.time():
|
||||
return auth.get("access_token")
|
||||
if auth.get("refresh_token"):
|
||||
try:
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
headers={
|
||||
"Authorization": "Bearer %s" % auth.get("refresh_token")
|
||||
},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data.get("auth").get("access_token")
|
||||
except AccountError:
|
||||
self.delete_local_session()
|
||||
raise AccountNotAuthorized()
|
||||
|
||||
def login(self, username, password):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
data={"username": username, "password": password},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data
|
||||
|
||||
def login_with_code(self, client_id, code, redirect_uri):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
result = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login/code",
|
||||
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
|
||||
)
|
||||
app.set_state_item("account", result)
|
||||
return result
|
||||
|
||||
def logout(self):
|
||||
refresh_token = self.get_refresh_token()
|
||||
self.delete_local_session()
|
||||
try:
|
||||
self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/logout",
|
||||
data={"refresh_token": refresh_token},
|
||||
)
|
||||
except AccountError:
|
||||
pass
|
||||
return True
|
||||
|
||||
def change_password(self, old_password, new_password):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/password",
|
||||
data={"old_password": old_password, "new_password": new_password},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/registration",
|
||||
data={
|
||||
"username": username,
|
||||
"email": email,
|
||||
"password": password,
|
||||
"firstname": firstname,
|
||||
"lastname": lastname,
|
||||
},
|
||||
)
|
||||
|
||||
def auth_token(self, password, regenerate):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/token",
|
||||
data={"password": password, "regenerate": 1 if regenerate else 0},
|
||||
x_with_authorization=True,
|
||||
).get("auth_token")
|
||||
|
||||
def forgot_password(self, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/forgot",
|
||||
data={"username": username},
|
||||
)
|
||||
|
||||
def get_profile(self):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/profile",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_profile(self, profile, current_password):
|
||||
profile["current_password"] = current_password
|
||||
self.delete_local_state("summary")
|
||||
response = self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/profile",
|
||||
data=profile,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
return response
|
||||
|
||||
def get_account_info(self, offline=False):
|
||||
account = app.get_state_item("account") or {}
|
||||
if (
|
||||
account.get("summary")
|
||||
and account["summary"].get("expire_at", 0) > time.time()
|
||||
):
|
||||
return account["summary"]
|
||||
if offline and account.get("email"):
|
||||
return {
|
||||
"profile": {
|
||||
"email": account.get("email"),
|
||||
"username": account.get("username"),
|
||||
}
|
||||
}
|
||||
result = self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/summary",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
account["summary"] = dict(
|
||||
profile=result.get("profile"),
|
||||
packages=result.get("packages"),
|
||||
subscriptions=result.get("subscriptions"),
|
||||
user_id=result.get("user_id"),
|
||||
expire_at=int(time.time()) + self.SUMMARY_CACHE_TTL,
|
||||
)
|
||||
app.set_state_item("account", account)
|
||||
return result
|
||||
|
||||
def get_logged_username(self):
|
||||
return self.get_account_info(offline=True).get("profile").get("username")
|
||||
|
||||
def destroy_account(self):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/account",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_org(self, orgname, email, displayname):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs",
|
||||
data={"orgname": orgname, "email": email, "displayname": displayname},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_org(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_orgs(self):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_org(self, orgname, data):
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_org(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_org_owner(self, orgname, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_org_owners(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_org_owner(self, orgname, username):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_team(self, orgname, teamname, description):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
data={"name": teamname, "description": description},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_team(self, orgname, teamname):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_team(self, orgname, teamname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_teams(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_team(self, orgname, teamname, data):
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_team_member(self, orgname, teamname, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_team_member(self, orgname, teamname, username):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
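The methods above are thin wrappers around the PlatformIO Account REST API: each one forwards to fetch_json_data() with an HTTP verb, a /v1/... path, and x_with_authorization=True so the stored session token is attached. A minimal usage sketch follows; it is illustrative only, not part of this changeset, and assumes a previously logged-in session.

# Illustrative sketch: driving the REST wrappers above from a script (assumes prior login).
from platformio.account.client import AccountClient

client = AccountClient()
# Each call maps 1:1 onto one of the endpoints shown above and carries the session token.
for org in client.list_orgs():
    owners = [owner.get("username") for owner in org.get("owners", [])]
    print(org.get("orgname"), owners)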
@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,37 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient, AccountNotAuthorized


@click.command("destroy", short_help="Destroy account")
def account_destroy_cmd():
    client = AccountClient()
    click.confirm(
        "Are you sure you want to delete the %s user account?\n"
        "Warning! All linked data will be permanently removed and cannot be restored."
        % client.get_logged_username(),
        abort=True,
    )
    client.destroy_account()
    try:
        client.logout()
    except AccountNotAuthorized:
        pass
    click.secho(
        "User account has been destroyed.",
        fg="green",
    )
@@ -1,29 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("forgot", short_help="Forgot password")
@click.option("--username", prompt="Username or email")
def account_forgot_cmd(username):
    client = AccountClient()
    client.forgot_password(username)
    click.secho(
        "If this account is registered, we will send "
        "further instructions to your email.",
        fg="green",
    )
@@ -1,26 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("login", short_help="Log in to PlatformIO Account")
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login_cmd(username, password):
    client = AccountClient()
    client.login(username, password)
    click.secho("Successfully logged in!", fg="green")
@@ -1,24 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("logout", short_help="Log out of PlatformIO Account")
def account_logout_cmd():
    client = AccountClient()
    client.logout()
    click.secho("Successfully logged out!", fg="green")
@@ -1,26 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("password", short_help="Change password")
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password_cmd(old_password, new_password):
    client = AccountClient()
    client.change_password(old_password, new_password)
    click.secho("Password successfully changed!", fg="green")
@@ -1,52 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import (
    validate_email,
    validate_password,
    validate_username,
)


@click.command("register", short_help="Create new PlatformIO Account")
@click.option(
    "-u",
    "--username",
    prompt=True,
    callback=lambda _, __, value: validate_username(value),
)
@click.option(
    "-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
)
@click.option(
    "-p",
    "--password",
    prompt=True,
    hide_input=True,
    confirmation_prompt=True,
    callback=lambda _, __, value: validate_password(value),
)
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register_cmd(username, email, password, firstname, lastname):
    client = AccountClient()
    client.registration(username, email, password, firstname, lastname)
    click.secho(
        "An account has been successfully created. "
        "Please check your mail to activate your account and verify your email address.",
        fg="green",
    )
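Because each command above is a standard click entry point, it can be exercised in isolation with click's test runner. A hedged sketch follows; the credentials are placeholders, and a real run requires the account service to be reachable.

# Illustrative sketch using click's CliRunner; the input string feeds the interactive prompts.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(account_login_cmd, input="demo-user\nDemo-Passw0rd\n")
print(result.exit_code, result.output)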
@@ -1,116 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

import click
from tabulate import tabulate

from platformio import util
from platformio.account.client import AccountClient


@click.command("show", short_help="PlatformIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show_cmd(offline, json_output):
    client = AccountClient()
    info = client.get_account_info(offline)
    if json_output:
        click.echo(json.dumps(info))
        return
    click.echo()
    if info.get("profile"):
        print_profile(info["profile"])
    if info.get("packages"):
        print_packages(info["packages"])
    if info.get("subscriptions"):
        print_subscriptions(info["subscriptions"])
    click.echo()


def print_profile(profile):
    click.secho("Profile", fg="cyan", bold=True)
    click.echo("=" * len("Profile"))
    data = []
    if profile.get("username"):
        data.append(("Username:", profile["username"]))
    if profile.get("email"):
        data.append(("Email:", profile["email"]))
    if profile.get("firstname"):
        data.append(("First name:", profile["firstname"]))
    if profile.get("lastname"):
        data.append(("Last name:", profile["lastname"]))
    click.echo(tabulate(data, tablefmt="plain"))


def print_packages(packages):
    click.echo()
    click.secho("Packages", fg="cyan")
    click.echo("=" * len("Packages"))
    for package in packages:
        click.echo()
        click.secho(package.get("name"), bold=True)
        click.echo("-" * len(package.get("name")))
        if package.get("description"):
            click.echo(package.get("description"))
        data = []
        expire = "-"
        if "subscription" in package:
            expire = util.parse_datetime(
                package["subscription"].get("end_at")
                or package["subscription"].get("next_bill_at")
            ).strftime("%Y-%m-%d")
        data.append(("Expire:", expire))
        services = []
        for key in package:
            if not key.startswith("service."):
                continue
            if isinstance(package[key], dict):
                services.append(package[key].get("title"))
            else:
                services.append(package[key])
        if services:
            data.append(("Services:", ", ".join(services)))
        click.echo(tabulate(data, tablefmt="plain"))


def print_subscriptions(subscriptions):
    click.echo()
    click.secho("Subscriptions", fg="cyan")
    click.echo("=" * len("Subscriptions"))
    for subscription in subscriptions:
        click.echo()
        click.secho(subscription.get("product_name"), bold=True)
        click.echo("-" * len(subscription.get("product_name")))
        data = [("State:", subscription.get("status"))]
        begin_at = util.parse_datetime(subscription.get("begin_at")).strftime("%c")
        data.append(("Start date:", begin_at or "-"))
        end_at = subscription.get("end_at")
        if end_at:
            end_at = util.parse_datetime(subscription.get("end_at")).strftime("%c")
        data.append(("End date:", end_at or "-"))
        next_bill_at = subscription.get("next_bill_at")
        if next_bill_at:
            next_bill_at = util.parse_datetime(
                subscription.get("next_bill_at")
            ).strftime("%c")
        data.append(("Next payment:", next_bill_at or "-"))
        data.append(
            ("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
        )
        data.append(
            ("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
        )
        click.echo(tabulate(data, tablefmt="plain"))
@@ -1,32 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

import click

from platformio.account.client import AccountClient


@click.command("token", short_help="Get or regenerate Authentication Token")
@click.option("-p", "--password", prompt=True, hide_input=True)
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token_cmd(password, regenerate, json_output):
    client = AccountClient()
    auth_token = client.auth_token(password, regenerate)
    if json_output:
        click.echo(json.dumps({"status": "success", "result": auth_token}))
        return
    click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
@@ -1,59 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient, AccountNotAuthorized
from platformio.account.validate import validate_email, validate_username


@click.command("update", short_help="Update profile information")
@click.option("--current-password", prompt=True, hide_input=True)
@click.option("--username")
@click.option("--email")
@click.option("--firstname")
@click.option("--lastname")
def account_update_cmd(current_password, **kwargs):
    client = AccountClient()
    profile = client.get_profile()
    new_profile = profile.copy()
    if not any(kwargs.values()):
        for field in profile:
            new_profile[field] = click.prompt(
                field.replace("_", " ").capitalize(), default=profile[field]
            )
            if field == "email":
                validate_email(new_profile[field])
            if field == "username":
                validate_username(new_profile[field])
    else:
        new_profile.update({key: value for key, value in kwargs.items() if value})
    client.update_profile(new_profile, current_password)
    click.secho("Profile successfully updated!", fg="green")
    username_changed = new_profile["username"] != profile["username"]
    email_changed = new_profile["email"] != profile["email"]
    if not username_changed and not email_changed:
        return None
    try:
        client.logout()
    except AccountNotAuthorized:
        pass
    if email_changed:
        click.secho(
            "Please check your mail to verify your new email address and re-login.",
            fg="yellow",
        )
        return None
    click.secho("Please re-login.", fg="yellow")
    return None
@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.org.commands.add import org_add_cmd
from platformio.account.org.commands.create import org_create_cmd
from platformio.account.org.commands.destroy import org_destroy_cmd
from platformio.account.org.commands.list import org_list_cmd
from platformio.account.org.commands.remove import org_remove_cmd
from platformio.account.org.commands.update import org_update_cmd


@click.group(
    "org",
    commands=[
        org_add_cmd,
        org_create_cmd,
        org_destroy_cmd,
        org_list_cmd,
        org_remove_cmd,
        org_update_cmd,
    ],
    short_help="Manage organizations",
)
def cli():
    pass
@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,34 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("add", short_help="Add a new owner to organization")
@click.argument(
    "orgname",
)
@click.argument(
    "username",
)
def org_add_cmd(orgname, username):
    client = AccountClient()
    client.add_org_owner(orgname, username)
    return click.secho(
        "The new owner `%s` has been successfully added to the `%s` organization."
        % (username, orgname),
        fg="green",
    )
@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_email, validate_orgname


@click.command("create", short_help="Create a new organization")
@click.argument(
    "orgname",
    callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
    "--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option(
    "--displayname",
)
def org_create_cmd(orgname, email, displayname):
    client = AccountClient()
    client.create_org(orgname, email, displayname)
    return click.secho(
        "The organization `%s` has been successfully created." % orgname,
        fg="green",
    )
@@ -1,34 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def org_destroy_cmd(orgname):
    client = AccountClient()
    click.confirm(
        "Are you sure you want to delete the `%s` organization account?\n"
        "Warning! All linked data will be permanently removed and cannot be restored."
        % orgname,
        abort=True,
    )
    client.destroy_org(orgname)
    return click.secho(
        "Organization `%s` has been destroyed." % orgname,
        fg="green",
    )
@@ -1,48 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

import click
from tabulate import tabulate

from platformio.account.client import AccountClient


@click.command("list", short_help="List organizations and their members")
@click.option("--json-output", is_flag=True)
def org_list_cmd(json_output):
    client = AccountClient()
    orgs = client.list_orgs()
    if json_output:
        return click.echo(json.dumps(orgs))
    if not orgs:
        return click.echo("You do not have any organizations")
    for org in orgs:
        click.echo()
        click.secho(org.get("orgname"), fg="cyan")
        click.echo("-" * len(org.get("orgname")))
        data = []
        if org.get("displayname"):
            data.append(("Display Name:", org.get("displayname")))
        if org.get("email"):
            data.append(("Email:", org.get("email")))
        data.append(
            (
                "Owners:",
                ", ".join((owner.get("username") for owner in org.get("owners"))),
            )
        )
        click.echo(tabulate(data, tablefmt="plain"))
    return click.echo()
@@ -1,34 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("remove", short_help="Remove an owner from organization")
@click.argument(
    "orgname",
)
@click.argument(
    "username",
)
def org_remove_cmd(orgname, username):
    client = AccountClient()
    client.remove_org_owner(orgname, username)
    return click.secho(
        "The `%s` owner has been successfully removed from the `%s` organization."
        % (username, orgname),
        fg="green",
    )
@@ -1,50 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_email, validate_orgname


@click.command("update", short_help="Update organization")
@click.argument("cur_orgname")
@click.option(
    "--orgname",
    callback=lambda _, __, value: validate_orgname(value) if value else value,
    help="A new orgname",
)
@click.option(
    "--email",
    callback=lambda _, __, value: validate_email(value) if value else value,
)
@click.option("--displayname")
def org_update_cmd(cur_orgname, **kwargs):
    client = AccountClient()
    org = client.get_org(cur_orgname)
    new_org = {
        key: value if value is not None else org[key] for key, value in kwargs.items()
    }
    if not any(kwargs.values()):
        for key in kwargs:
            new_org[key] = click.prompt(key.capitalize(), default=org[key])
            if key == "email":
                validate_email(new_org[key])
            if key == "orgname":
                validate_orgname(new_org[key])
    client.update_org(cur_orgname, new_org)
    return click.secho(
        "The organization `%s` has been successfully updated." % cur_orgname,
        fg="green",
    )
@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.team.commands.add import team_add_cmd
from platformio.account.team.commands.create import team_create_cmd
from platformio.account.team.commands.destroy import team_destroy_cmd
from platformio.account.team.commands.list import team_list_cmd
from platformio.account.team.commands.remove import team_remove_cmd
from platformio.account.team.commands.update import team_update_cmd


@click.group(
    "team",
    commands=[
        team_add_cmd,
        team_create_cmd,
        team_destroy_cmd,
        team_list_cmd,
        team_remove_cmd,
        team_update_cmd,
    ],
    short_help="Manage organization teams",
)
def cli():
    pass
@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("add", short_help="Add a new member to team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument(
    "username",
)
def team_add_cmd(orgname_teamname, username):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.add_team_member(orgname, teamname, username)
    return click.secho(
        "The new member %s has been successfully added to the %s team."
        % (username, teamname),
        fg="green",
    )
@@ -1,37 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("create", short_help="Create a new team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
    "--description",
)
def team_create_cmd(orgname_teamname, description):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.create_team(orgname, teamname, description)
    return click.secho(
        "The team %s has been successfully created." % teamname,
        fg="green",
    )
@@ -1,40 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("destroy", short_help="Destroy a team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
def team_destroy_cmd(orgname_teamname):
    orgname, teamname = orgname_teamname.split(":", 1)
    click.confirm(
        click.style(
            "Are you sure you want to destroy the %s team?" % teamname, fg="yellow"
        ),
        abort=True,
    )
    client = AccountClient()
    client.destroy_team(orgname, teamname)
    return click.secho(
        "The team %s has been successfully destroyed." % teamname,
        fg="green",
    )
@@ -1,61 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

import click
from tabulate import tabulate

from platformio.account.client import AccountClient


@click.command("list", short_help="List teams")
@click.argument("orgname", required=False)
@click.option("--json-output", is_flag=True)
def team_list_cmd(orgname, json_output):
    client = AccountClient()
    data = {}
    if not orgname:
        for item in client.list_orgs():
            teams = client.list_teams(item.get("orgname"))
            data[item.get("orgname")] = teams
    else:
        teams = client.list_teams(orgname)
        data[orgname] = teams
    if json_output:
        return click.echo(json.dumps(data[orgname] if orgname else data))
    if not any(data.values()):
        return click.secho("You do not have any teams.", fg="yellow")
    for org_name, teams in data.items():
        for team in teams:
            click.echo()
            click.secho("%s:%s" % (org_name, team.get("name")), fg="cyan")
            click.echo("-" * len("%s:%s" % (org_name, team.get("name"))))
            table_data = []
            if team.get("description"):
                table_data.append(("Description:", team.get("description")))
            table_data.append(
                (
                    "Members:",
                    (
                        ", ".join(
                            (member.get("username") for member in team.get("members"))
                        )
                        if team.get("members")
                        else "-"
                    ),
                )
            )
            click.echo(tabulate(table_data, tablefmt="plain"))
    return click.echo()
@@ -1,36 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("remove", short_help="Remove a member from team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument("username")
def team_remove_cmd(orgname_teamname, username):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.remove_team_member(orgname, teamname, username)
    return click.secho(
        "The %s member has been successfully removed from the %s team."
        % (username, teamname),
        fg="green",
    )
@@ -1,51 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname, validate_teamname


@click.command("update", short_help="Update team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
    "--name",
    callback=lambda _, __, value: validate_teamname(value) if value else value,
    help="A new team name",
)
@click.option(
    "--description",
)
def team_update_cmd(orgname_teamname, **kwargs):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    team = client.get_team(orgname, teamname)
    new_team = {
        key: value if value is not None else team[key] for key, value in kwargs.items()
    }
    if not any(kwargs.values()):
        for key in kwargs:
            new_team[key] = click.prompt(key.capitalize(), default=team[key])
            if key == "name":
                validate_teamname(new_team[key])
    client.update_team(orgname, teamname, new_team)
    return click.secho(
        "The team %s has been successfully updated." % teamname,
        fg="green",
    )
@@ -1,84 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re

import click


def validate_username(value, field="username"):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid %s format. "
            "%s must contain only alphanumeric characters "
            "or single hyphens, cannot begin or end with a hyphen, "
            "and must not be longer than 38 characters."
            % (field.lower(), field.capitalize())
        )
    return value


def validate_orgname(value):
    return validate_username(value, "Organization name")


def validate_email(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I
    ):
        raise click.BadParameter("Invalid email address")
    return value


def validate_password(value):
    value = str(value).strip() if value else None
    if not value or not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
        raise click.BadParameter(
            "Invalid password format. "
            "Password must contain at least 8 characters"
            " including a number and a lowercase letter"
        )
    return value


def validate_teamname(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid team name format. "
            "Team name must contain only alphanumeric characters, "
            "single hyphens, underscores, or spaces. It cannot "
            "begin or end with a hyphen or an underscore and must"
            " not be longer than 20 characters."
        )
    return value


def validate_orgname_teamname(value):
    value = str(value).strip() if value else None
    if not value or ":" not in value:
        raise click.BadParameter(
            "Please specify organization and team name using the following"
            " format - orgname:teamname. For example, mycompany:DreamTeam"
        )
    orgname, teamname = value.split(":", 1)
    validate_orgname(orgname)
    validate_teamname(teamname)
    return value
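A short sketch of how these validators behave at the call boundary: each one returns the cleaned value or raises click.BadParameter. The inputs below are made up for illustration and are not part of this changeset.

# Illustrative calls against the validators defined above.
import click

print(validate_orgname_teamname("mycompany:DreamTeam"))  # passes, returns the value
print(validate_teamname("Dream Team"))                   # spaces between characters are allowed
try:
    validate_teamname("-dream-")                         # leading/trailing hyphen is rejected
except click.BadParameter as exc:
    print("rejected:", exc)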
@@ -12,218 +12,315 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import getpass
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
import uuid
|
||||
from copy import deepcopy
|
||||
from os import environ, getenv, listdir, remove
|
||||
from os.path import dirname, getmtime, isdir, isfile, join
|
||||
from time import time
|
||||
|
||||
from platformio import __version__, exception, fs, proc
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_default_projects_dir
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
assert os.path.isdir(projects_dir)
|
||||
return os.path.abspath(projects_dir)
|
||||
import requests
|
||||
from lockfile import LockFailed, LockFile
|
||||
|
||||
from platformio import __version__, exception, util
|
||||
from platformio.exception import InvalidSettingName, InvalidSettingValue
|
||||
|
||||
DEFAULT_SETTINGS = {
|
||||
"check_platformio_interval": {
|
||||
"description": "Check for the new PlatformIO Core interval (days)",
|
||||
"value": 7,
|
||||
"description": "Check for the new PlatformIO interval (days)",
|
||||
"value": 3
|
||||
},
|
||||
"check_prune_system_threshold": {
|
||||
"description": "Check for pruning unnecessary data threshold (megabytes)",
|
||||
"value": 1024,
|
||||
"check_platforms_interval": {
|
||||
"description": "Check for the platform updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for HTTP API requests",
|
||||
"value": True,
|
||||
"check_libraries_interval": {
|
||||
"description": "Check for the library updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description": ("Telemetry service <https://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"value": True,
|
||||
"auto_update_platforms": {
|
||||
"description": "Automatically update platforms (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"auto_update_libraries": {
|
||||
"description": "Automatically update libraries (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"force_verbose": {
|
||||
"description": "Force verbose output when processing environments",
|
||||
"value": False,
|
||||
"value": False
|
||||
},
|
||||
"projects_dir": {
|
||||
"description": "Default location for PlatformIO projects (PlatformIO Home)",
|
||||
"value": get_default_projects_dir(),
|
||||
"validator": projects_dir_validate,
|
||||
"enable_ssl": {
|
||||
"description": "Enable SSL for PlatformIO Services",
|
||||
"value": False
|
||||
},
|
||||
"enable_proxy_strict_ssl": {
|
||||
"description": "Verify the proxy server certificate against the list of supplied CAs",
|
||||
"value": True,
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for API requests and Library Manager",
|
||||
"value": True
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description":
|
||||
("Telemetry service <http://docs.platformio.org/page/"
|
||||
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
|
||||
"value":
|
||||
True
|
||||
}
|
||||
}
|
||||
|
||||
SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
"pause_telemetry": False,
|
||||
}
|
||||
SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
|
||||
|
||||
|
||||
def resolve_state_path(conf_option_dir, file_name, ensure_dir_exists=True):
|
||||
state_dir = ProjectConfig.get_instance().get("platformio", conf_option_dir)
|
||||
if ensure_dir_exists and not os.path.isdir(state_dir):
|
||||
os.makedirs(state_dir)
|
||||
return os.path.join(state_dir, file_name)
|
||||
class State(object):
|
||||
|
||||
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
self.path = resolve_state_path("core_dir", "appstate.json")
|
||||
self._storage = {}
|
||||
self.path = join(util.get_home_dir(), "appstate.json")
|
||||
self._state = {}
|
||||
self._prev_state = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
|
||||
def __enter__(self):
|
||||
try:
|
||||
self._lock_state_file()
|
||||
if os.path.isfile(self.path):
|
||||
self._storage = fs.load_json(self.path)
|
||||
assert isinstance(self._storage, dict)
|
||||
except (
|
||||
AssertionError,
|
||||
ValueError,
|
||||
UnicodeDecodeError,
|
||||
exception.InvalidJSONFile,
|
||||
):
|
||||
self._storage = {}
|
||||
return self
|
||||
if isfile(self.path):
|
||||
self._state = util.load_json(self.path)
|
||||
except exception.PlatformioException:
|
||||
self._state = {}
|
||||
self._prev_state = deepcopy(self._state)
|
||||
return self._state
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
if self.modified:
|
||||
try:
|
||||
with open(self.path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(json.dumps(self._storage))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(
|
||||
os.path.dirname(self.path)
|
||||
) from exc
|
||||
if self._prev_state != self._state:
|
||||
with open(self.path, "w") as fp:
|
||||
if "dev" in __version__:
|
||||
json.dump(self._state, fp, indent=4)
|
||||
else:
|
||||
json.dump(self._state, fp)
|
||||
self._unlock_state_file()
|
||||
|
||||
def _lock_state_file(self):
|
||||
if not self.lock:
|
||||
return
|
||||
self._lockfile = LockFile(self.path)
|
||||
|
||||
if self._lockfile.is_locked() and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path)) from exc
|
||||
except LockFailed:
|
||||
raise exception.PlatformioException(
|
||||
"The directory `{0}` or its parent directory is not owned by "
|
||||
"the current user and PlatformIO can not store configuration "
|
||||
"data. \nPlease check the permissions and owner of that "
|
||||
"directory. Otherwise, please remove manually `{0}` "
|
||||
"directory and PlatformIO will create new from the current "
|
||||
"user.".format(dirname(self.path)))
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
|
||||
def __del__(self):
|
||||
self._unlock_state_file()
|
||||
|
||||
# Dictionary Proxy
|
||||
class ContentCache(object):
|
||||
|
||||
def as_dict(self):
|
||||
return self._storage
|
||||
def __init__(self, cache_dir=None):
|
||||
self.cache_dir = None
|
||||
self._db_path = None
|
||||
self._lockfile = None
|
||||
|
||||
def keys(self):
|
||||
return self._storage.keys()
|
||||
if not get_setting("enable_cache"):
|
||||
return
|
||||
|
||||
def get(self, key, default=True):
|
||||
return self._storage.get(key, default)
|
||||
self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
|
||||
self._db_path = join(self.cache_dir, "db.data")
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
self.modified = True
|
||||
return self._storage.update(*args, **kwargs)
|
||||
def __enter__(self):
|
||||
if not self._db_path or not isfile(self._db_path):
|
||||
return self
|
||||
|
||||
def clear(self):
|
||||
return self._storage.clear()
|
||||
self.delete()
|
||||
return self
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._storage[key]
|
||||
def __exit__(self, type_, value, traceback):
|
||||
pass
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.modified = True
|
||||
self._storage[key] = value
|
||||
def _lock_dbindex(self):
|
||||
if not self.cache_dir:
|
||||
os.makedirs(self.cache_dir)
|
||||
self._lockfile = LockFile(self.cache_dir)
|
||||
if self._lockfile.is_locked() and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
def __delitem__(self, key):
|
||||
self.modified = True
|
||||
del self._storage[key]
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except LockFailed:
|
||||
return False
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self._storage
|
||||
return True
|
||||
|
||||
def _unlock_dbindex(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
return True
|
||||
|
||||
def get_cache_path(self, key):
|
||||
assert len(key) > 3
|
||||
return join(self.cache_dir, key[-2:], key)
|
||||
|
||||
@staticmethod
|
||||
def key_from_args(*args):
|
||||
h = hashlib.md5()
|
||||
for data in args:
|
||||
h.update(str(data))
|
||||
return h.hexdigest()
|
||||
|
||||
def get(self, key):
|
||||
cache_path = self.get_cache_path(key)
|
||||
if not isfile(cache_path):
|
||||
return None
|
||||
with open(cache_path, "rb") as fp:
|
||||
data = fp.read()
|
||||
if data and data[0] in ("{", "["):
|
||||
return json.loads(data)
|
||||
return data
|
||||
|
||||
def set(self, key, data, valid):
|
||||
cache_path = self.get_cache_path(key)
|
||||
if isfile(cache_path):
|
||||
self.delete(key)
|
||||
if not data:
|
||||
return
|
||||
if not isdir(self.cache_dir):
|
||||
os.makedirs(self.cache_dir)
|
||||
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
|
||||
assert valid.endswith(tuple(tdmap.keys()))
|
||||
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
|
||||
|
||||
if not self._lock_dbindex():
|
||||
return False
|
||||
|
||||
if not isdir(dirname(cache_path)):
|
||||
os.makedirs(dirname(cache_path))
|
||||
with open(cache_path, "wb") as fp:
|
||||
if isinstance(data, (dict, list)):
|
||||
json.dump(data, fp)
|
||||
else:
|
||||
fp.write(str(data))
|
||||
with open(self._db_path, "a") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), cache_path))
|
||||
|
||||
return self._unlock_dbindex()
|
||||
|
||||
def delete(self, keys=None):
|
||||
""" Keys=None, delete expired items """
|
||||
if not keys:
|
||||
keys = []
|
||||
if not isinstance(keys, list):
|
||||
keys = [keys]
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
for line in fp.readlines():
|
||||
if "=" not in line:
|
||||
continue
|
||||
line = line.strip()
|
||||
expire, path = line.split("=")
|
||||
if time() < int(expire) and isfile(path) and \
|
||||
path not in paths_for_delete:
|
||||
newlines.append(line)
|
||||
continue
|
||||
found = True
|
||||
if isfile(path):
|
||||
try:
|
||||
remove(path)
|
||||
if not listdir(dirname(path)):
|
||||
util.rmtree_(dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
return True
|
||||
|
||||
def clean(self):
|
||||
if not self.cache_dir or not isdir(self.cache_dir):
|
||||
return
|
||||
util.rmtree_(self.cache_dir)
|
||||
|
||||
|
||||
def clean_cache():
|
||||
with ContentCache() as cc:
|
||||
cc.clean()
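
# Illustrative sketch, not referenced elsewhere: typical ContentCache usage.
# Each entry is stored under <cache_dir>/<last two key chars>/<key>, and
# db.data keeps one "expire_timestamp=cache_path" line per entry, which
# delete() scans to prune expired files. The URL-like key inputs and the
# "1d" TTL below are invented for illustration.
def _example_content_cache_usage():
    with ContentCache() as cc:
        key = cc.key_from_args("https://api.example.com/boards", "page=1")
        data = cc.get(key)
        if data is None:
            data = {"boards": ["uno", "nodemcuv2"]}  # e.g. a fetched response
            cc.set(key, data, valid="1d")  # keep for one day
        return data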
|
||||
|
||||
|
||||
def sanitize_setting(name, value):
    if name not in DEFAULT_SETTINGS:
        raise exception.InvalidSettingName(name)

    defdata = DEFAULT_SETTINGS[name]
    try:
        if "validator" in defdata:
            value = defdata["validator"](value)
        elif isinstance(defdata["value"], bool):
            if not isinstance(value, bool):
                value = str(value).lower() in ("true", "yes", "y", "1")
        elif isinstance(defdata["value"], int):
            value = int(value)
    except Exception as exc:
        raise exception.InvalidSettingValue(value, name) from exc
    return value
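
# Illustrative sketch of the sanitizer's behavior, not called anywhere; it
# assumes entries such as "enable_telemetry" (bool) and
# "check_platformio_interval" (int) exist in DEFAULT_SETTINGS.
def _example_sanitize_setting():
    assert sanitize_setting("enable_telemetry", "no") is False
    assert sanitize_setting("check_platformio_interval", "3") == 3
    # Unknown names raise exception.InvalidSettingName; a non-numeric value
    # for an int setting raises exception.InvalidSettingValue.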
|
||||
|
||||
|
||||
def get_state_item(name, default=None):
    with State() as state:
        return state.get(name, default)
|
||||
|
||||
|
||||
def set_state_item(name, value):
    with State(lock=True) as state:
        state[name] = value
        state.modified = True
||||
|
||||
|
||||
def delete_state_item(name):
    with State(lock=True) as state:
        if name in state:
            del state[name]
|
||||
|
||||
|
||||
def get_setting(name):
    _env_name = "PLATFORMIO_SETTING_%s" % name.upper()
    if _env_name in os.environ:
        return sanitize_setting(name, os.getenv(_env_name))

    with State() as state:
        if "settings" in state and name in state["settings"]:
            return state["settings"][name]

    return DEFAULT_SETTINGS[name]["value"]
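
# Illustrative lookup order, not called anywhere (the environment variable
# value and the "enable_telemetry" name are assumptions): the process
# environment wins, then the persisted state file, then DEFAULT_SETTINGS.
def _example_setting_precedence():
    os.environ["PLATFORMIO_SETTING_ENABLE_TELEMETRY"] = "false"
    assert get_setting("enable_telemetry") is False  # environment override

    del os.environ["PLATFORMIO_SETTING_ENABLE_TELEMETRY"]
    set_setting("enable_telemetry", True)
    assert get_setting("enable_telemetry") is True  # value from the state file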
|
||||
|
||||
|
||||
def set_setting(name, value):
    with State(lock=True) as state:
        if "settings" not in state:
            state["settings"] = {}
        state["settings"][name] = sanitize_setting(name, value)
        state.modified = True
|
||||
|
||||
|
||||
def reset_settings():
    with State(lock=True) as state:
        if "settings" in state:
            del state["settings"]
|
||||
|
||||
|
||||
def get_session_var(name, default=None):
|
||||
@@ -236,58 +333,28 @@ def set_session_var(name, value):
|
||||
|
||||
|
||||
def is_disabled_progressbar():
    return any([
        get_session_var("force_option"),
        proc.is_ci(),
        os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
    ])
|
||||
|
||||
|
||||
def get_cid():
    cid = get_state_item("cid")
    if cid:
        return cid

    uid = None
    if os.getenv("GITHUB_USER"):
        uid = os.getenv("GITHUB_USER")
    elif os.getenv("GITPOD_GIT_USER_NAME"):
        uid = os.getenv("GITPOD_GIT_USER_NAME")
    if not uid:
        uid = uuid.getnode()

    cid = str(uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest()))
    if IS_WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
        set_state_item("cid", cid)
        set_state_item("created_at", int(time.time()))
    return cid
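
# Minimal sketch of the derivation above with a made-up node value (not used
# anywhere): the same input always yields the same UUID string, so the client
# ID stays stable for a given machine or CI user.
def _example_stable_cid(node="203d1b7c9a42"):
    return str(uuid.UUID(bytes=hashlib.md5(node.encode("utf8")).digest()))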
|
||||
|
||||
|
||||
def get_project_id(project_dir):
|
||||
return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
data = [
|
||||
"PlatformIO/%s" % __version__,
|
||||
"CI/%d" % int(proc.is_ci()),
|
||||
"Container/%d" % int(proc.is_container()),
|
||||
]
|
||||
if get_session_var("caller_id"):
|
||||
data.append("Caller/%s" % get_session_var("caller_id"))
|
||||
if os.getenv("PLATFORMIO_IDE"):
|
||||
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
|
||||
data.append("Python/%s" % platform.python_version())
|
||||
data.append("Platform/%s" % platform.platform())
|
||||
if not get_setting("enable_telemetry"):
|
||||
data.append("Telemetry/0")
|
||||
return " ".join(data)
|
||||
|
||||
|
||||
def get_host_id():
|
||||
h = hashlib.sha1(hashlib_encode_data(get_cid()))
|
||||
try:
|
||||
username = getpass.getuser()
|
||||
h.update(hashlib_encode_data(username))
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def get_host_name():
|
||||
return str(socket.gethostname())[:255]
|
||||
|
||||
@@ -1,487 +0,0 @@
|
||||
{
|
||||
"$id": "https://example.com/library.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "library.json schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"maxLength": 50,
|
||||
"description": "A name of a library.\nMust be unique in the PlatformIO Registry\nShould be slug style for simplicity, consistency, and compatibility. Example: HelloWorld\nCan contain a-z, digits, and dashes (but not start/end with them)\nConsecutive dashes and [:;/,@<>] chars are not allowed.",
|
||||
"required": true
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"description": "A version of a current library source code. Can contain a-z, digits, dots or dash and should be Semantic Versioning compatible.",
|
||||
"required": true
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "The field helps users to identify and search for your library with a brief description. Describe the hardware devices (sensors, boards and etc.) which are suitable with it.",
|
||||
"required": true
|
||||
},
|
||||
"keywords": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Used for search by keyword. Helps to make your library easier to discover without people needing to know its name.\nThe keyword should be lowercased, can contain a-z, digits and dash (but not start/end with them). A list from the keywords can be specified with separator , or declared as Array.",
|
||||
"required": true
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "Home page of a library (if is different from repository url).",
|
||||
"required": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"git",
|
||||
"hg",
|
||||
"svn"
|
||||
],
|
||||
"description": "only “git”, “hg” or “svn” are supported"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "if is not specified, default branch will be used. This field will be ignored if tag/release exists with the value of version."
|
||||
}
|
||||
},
|
||||
"description": "The repository in which the source code can be found.",
|
||||
"required": false
|
||||
},
|
||||
"authors": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "An author contact information\nIf authors field is not defined, PlatformIO will try to fetch data from VCS provider (Github, Gitlab, etc) if repository is declared.",
|
||||
"required": false
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"description": "A SPDX license ID or SPDX Expression. You can check the full list of SPDX license IDs (see “Identifier” column).",
|
||||
"required": false
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible frameworks. The available framework names are defined in the Frameworks section.\nIf the library is compatible with the all frameworks, then do not declare this field or you use *",
|
||||
"required": false
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible development platforms. The available platform name are defined in Development Platforms section.\nIf the library is compatible with the all platforms, then do not declare this field or use *.\nPlatformIO does not check platforms for compatibility in default mode. See Compatibility Mode for details. If you need a strict checking for compatible platforms for a library, please set libCompatMode to strict.",
|
||||
"required": false
|
||||
},
|
||||
"headers": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "MyLibrary.h"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "FooCore.h, FooFeature.h"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of header files that can be included in a project source files using #include <...> directive.",
|
||||
"required": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"base": {
|
||||
"type": "string"
|
||||
},
|
||||
"files": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "A list of example patterns.",
|
||||
"required": "false"
|
||||
},
|
||||
"dependencies": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of dependent libraries that will be automatically installed.",
|
||||
"required": false
|
||||
},
|
||||
"export": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Export only files that matched declared patterns.\n* - matches everything\n? - matches any single character\n[seq] - matches any character in seq\n[!seq] - matches any character not in seq"
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Exclude the directories and files which match with exclude patterns."
|
||||
}
|
||||
},
|
||||
"description": "This option is useful if you need to exclude extra data (test code, docs, images, PDFs, etc). It allows one to reduce the size of the final archive.\nTo check which files will be included in the final packages, please use pio pkg pack command.",
|
||||
"required": false
|
||||
},
|
||||
"scripts": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script AFTER the package has been installed.\nRun a custom Python script located in the package “scripts” folder AFTER the package is installed. Please note that you don’t need to specify a Python interpreter for Python scripts"
|
||||
},
|
||||
"preuninstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script BEFORE the package is removed.\nRun a custom Bash script BEFORE the package is uninstalled. The script is declared as a list of command arguments and is located at the root of a package"
|
||||
}
|
||||
},
|
||||
"description": "Execute custom scripts during the special Package Management CLI life cycle events",
|
||||
"required": false
|
||||
},
|
||||
"build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"flags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Extra flags to control preprocessing, compilation, assembly, and linking processes. More details build_flags.\nKeep in mind when operating with the -I flag (directories to be searched for header files). The path should be relative to the root directory where the library.json manifest is located."
|
||||
},
|
||||
"unflags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Remove base/initial flags which were set by development platform. More details build_unflags."
|
||||
},
|
||||
"includeDir": {
|
||||
"type": "string",
|
||||
"description": "Custom directory to be searched for header files. A default value is include and means that folder is located at the root of a library.\nThe Library Dependency Finder (LDF) will pick a library automatically only when a project or other dependent libraries include any header file located in includeDir or srcDir.",
|
||||
"required": false
|
||||
},
|
||||
"srcDir": {
|
||||
"type": "string",
|
||||
"description": "Custom location of library source code. A default value is src and means that folder is located in the root of a library.",
|
||||
"required": "false"
|
||||
},
|
||||
"srcFilter": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Specify which source files should be included/excluded from build process. The path in filter should be relative to the srcDir option of a library.\nSee syntax for build_src_filter.\nPlease note that you can generate source filter “on-the-fly” using extraScript",
|
||||
"required": false
|
||||
},
|
||||
"extraScript": {
|
||||
"type": "string",
|
||||
"description": "Launch extra script before a build process.",
|
||||
"required": "false"
|
||||
},
|
||||
"libArchive": {
|
||||
"type": "boolean",
|
||||
"description": "Create an archive (*.a, static library) from the object files and link it into a firmware (program). This is default behavior of PlatformIO Build System (\"libArchive\": true).\nSetting \"libArchive\": false will instruct PlatformIO Build System to link object files directly (in-line). This could be useful if you need to override weak symbols defined in framework or other libraries.\nYou can disable library archiving globally using lib_archive option in “platformio.ini” (Project Configuration File).",
|
||||
"required": "false"
|
||||
},
|
||||
"libLDFMode": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"off"
|
||||
],
|
||||
"description": "“Manual mode”, does not process source files of a project and dependencies. Builds only the libraries that are specified in manifests (library.json, module.json) or using lib_deps option."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain"
|
||||
],
|
||||
"description": "[DEFAULT] Parses ALL C/C++ source files of the project and follows only by nested includes (#include ..., chain...) from the libraries. It also parses C, CC, CPP files from libraries which have the same name as included header file. Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep"
|
||||
],
|
||||
"description": "Parses ALL C/C++ source files of the project and parses ALL C/C++ source files of the each found dependency (recursively). Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain+"
|
||||
],
|
||||
"description": "The same behavior as for the chain but evaluates C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep+"
|
||||
],
|
||||
"description": "The same behavior as for the deep but evaluates C/C++ Preprocessor conditional syntax."
|
||||
}
|
||||
],
|
||||
"description": "Specify Library Dependency Finder Mode. See Dependency Finder Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"libCompatMode": {
|
||||
"type": "string",
|
||||
"description": "Specify Library Compatibility Mode. See Compatibility Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"builder": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"PlatformIOLibBuilder"
|
||||
],
|
||||
"description": "Default Builder"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"ArduinoLibBuilder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"MbedLibBuilder"
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "Override default PlatformIOLibBuilder with another builder.",
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
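
To make the schema concrete, the following sketch writes a minimal manifest that should validate against it; every field value (library name, URL, author, version) is invented for illustration.

import json

manifest = {
    "name": "HelloWorld",
    "version": "1.0.0",
    "description": "Demo library that toggles a GPIO pin.",
    "keywords": ["demo", "gpio"],
    "repository": {"type": "git", "url": "https://example.com/hello-world.git"},
    "authors": [{"name": "Jane Doe", "email": "jane@example.com", "maintainer": True}],
    "license": "Apache-2.0",
    "frameworks": "*",
    "platforms": "*",
    "headers": "HelloWorld.h",
    "export": {"exclude": ["docs", "tests"]},
    "build": {"srcDir": "src", "libArchive": True},
}

with open("library.json", "w", encoding="utf8") as fp:
    json.dump(manifest, fp, indent=2)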
|
||||
@@ -1,183 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
#####################################################################################
|
||||
#
|
||||
# INSTALLATION
|
||||
#
|
||||
# Please visit > https://docs.platformio.org/en/latest/core/installation/udev-rules.html
|
||||
#
|
||||
#####################################################################################
|
||||
|
||||
#
|
||||
# Boards
|
||||
#
|
||||
|
||||
# CP210X USB UART
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea[67][013]", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="80a9", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FT231XS USB UART
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6015", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Prolific Technology, Inc. PL2303 Serial Port
|
||||
ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino boards
|
||||
ATTRS{idVendor}=="2341", ATTRS{idProduct}=="[08][023]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="2a03", ATTRS{idProduct}=="[08][02]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino SAM-BA
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="6124", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{MTP_NO_PROBE}="1"
|
||||
|
||||
# Digistump boards
|
||||
ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Maple with DFU
|
||||
ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBtiny
|
||||
ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBasp V2.0
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="05dc", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Teensy boards
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789A]?", ENV{MTP_NO_PROBE}="1"
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789ABCD]?", MODE:="0666"
|
||||
KERNEL=="ttyACM*", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", MODE:="0666"
|
||||
|
||||
# TI Stellaris Launchpad
|
||||
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI MSP430 Launchpad
|
||||
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# GD32V DFU Bootloader
|
||||
ATTRS{idVendor}=="28e9", ATTRS{idProduct}=="0189", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FireBeetle-ESP32
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7522", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Wio Terminal
|
||||
ATTRS{idVendor}=="2886", ATTRS{idProduct}=="[08]02d", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
#
|
||||
|
||||
# Black Magic Probe
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic GDB Server", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# opendous and estick
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232/FT245/FT2232/FT232H/FT4232
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="60[01][104]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# DISTORTEC JTAG-lock-pick Tiny 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TUMPA, TUMPA Lite
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a9[89]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# XDS100v2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris Evaluation Board FTDI (several)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd[9a]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# egnite Turtelizer 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Section5 ICEbear
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c14[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey and JTAGkey-tiny
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI ICDI
|
||||
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="c32a", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STLink probes
|
||||
ATTRS{idVendor}=="0483", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Hilscher NXHX Boards
|
||||
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="0028", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Hitex probes
|
||||
ATTRS{idVendor}=="0640", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Altera USB Blaster
|
||||
ATTRS{idVendor}=="09fb", ATTRS{idProduct}=="6001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey-HiSpeed
|
||||
ATTRS{idVendor}=="0fbb", ATTRS{idProduct}=="1000", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# SEGGER J-Link
|
||||
ATTRS{idVendor}=="1366", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raisonance RLink
|
||||
ATTRS{idVendor}=="138e", ATTRS{idProduct}=="9000", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Debug Board for Neo1973
|
||||
ATTRS{idVendor}=="1457", ATTRS{idProduct}=="5118", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Olimex probes
|
||||
ATTRS{idVendor}=="15ba", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBprog with OpenOCD firmware
|
||||
ATTRS{idVendor}=="1781", ATTRS{idProduct}=="0c63", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris In-Circuit Debug Interface (ICDI) Board
|
||||
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Marvell Sheevaplug
|
||||
ATTRS{idVendor}=="9e88", ATTRS{idProduct}=="9e8f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Keil Software, Inc. ULink
|
||||
ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# CMSIS-DAP compatible adapters
|
||||
ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
@@ -12,239 +12,167 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from os import environ
|
||||
from os.path import join
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DEFAULT_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AllowSubstExceptions # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import Default # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
|
||||
AllowSubstExceptions, AlwaysBuild,
|
||||
DefaultEnvironment, Variables)
|
||||
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
from platformio import util
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
# allow common variables from INI file
|
||||
commonvars = Variables(None)
|
||||
commonvars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("PROJECT_CONFIG",),
|
||||
("EXTRA_SCRIPTS",),
|
||||
("PIOENV",),
|
||||
("PIOTEST_RUNNING_NAME",),
|
||||
("PIOTEST",),
|
||||
("PIOPLATFORM",),
|
||||
("PIOFRAMEWORK",),
|
||||
|
||||
# build options
|
||||
("BUILD_FLAGS",),
|
||||
("SRC_BUILD_FLAGS",),
|
||||
("BUILD_UNFLAGS",),
|
||||
("SRC_FILTER",),
|
||||
|
||||
# library options
|
||||
("LIB_LDF_MODE",),
|
||||
("LIB_COMPAT_MODE",),
|
||||
("LIB_DEPS",),
|
||||
("LIB_IGNORE",),
|
||||
("LIB_EXTRA_DIRS",),
|
||||
("LIB_ARCHIVE",),
|
||||
|
||||
# board options
|
||||
("BOARD",),
|
||||
("BOARD_MCU",),
|
||||
("BOARD_F_CPU",),
|
||||
("BOARD_F_FLASH",),
|
||||
("BOARD_FLASH_MODE",),
|
||||
|
||||
# upload options
|
||||
("UPLOAD_PORT",),
|
||||
("PROGRAM_ARGS",),
|
||||
)
|
||||
("UPLOAD_PROTOCOL",),
|
||||
("UPLOAD_SPEED",),
|
||||
("UPLOAD_FLAGS",),
|
||||
("UPLOAD_RESETMETHOD",)
|
||||
|
||||
) # yapf: disable
|
||||
|
||||
MULTILINE_VARS = [
|
||||
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
|
||||
"BUILD_UNFLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"
|
||||
]
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
tools=[
|
||||
"ar",
|
||||
"cc",
|
||||
"c++",
|
||||
"link",
|
||||
"piohooks",
|
||||
"pioasm",
|
||||
"piobuild",
|
||||
"pioproject",
|
||||
"pioplatform",
|
||||
"piotest",
|
||||
"piotarget",
|
||||
"piolib",
|
||||
"pioupload",
|
||||
"piosize",
|
||||
"pioino",
|
||||
"piomisc",
|
||||
"piointegration",
|
||||
"piomaxlen",
|
||||
],
|
||||
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
# Propagating External Environment
|
||||
ENV=os.environ,
|
||||
UNIX_TIME=int(time()),
|
||||
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
|
||||
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
|
||||
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PROGNAME="program",
|
||||
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROG_PATH="$PROGPATH", # deprecated
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
)
|
||||
"ar", "as", "gcc", "g++", "gnulink", "platformio", "pioplatform",
|
||||
"piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
|
||||
], # yapf: disable
|
||||
toolpath=[join(util.get_source_dir(), "builder", "tools")],
|
||||
variables=commonvars,
|
||||
|
||||
# Propagating External Environment
|
||||
PIOVARIABLES=commonvars.keys(),
|
||||
ENV=environ,
|
||||
UNIX_TIME=int(time()),
|
||||
PIOHOME_DIR=util.get_home_dir(),
|
||||
PROJECT_DIR=util.get_project_dir(),
|
||||
PROJECTSRC_DIR=util.get_projectsrc_dir(),
|
||||
PROJECTTEST_DIR=util.get_projecttest_dir(),
|
||||
PROJECTDATA_DIR=util.get_projectdata_dir(),
|
||||
PROJECTPIOENVS_DIR=util.get_projectpioenvs_dir(),
|
||||
BUILD_DIR=join("$PROJECTPIOENVS_DIR", "$PIOENV"),
|
||||
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
|
||||
LIBSOURCE_DIRS=[
|
||||
util.get_projectlib_dir(),
|
||||
util.get_projectlibdeps_dir(),
|
||||
join("$PIOHOME_DIR", "lib")
|
||||
],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=util.get_pythonexe_path())
|
||||
|
||||
# Declare command verbose messages
command_strings = dict(
    ARCOM="Archiving",
    LINKCOM="Linking",
    RANLIBCOM="Indexing",
    ASCOM="Compiling",
    ASPPCOM="Compiling",
    CCCOM="Compiling",
    CXXCOM="Compiling",
)
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
    for name, value in command_strings.items():
        DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
env.SConscriptChdir(False)
|
||||
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
**{
|
||||
key: PlatformBase.decode_scons_arg(env[key])
|
||||
for key in list(clivars.keys())
|
||||
if key in env
|
||||
}
|
||||
)
|
||||
# decode common variables
|
||||
for k in commonvars.keys():
|
||||
if k in env:
|
||||
env[k] = base64.b64decode(env[k])
|
||||
if k in MULTILINE_VARS:
|
||||
env[k] = util.parse_conf_multi_values(env[k])
|
||||
|
||||
# Setup project optional directories
|
||||
config = env.GetProjectConfig()
|
||||
app.set_session_var("custom_project_conf", config.path)
|
||||
|
||||
env.Replace(
|
||||
PROJECT_DIR=get_project_dir(),
|
||||
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
|
||||
PROJECT_PACKAGES_DIR=config.get("platformio", "packages_dir"),
|
||||
PROJECT_WORKSPACE_DIR=config.get("platformio", "workspace_dir"),
|
||||
PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
|
||||
PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
|
||||
PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
|
||||
PROJECTSRC_DIR="$PROJECT_SRC_DIR", # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
|
||||
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
|
||||
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
|
||||
BUILD_TYPE=env.GetBuildType(),
|
||||
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
|
||||
LIBSOURCE_DIRS=[
|
||||
config.get("platformio", "lib_dir"),
|
||||
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get("platformio", "globallib_dir"),
|
||||
],
|
||||
)
|
||||
|
||||
if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConsignFile(
|
||||
os.path.join(
|
||||
"$BUILD_CACHE_DIR" if env.subst("$BUILD_CACHE_DIR") else "$BUILD_DIR",
|
||||
".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]),
|
||||
)
|
||||
)
|
||||
|
||||
env.SConscript(env.GetExtraScripts("pre"), exports="env")
|
||||
|
||||
if env.IsCleanTarget():
    env.CleanProject(fullclean=int(ARGUMENTS.get("FULLCLEAN", 0)))
    env.Exit(0)
|
||||
|
||||
# Handle custom variables from system environment
|
||||
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
|
||||
"UPLOAD_PORT", "UPLOAD_FLAGS", "LIB_EXTRA_DIRS"):
|
||||
k = "PLATFORMIO_%s" % var
|
||||
if k not in environ:
|
||||
continue
|
||||
if var in ("UPLOAD_PORT", ):
|
||||
env[var] = environ.get(k)
|
||||
continue
|
||||
env.Append(**{var: util.parse_conf_multi_values(environ.get(k))})
|
||||
|
||||
# Configure extra library source directories for LDF
|
||||
if util.get_project_optional_dir("lib_extra_dirs"):
|
||||
env.Prepend(LIBSOURCE_DIRS=util.parse_conf_multi_values(
|
||||
util.get_project_optional_dir("lib_extra_dirs")))
|
||||
env.Prepend(LIBSOURCE_DIRS=env.get("LIB_EXTRA_DIRS", []))
|
||||
|
||||
env.LoadPioPlatform(commonvars)
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(join("$PROJECTPIOENVS_DIR", ".sconsign.dblite"))
|
||||
|
||||
for item in env.GetPreExtraScripts():
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
|
||||
|
||||
if "UPLOAD_FLAGS" in env:
|
||||
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
env.Append(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
|
||||
env.SConscript(env.GetExtraScripts("post"), exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and not (
|
||||
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
):
|
||||
env.Depends("upload", "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
Default(_new_targets)
|
||||
Default("checkprogsize")
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Alias("compiledb", env.CompilationDatabase("$COMPILATIONDB_PATH"))
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
"upload",
|
||||
env.VerboseAction(
|
||||
lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol...",
|
||||
),
|
||||
)
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
env.ProcessDelayedActions()
|
||||
|
||||
##############################################################################
|
||||
for item in env.GetPostExtraScripts():
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
click.echo(env.Dump())
|
||||
print env.Dump()
|
||||
env.Exit(0)
|
||||
|
||||
if env.IsIntegrationDump():
|
||||
projenv = None
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
try:
|
||||
Import("projenv")
|
||||
except: # pylint: disable=bare-except
|
||||
projenv = env
|
||||
data = projenv.DumpIntegrationData(env)
|
||||
# dump to file for the further reading by project.helpers.load_build_metadata
|
||||
with open(
|
||||
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
|
||||
mode="w",
|
||||
encoding="utf8",
|
||||
) as fp:
|
||||
json.dump(data, fp)
|
||||
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
|
||||
env.Exit(0)
|
||||
|
||||
if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"sizedata",
|
||||
DEFAULT_TARGETS,
|
||||
env.VerboseAction(env.DumpSizeData, "Generating memory usage report..."),
|
||||
)
|
||||
)
|
||||
|
||||
Default("sizedata")
|
||||
|
||||
# issue #4604: process targets sequentially
|
||||
for index, target in enumerate(
|
||||
[t for t in COMMAND_LINE_TARGETS if not t.startswith("__")][1:]
|
||||
):
|
||||
env.Depends(target, COMMAND_LINE_TARGETS[index])
|
||||
print "\n%s\n" % json.dumps(env.DumpIDEData())
|
||||
env.Exit(0)
|
||||
except UnicodeDecodeError:
|
||||
sys.stderr.write(
|
||||
"\nUnicodeDecodeError: Non-ASCII characters found in build "
|
||||
"environment\n"
|
||||
"See explanation in FAQ > Troubleshooting > Building\n"
|
||||
"http://docs.platformio.org/page/faq.html\n\n")
|
||||
env.Exit(1)
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import SCons.Tool.asm # pylint: disable=import-error
|
||||
|
||||
#
|
||||
# Resolve https://github.com/platformio/platformio-core/issues/3917
|
||||
# Avoid forcing .S to bare assembly on Windows OS
|
||||
#
|
||||
|
||||
if ".S" in SCons.Tool.asm.ASSuffixes:
|
||||
SCons.Tool.asm.ASSuffixes.remove(".S")
|
||||
if ".S" not in SCons.Tool.asm.ASPPSuffixes:
|
||||
SCons.Tool.asm.ASPPSuffixes.append(".S")
|
||||
|
||||
|
||||
generate = SCons.Tool.asm.generate
|
||||
exists = SCons.Tool.asm.exists
|
||||
@@ -1,402 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import sys
|
||||
|
||||
from SCons import Builder, Util # pylint: disable=import-error
|
||||
from SCons.Node import FS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_C_EXT = ["c"]
|
||||
SRC_CXX_EXT = ["cc", "cpp", "cxx", "c++"]
|
||||
SRC_BUILD_EXT = SRC_C_EXT + SRC_CXX_EXT + SRC_ASM_EXT
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % os.sep, "-<.svn%s>" % os.sep]
|
||||
|
||||
|
||||
def scons_patched_match_splitext(path, suffixes=None):
|
||||
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
|
||||
tokens = Util.splitext(path)
|
||||
if suffixes and tokens[1] and tokens[1] in suffixes:
|
||||
return (path, tokens[1])
|
||||
return tokens
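
# Rough sanity check of the patch above, not called anywhere (the file names
# are hypothetical and SCons must be installed): with ".cpp" registered among
# the builder suffixes the whole file name is kept as the stem, so SCons
# appends $OBJSUFFIX and emits Arduino-style object names such as
# "main.cpp.o" instead of "main.o".
def _example_patched_splitext():
    assert scons_patched_match_splitext("main.cpp", suffixes=[".cpp"]) == ("main.cpp", ".cpp")
    assert scons_patched_match_splitext("main.cpp") == ("main", ".cpp")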
|
||||
|
||||
|
||||
def GetBuildType(env):
|
||||
modes = []
|
||||
if (
|
||||
set(["__debug", "sizedata"]) # sizedata = for memory inspection
|
||||
& set(COMMAND_LINE_TARGETS)
|
||||
or env.GetProjectOption("build_type") == "debug"
|
||||
):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return ", ".join(modes or ["release"])
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
env.ProcessProgramDeps()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
env.ProcessProjectDeps()
|
||||
|
||||
# append into the beginning a main LD script
|
||||
if env.get("LDSCRIPT_PATH") and not any("-Wl,-T" in f for f in env["LINKFLAGS"]):
|
||||
env.Prepend(LINKFLAGS=["-T", env.subst("$LDSCRIPT_PATH")])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if (
|
||||
env.get("LIBS")
|
||||
and env.GetCompilerType() == "gcc"
|
||||
and (env.PioPlatform().is_embedded() or not IS_MACOS)
|
||||
):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(env.subst("$PROGPATH"), env["PIOBUILDFILES"])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize",
|
||||
program,
|
||||
env.VerboseAction(env.CheckUploadSize, "Checking size $PIOMAINPROG"),
|
||||
)
|
||||
)
|
||||
|
||||
print("Building in %s mode" % env["BUILD_TYPE"])
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ProcessProgramDeps(env):
|
||||
def _append_pio_macros():
|
||||
core_version = pepver_to_semver(__version__)
|
||||
env.AppendUnique(
|
||||
CPPDEFINES=[
|
||||
(
|
||||
"PLATFORMIO",
|
||||
int(
|
||||
"{0:02d}{1:02d}{2:02d}".format(
|
||||
core_version.major, core_version.minor, core_version.patch
|
||||
)
|
||||
),
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
_append_pio_macros()
|
||||
|
||||
env.PrintConfiguration()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
if "debug" in env["BUILD_TYPE"]:
|
||||
env.ConfigureDebugTarget()
|
||||
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" not in COMMAND_LINE_TARGETS:
|
||||
return
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]) or '"' in env[cmd]:
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
if " " in env[cmd]: # issue #4998: Space in compilator path
|
||||
env[cmd] = f'"{env[cmd]}"'
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
|
||||
|
||||
def ProcessProjectDeps(env):
|
||||
plb = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=plb.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: plb.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if plb.env.get(key)
|
||||
}
|
||||
)
|
||||
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
build_files_before_nums = len(env.get("PIOBUILDFILES", []))
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
|
||||
)
|
||||
if len(env.get("PIOBUILDFILES", [])) - build_files_before_nums < 1:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your test suites "
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_TEST_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
if "test" not in env["BUILD_TYPE"] or env.GetProjectOption("test_build_src"):
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
|
||||
)
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_SRC_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
flags = [flags]
|
||||
result = {}
|
||||
for raw in flags:
|
||||
for key, value in env.ParseFlags(str(raw)).items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
result[key].extend(value)
|
||||
|
||||
cppdefines = []
|
||||
for item in result["CPPDEFINES"]:
|
||||
if not Util.is_Sequence(item):
|
||||
cppdefines.append(item)
|
||||
continue
|
||||
name, value = item[:2]
|
||||
if '"' in value:
|
||||
value = value.replace('"', '\\"')
|
||||
elif value.isdigit():
|
||||
value = int(value)
|
||||
elif value.replace(".", "", 1).isdigit():
|
||||
value = float(value)
|
||||
cppdefines.append((name, value))
|
||||
result["CPPDEFINES"] = cppdefines
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
p = env.subst(p)
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative LIBs
|
||||
for i, l in enumerate(result.get("LIBS", [])):
|
||||
if isinstance(l, FS.File):
|
||||
result["LIBS"][i] = os.path.abspath(l.get_path())
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.subst(f[1].get_path()))
|
||||
|
||||
return result
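
# Standalone sketch of the CPPDEFINES coercion applied above; this is not the
# SCons API itself, and the define names/values are hypothetical.
def _coerce_define_example(item):
    if not isinstance(item, (tuple, list)):
        return item
    name, value = item[:2]
    if '"' in value:
        value = value.replace('"', '\\"')
    elif value.isdigit():
        value = int(value)
    elif value.replace(".", "", 1).isdigit():
        value = float(value)
    return (name, value)


# _coerce_define_example(("FW_VERSION", "12"))   -> ("FW_VERSION", 12)
# _coerce_define_example(("BAUD_RATE", "115.2")) -> ("BAUD_RATE", 115.2)
# _coerce_define_example("DEBUG")                -> "DEBUG"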
|
||||
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
env.Append(**env.ParseFlagsExtended(flags))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -U option // Issue #191
|
||||
undefines = [
|
||||
u
|
||||
for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, string_types) and u.startswith("-U")
|
||||
]
|
||||
if undefines:
|
||||
for undef in undefines:
|
||||
env["CCFLAGS"].remove(undef)
|
||||
if undef[2:] in env["CPPDEFINES"]:
|
||||
env["CPPDEFINES"].remove(undef[2:])
|
||||
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
|
||||
|
||||
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
unflag_scopes = tuple(set(["ASPPFLAGS"] + list(parsed.keys())))
|
||||
for scope in unflag_scopes:
|
||||
for unflags in parsed.values():
|
||||
for unflag in unflags:
|
||||
for current in list(env.get(scope, [])):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
not isinstance(unflag, (tuple, list))
|
||||
and isinstance(current, (tuple, list))
|
||||
and unflag == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[scope].remove(current)
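# Note on the removal above: the un-flags typically originate from the
# `build_unflags` project option, e.g. in platformio.ini (illustrative values):
#
#     [env:uno]
#     build_unflags = -Os -std=gnu++11
#
# Each parsed un-flag is matched against every scope listed in `unflag_scopes`
# and removed wherever it occurs, either as a plain flag or as the first
# element of a (flag, value) pair.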
|
||||
|
||||
|
||||
def StringifyMacro(env, value): # pylint: disable=unused-argument
|
||||
return '\\"%s\\"' % value.replace('"', '\\\\\\"')
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None, src_exts=None):
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
src_exts = src_exts or (SRC_BUILD_EXT + SRC_HEADER_EXT)
|
||||
return fs.match_src_files(env.subst(src_dir), src_filter, src_exts)
|
||||
|
||||
|
||||
def CollectBuildFiles(
|
||||
env, variant_dir, src_dir, src_filter=None, duplicate=False
|
||||
): # pylint: disable=too-many-locals
|
||||
sources = []
|
||||
variants = []
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
if src_dir.endswith(os.sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter, SRC_BUILD_EXT):
|
||||
_reldir = os.path.dirname(item)
|
||||
_src_dir = os.path.join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = os.path.join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
|
||||
if _var_dir not in variants:
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
|
||||
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
|
||||
if not middlewares:
|
||||
return sources
|
||||
|
||||
new_sources = []
|
||||
for node in sources:
|
||||
new_node = node
|
||||
for callback, pattern in middlewares:
|
||||
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
|
||||
continue
|
||||
if callback.__code__.co_argcount == 2:
|
||||
new_node = callback(env, new_node)
|
||||
else:
|
||||
new_node = callback(new_node)
|
||||
if not new_node:
|
||||
break
|
||||
if new_node:
|
||||
new_sources.append(new_node)
|
||||
|
||||
return new_sources
|
||||
|
||||
|
||||
def AddBuildMiddleware(env, callback, pattern=None):
|
||||
env.Append(__PIO_BUILD_MIDDLEWARES=[(callback, pattern)])
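# Usage sketch for the middleware hook above -- this would live in a project
# `extra_scripts` file, not in this module; the pattern and file name are
# illustrative assumptions.
Import("env")  # SCons/PlatformIO construction environment

def drop_incompatible_stub(env, node):
    # Returning None removes the node from the build; returning a node
    # (possibly a transformed one) keeps it.
    if node.name == "broken_stub.c":
        return None
    return node

env.AddBuildMiddleware(drop_incompatible_stub, "*/vendor/*")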
|
||||
|
||||
|
||||
def BuildFrameworks(env, frameworks):
|
||||
if not frameworks:
|
||||
return
|
||||
|
||||
if "BOARD" not in env:
|
||||
sys.stderr.write(
|
||||
"Please specify `board` in `platformio.ini` to use "
|
||||
"with '%s' framework\n" % ", ".join(frameworks)
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
supported_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
for name in frameworks:
|
||||
if name == "arduino":
|
||||
# Arduino IDE appends .o to the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if name in supported_frameworks:
|
||||
SConscript(env.GetFrameworkScript(name), exports="env")
|
||||
else:
|
||||
sys.stderr.write("Error: This board doesn't support %s framework!\n" % name)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
return env.StaticLibrary(env.subst(variant_dir), nodes)
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
if env.get("PIOMAINPROG"):
|
||||
sys.stderr.write(
|
||||
"Error: The main program is already constructed and the inline "
|
||||
"source files are not allowed. Please use `env.BuildLibrary(...)` "
|
||||
"or PRE-type script instead."
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[
|
||||
env.Object(node) if isinstance(node, FS.File) else node for node in nodes
|
||||
]
|
||||
)
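# Hedged sketch of typical usage from a framework build script (FRAMEWORK_DIR
# is an assumed variable, shown only for illustration): project sources are
# compiled straight into objects with BuildSources(), while reusable cores are
# archived with BuildLibrary() and linked in.
framework_lib = env.BuildLibrary(
    os.path.join("$BUILD_DIR", "FrameworkArduino"),
    os.path.join(FRAMEWORK_DIR, "cores", "arduino"),
)
env.Prepend(LIBS=[framework_lib])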
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(StringifyMacro)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(AddBuildMiddleware)
|
||||
env.AddMethod(BuildFrameworks)
|
||||
env.AddMethod(BuildLibrary)
|
||||
env.AddMethod(BuildSources)
|
||||
return env
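# The exists()/generate() pair follows the standard SCons "tool" protocol:
# the builder loads this module by name and generate() attaches the helpers
# via env.AddMethod(). A minimal sketch of how such a tool is pulled in
# (the tool name and toolpath below are assumptions for illustration):
from SCons.Environment import Environment

env = Environment(tools=["piobuild"], toolpath=["platformio/builder/tools"])
env.BuildFrameworks(["arduino"])  # now available as an environment method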
|
||||
@@ -1,50 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
def AddActionWrapper(handler):
|
||||
def wraps(env, files, action):
|
||||
if not isinstance(files, (list, tuple, set)):
|
||||
files = [files]
|
||||
known_nodes = []
|
||||
unknown_files = []
|
||||
for item in files:
|
||||
nodes = env.arg2nodes(item, env.fs.Entry)
|
||||
if nodes and nodes[0].exists():
|
||||
known_nodes.extend(nodes)
|
||||
else:
|
||||
unknown_files.append(item)
|
||||
if unknown_files:
|
||||
env.Append(**{"_PIO_DELAYED_ACTIONS": [(handler, unknown_files, action)]})
|
||||
if known_nodes:
|
||||
return handler(known_nodes, action)
|
||||
return []
|
||||
|
||||
return wraps
|
||||
|
||||
|
||||
def ProcessDelayedActions(env):
|
||||
for func, nodes, action in env.get("_PIO_DELAYED_ACTIONS", []):
|
||||
func(nodes, action)
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.Replace(**{"_PIO_DELAYED_ACTIONS": []})
|
||||
env.AddMethod(AddActionWrapper(env.AddPreAction), "AddPreAction")
|
||||
env.AddMethod(AddActionWrapper(env.AddPostAction), "AddPostAction")
|
||||
env.AddMethod(ProcessDelayedActions)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
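# Hedged usage sketch (extra script context; the callback body is
# illustrative): thanks to the wrapper above, the target may not exist yet at
# registration time -- it is queued and replayed later by
# env.ProcessDelayedActions().
Import("env")

def after_build(target, source, env):  # SCons passes target nodes first
    print("Firmware ready: %s" % target[0].get_abspath())

env.AddPostAction("$BUILD_DIR/${PROGNAME}.elf", after_build)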
|
||||
120
platformio/builder/tools/pioide.py
Normal file
@@ -0,0 +1,120 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from glob import glob
|
||||
from os.path import join
|
||||
|
||||
from SCons.Defaults import processDefines
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
def dump_includes(env):
|
||||
includes = []
|
||||
|
||||
for item in env.get("CPPPATH", []):
|
||||
includes.append(env.subst(item))
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
includes.extend(lb.get_inc_dirs())
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = util.glob_escape(p.get_package_dir(name))
|
||||
toolchain_incglobs = [
|
||||
join(toolchain_dir, "*", "include*"),
|
||||
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes.extend(glob(g))
|
||||
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
if unity_dir:
|
||||
includes.append(unity_dir)
|
||||
|
||||
return includes
|
||||
|
||||
|
||||
def dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
defines.append(env.subst(item).replace('\\', ''))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env['PIOPLATFORM'] == "atmelavr":
|
||||
defines.append(
|
||||
"__AVR_%s__" % env.BoardConfig().get("build.mcu").upper()
|
||||
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny"))
|
||||
return defines
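# For reference, CPPDEFINES mixes bare names with (name, value) pairs and
# processDefines() flattens both into strings; a small standalone example
# with made-up values:
#
#     from SCons.Defaults import processDefines
#     from SCons.Environment import Environment
#
#     env = Environment()
#     env.Append(CPPDEFINES=["F_CPU", ("ARDUINO", 10805)])
#     processDefines(env["CPPDEFINES"])  # -> ["F_CPU", "ARDUINO=10805"]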
|
||||
|
||||
|
||||
def DumpIDEData(env):
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
|
||||
data = {
|
||||
"libsource_dirs":
|
||||
[env.subst(l) for l in env.get("LIBSOURCE_DIRS", [])],
|
||||
"defines":
|
||||
dump_defines(env),
|
||||
"includes":
|
||||
dump_includes(env),
|
||||
"cc_flags":
|
||||
env.subst(LINTCCOM),
|
||||
"cxx_flags":
|
||||
env.subst(LINTCXXCOM),
|
||||
"cc_path":
|
||||
util.where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path":
|
||||
util.where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path":
|
||||
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path":
|
||||
env.subst("$PROG_PATH")
|
||||
}
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({
|
||||
"cc_flags": env_.subst(LINTCCOM),
|
||||
"cxx_flags": env_.subst(LINTCXXCOM)
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(DumpIDEData)
|
||||
return env
|
||||
@@ -1,256 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import atexit
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
|
||||
|
||||
class InoToCPPConverter:
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""",
|
||||
re.X | re.M | re.I,
|
||||
)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
self._safe_encoding = None
|
||||
|
||||
def read_safe_contents(self, path):
|
||||
error_reported = False
|
||||
for encoding in (
|
||||
"utf-8",
|
||||
None,
|
||||
get_filesystem_encoding(),
|
||||
get_locale_encoding(),
|
||||
"latin-1",
|
||||
):
|
||||
try:
|
||||
with io.open(path, encoding=encoding) as fp:
|
||||
contents = fp.read()
|
||||
self._safe_encoding = encoding
|
||||
return contents
|
||||
except UnicodeDecodeError:
|
||||
if not error_reported:
|
||||
error_reported = True
|
||||
click.secho(
|
||||
"Unicode decode error has occurred, please remove invalid "
|
||||
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
|
||||
% path,
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
return ""
|
||||
|
||||
def write_safe_contents(self, path, contents):
|
||||
with io.open(
|
||||
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
|
||||
) as fp:
|
||||
return fp.write(contents)
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = self.read_safe_contents(node.get_path())
|
||||
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = re.sub(r"[\"\'\;]+", "", self._main_ino) + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
contents = self.read_safe_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
self.write_safe_contents(out_file, self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = tempfile.mkstemp()[1]
|
||||
self.write_safe_contents(tmp_path, contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path
|
||||
),
|
||||
"Converting " + os.path.basename(out_file[:-4]),
|
||||
)
|
||||
)
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return os.path.isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
if stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append(
|
||||
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
|
||||
)
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
|
||||
def _parse_preproc_line_num(line):
|
||||
if not line.startswith("#"):
|
||||
return None
|
||||
tokens = line.split(" ", 3)
|
||||
if len(tokens) > 2 and tokens[1].isdigit():
|
||||
return int(tokens[1])
|
||||
return None
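# The lines handled here are GCC preprocessor linemarkers, e.g.
#     # 42 "src/main.ino"
# for which the method returns 42; any non-marker line yields None
# (illustrative input, not taken from a real sketch).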
|
||||
|
||||
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (
|
||||
set([match.group(2).strip(), match.group(3).strip()])
|
||||
& reserved_keywords
|
||||
):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents) or []
|
||||
|
||||
# skip already declared prototypes
|
||||
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
|
||||
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
|
||||
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
prototype_names = set(m.group(3).strip() for m in prototypes)
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos],
|
||||
re.M,
|
||||
)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append(
|
||||
'#line %d "%s"'
|
||||
% (
|
||||
self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/"),
|
||||
)
|
||||
)
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def FindInoNodes(env):
|
||||
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
return env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||
os.path.join(src_dir, "*.pde")
|
||||
)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
ino_nodes = env.FindInoNodes()
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if os.path.isfile(path):
|
||||
os.remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(FindInoNodes)
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
@@ -1,188 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
|
||||
import SCons.Defaults # pylint: disable=import-error
|
||||
import SCons.Subst # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
|
||||
from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def IsIntegrationDump(_):
|
||||
return set(["__idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
result["compatlib"].extend(
|
||||
[os.path.abspath(inc) for inc in lb.get_include_dirs()]
|
||||
)
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for pkg in p.get_installed_packages(with_optional=False):
|
||||
if p.get_package_type(pkg.metadata.name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = glob.escape(pkg.path)
|
||||
toolchain_incglobs = [
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
result["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_gcc_defines(env):
|
||||
items = []
|
||||
try:
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command(
|
||||
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True
|
||||
)
|
||||
except OSError:
|
||||
return items
|
||||
if result["returncode"] != 0:
|
||||
return items
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
items.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
items.append(tokens[1])
|
||||
return items
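# The command assembled above dumps the compiler's built-in macros, roughly:
#     $ echo | avr-gcc -dM -E -
#     #define __GNUC__ 7
#     #define __AVR_ARCH__ 5
#     ...
# and every "#define NAME VALUE" line is folded into "NAME=VALUE"
# (illustrative output; the exact macro set depends on the toolchain).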
|
||||
|
||||
|
||||
def dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace('\\"', '"'))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env["PIOPLATFORM"] == "atmelavr":
|
||||
board_mcu = env.get("BOARD_MCU")
|
||||
if not board_mcu and "BOARD" in env:
|
||||
board_mcu = env.BoardConfig().get("build.mcu")
|
||||
if board_mcu:
|
||||
defines.append(
|
||||
str(
|
||||
"__AVR_%s__"
|
||||
% board_mcu.upper()
|
||||
.replace("ATMEGA", "ATmega")
|
||||
.replace("ATTINY", "ATtiny")
|
||||
)
|
||||
)
|
||||
|
||||
# built-in GCC macros
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def dump_svd_path(env):
|
||||
svd_path = env.GetProjectOption("debug_svd_path")
|
||||
if svd_path:
|
||||
return os.path.abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
try:
|
||||
svd_path = env.BoardConfig().get("debug.svd_path")
|
||||
assert svd_path
|
||||
except (AssertionError, KeyError):
|
||||
return None
|
||||
# custom path to SVD file
|
||||
if os.path.isfile(svd_path):
|
||||
return svd_path
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def _split_flags_string(env, s):
|
||||
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
|
||||
return [str(arg) for arg in args]
|
||||
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
projenv, globalenv = args[0:2] # pylint: disable=unbalanced-tuple-unpacking
|
||||
data = {
|
||||
"build_type": globalenv.GetBuildType(),
|
||||
"env_name": globalenv["PIOENV"],
|
||||
"libsource_dirs": [
|
||||
globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"cxx_path": where_is_program(
|
||||
globalenv.subst("$CXX"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"gdb_path": where_is_program(
|
||||
globalenv.subst("$GDB"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"prog_path": globalenv.subst("$PROGPATH"),
|
||||
"svd_path": dump_svd_path(globalenv),
|
||||
"compiler_type": globalenv.GetCompilerType(),
|
||||
"targets": globalenv.DumpTargets(),
|
||||
"extra": dict(
|
||||
flash_images=[
|
||||
{"offset": item[0], "path": globalenv.subst(item[1])}
|
||||
for item in globalenv.get("FLASH_EXTRA_IMAGES", [])
|
||||
]
|
||||
),
|
||||
}
|
||||
for key in ("IDE_EXTRA_DATA", "INTEGRATION_EXTRA_DATA"):
|
||||
data["extra"].update(globalenv.get(key, {}))
|
||||
return data
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env["IDE_EXTRA_DATA"] = {} # legacy support
|
||||
env["INTEGRATION_EXTRA_DATA"] = {}
|
||||
env.AddMethod(IsIntegrationDump)
|
||||
env.AddMethod(DumpIntegrationIncludes)
|
||||
env.AddMethod(DumpIntegrationData)
|
||||
return env
|
||||
File diff suppressed because it is too large
@@ -1,96 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
|
||||
from SCons.Platform import TempFileMunge # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
|
||||
# The following limits apply depending on the platform:
|
||||
# - Windows = 8191
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
|
||||
|
||||
|
||||
def tempfile_arg_esc_func(arg):
|
||||
arg = quote_spaces(arg)
|
||||
if not IS_WINDOWS:
|
||||
return arg
|
||||
# GCC requires double Windows slashes, let's use UNIX separator
|
||||
return WINPATHSEP_RE.sub(r"/\1", arg)
|
||||
|
||||
|
||||
def long_sources_hook(env, sources):
|
||||
_sources = str(sources).replace("\\", "/")
|
||||
if len(str(_sources)) < MAX_LINE_LENGTH:
|
||||
return sources
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _sources.split(".o "):
|
||||
line = line.strip()
|
||||
if not line.endswith(".o"):
|
||||
line += ".o"
|
||||
data.append('"%s"' % line)
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
tmp_file = os.path.join(
|
||||
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
|
||||
)
|
||||
if os.path.isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
return tmp_file
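# Together, these helpers keep archiver command lines under MAX_LINE_LENGTH by
# spilling the object list into a response file, so the expanded command ends
# up looking roughly like (illustrative paths):
#     avr-gcc-ar rc libFrameworkArduino.a @"<build_dir>/longcmd-<md5>"
# where the referenced file holds the quoted .o paths joined with spaces.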
|
||||
|
||||
|
||||
def exists(env):
|
||||
return "compiledb" not in COMMAND_LINE_TARGETS and not env.IsIntegrationDump()
|
||||
|
||||
|
||||
def generate(env):
|
||||
if not exists(env):
|
||||
return env
|
||||
kwargs = dict(
|
||||
_long_sources_hook=long_sources_hook,
|
||||
TEMPFILE=TempFileMunge,
|
||||
MAXLINELENGTH=MAX_LINE_LENGTH,
|
||||
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
|
||||
TEMPFILESUFFIX=".tmp",
|
||||
TEMPFILEDIR="$BUILD_DIR",
|
||||
)
|
||||
|
||||
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
|
||||
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
|
||||
|
||||
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
env.Replace(**kwargs)
|
||||
|
||||
return env
|
||||
@@ -12,75 +12,237 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import re
|
||||
import sys
|
||||
from os import environ, remove, walk
|
||||
from os.path import basename, isdir, isfile, join, relpath, sep
|
||||
from tempfile import mkstemp
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.proc import exec_command
|
||||
from SCons.Action import Action
|
||||
from SCons.Script import ARGUMENTS
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def GetCompilerType(env): # pylint: disable=too-many-return-statements
|
||||
CC = env.subst("$CC")
|
||||
if CC.endswith("-gcc"):
|
||||
return "gcc"
|
||||
if os.path.basename(CC) == "clang":
|
||||
return "clang"
|
||||
class InoToCPPConverter(object):
|
||||
|
||||
PROTOTYPE_RE = re.compile(r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*\{ # must end with {
|
||||
""", re.X | re.M | re.I)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = node.get_text_contents()
|
||||
_lines = [
|
||||
'# 1 "%s"' % node.get_path().replace("\\", "/"), contents
|
||||
]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = self._main_ino + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
with open(out_file) as fp:
|
||||
contents = fp.read()
|
||||
contents = self._join_multiline_strings(contents)
|
||||
with open(out_file, "w") as fp:
|
||||
fp.write(self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = mkstemp()[1]
|
||||
with open(tmp_path, "w") as fp:
|
||||
fp.write(contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file,
|
||||
tmp_path), "Converting " + basename(out_file[:-4])))
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
elif stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append('#line %d "%s"' %
|
||||
(linenum, self._main_ino.replace("\\", "/")))
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
|
||||
def _parse_preproc_line_num(line):
|
||||
if not line.startswith("#"):
|
||||
return None
|
||||
tokens = line.split(" ", 3)
|
||||
if len(tokens) > 2 and tokens[1].isdigit():
|
||||
return int(tokens[1])
|
||||
return None
|
||||
|
||||
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (set([match.group(2).strip(),
|
||||
match.group(3).strip()]) & reserved_keywords):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents)
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
prototype_names = set([m.group(3).strip() for m in prototypes])
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(self.PROTOPTRS_TPLRE %
|
||||
("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append('#line %d "%s"' %
|
||||
(self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/")))
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
src_dir = util.glob_escape(env.subst("$PROJECTSRC_DIR"))
|
||||
ino_nodes = (
|
||||
env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde")))
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if isfile(path):
|
||||
remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command([CC, "-v"], env=sysenv)
|
||||
|
||||
def GetCompilerType(env):
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
except OSError:
|
||||
return None
|
||||
if result["returncode"] != 0:
|
||||
if result['returncode'] != 0:
|
||||
return None
|
||||
output = "".join([result["out"], result["err"]]).lower()
|
||||
if "clang version" in output:
|
||||
output = "".join([result['out'], result['err']]).lower()
|
||||
if "clang" in output and "LLVM" in output:
|
||||
return "clang"
|
||||
if "gcc" in output:
|
||||
elif "gcc" in output:
|
||||
return "gcc"
|
||||
return None
|
||||
|
||||
|
||||
def GetActualLDScript(env):
|
||||
|
||||
def _lookup_in_ldpath(script):
|
||||
for d in env.get("LIBPATH", []):
|
||||
path = os.path.join(env.subst(d), script)
|
||||
if os.path.isfile(path):
|
||||
path = join(env.subst(d), script)
|
||||
if isfile(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
script = None
|
||||
script_in_next = False
|
||||
for f in env.get("LINKFLAGS", []):
|
||||
raw_script = None
|
||||
if f == "-T":
|
||||
script_in_next = True
|
||||
continue
|
||||
if script_in_next:
|
||||
script_in_next = False
|
||||
raw_script = f
|
||||
elif f.startswith("-Wl,-T"):
|
||||
raw_script = f[6:]
|
||||
else:
|
||||
continue
|
||||
script = env.subst(raw_script.replace('"', "").strip())
|
||||
if os.path.isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
if f.startswith("-Wl,-T"):
|
||||
script = env.subst(f[6:].replace('"', "").strip())
|
||||
if isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
|
||||
if script:
|
||||
sys.stderr.write(
|
||||
"Error: Could not find '%s' LD script in LDPATH '%s'\n"
|
||||
% (script, env.subst("$LIBPATH"))
|
||||
)
|
||||
"Error: Could not find '%s' LD script in LDPATH '%s'\n" %
|
||||
(script, env.subst("$LIBPATH")))
|
||||
env.Exit(1)
|
||||
|
||||
if not script and "LDSCRIPT_PATH" in env:
|
||||
path = _lookup_in_ldpath(env["LDSCRIPT_PATH"])
|
||||
path = _lookup_in_ldpath(env['LDSCRIPT_PATH'])
|
||||
if path:
|
||||
return path
|
||||
|
||||
@@ -88,66 +250,78 @@ def GetActualLDScript(env):
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def ConfigureDebugTarget(env):
|
||||
def _cleanup_debug_flags(scope):
|
||||
if scope not in env:
|
||||
return
|
||||
unflags = ["-Os", "-g"]
|
||||
for level in [0, 1, 2, 3]:
|
||||
for flag in ("O", "g", "ggdb"):
|
||||
unflags.append("-%s%d" % (flag, level))
|
||||
env[scope] = [f for f in env.get(scope, []) if f not in unflags]
|
||||
def VerboseAction(_, act, actstr):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
return act
|
||||
return Action(act, actstr)
|
||||
|
||||
env.Append(CPPDEFINES=["__PLATFORMIO_BUILD_DEBUG__"])
|
||||
|
||||
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
||||
_cleanup_debug_flags(scope)
|
||||
def PioClean(env, clean_dir):
|
||||
if not isdir(clean_dir):
|
||||
print "Build environment is clean"
|
||||
env.Exit(0)
|
||||
for root, _, files in walk(clean_dir):
|
||||
for file_ in files:
|
||||
remove(join(root, file_))
|
||||
print "Removed %s" % relpath(join(root, file_))
|
||||
print "Done cleaning"
|
||||
util.rmtree_(clean_dir)
|
||||
env.Exit(0)
|
||||
|
||||
debug_flags = env.ParseFlags(
|
||||
env.get("PIODEBUGFLAGS")
|
||||
if env.get("PIODEBUGFLAGS")
|
||||
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
|
||||
else env.GetProjectOption("debug_build_flags")
|
||||
)
|
||||
|
||||
env.MergeFlags(debug_flags)
|
||||
optimization_flags = [
|
||||
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
||||
def ProcessDebug(env):
|
||||
if not env.subst("$PIODEBUGFLAGS"):
|
||||
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb"])
|
||||
env.Append(
|
||||
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
|
||||
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
|
||||
|
||||
|
||||
def ProcessTest(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(
|
||||
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity"))
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = ["+<*.cpp>", "+<*.c>"]
|
||||
if "PIOTEST" in env:
|
||||
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
|
||||
|
||||
return env.CollectBuildFiles(
|
||||
"$BUILDTEST_DIR",
|
||||
"$PROJECTTEST_DIR",
|
||||
src_filter=src_filter,
|
||||
duplicate=False)
|
||||
|
||||
|
||||
def GetPreExtraScripts(env):
|
||||
return [
|
||||
item[4:] for item in env.get("EXTRA_SCRIPTS", [])
|
||||
if item.startswith("pre:")
|
||||
]
|
||||
|
||||
if optimization_flags:
|
||||
env.AppendUnique(
|
||||
ASFLAGS=[
|
||||
# skip -O flags for assembler
|
||||
f
|
||||
for f in optimization_flags
|
||||
if f.startswith("-g")
|
||||
],
|
||||
LINKFLAGS=optimization_flags,
|
||||
)
|
||||
|
||||
|
||||
def GetExtraScripts(env, scope):
|
||||
items = []
|
||||
for item in env.GetProjectOption("extra_scripts", []):
|
||||
if scope == "post" and ":" not in item:
|
||||
items.append(item)
|
||||
elif item.startswith("%s:" % scope):
|
||||
items.append(item[len(scope) + 1 :])
|
||||
if not items:
|
||||
return items
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
return [os.path.abspath(env.subst(item)) for item in items]
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(ConfigureDebugTarget)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
# backward-compatibility with Zephyr build script
|
||||
env.AddMethod(ConfigureDebugTarget, "ConfigureDebugFlags")
|
||||
def GetPostExtraScripts(env):
|
||||
return [
|
||||
item[5:] if item.startswith("post:") else item
|
||||
for item in env.get("EXTRA_SCRIPTS", []) if not item.startswith("pre:")
|
||||
]
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(ProcessDebug)
|
||||
env.AddMethod(ProcessTest)
|
||||
env.AddMethod(GetPreExtraScripts)
|
||||
env.AddMethod(GetPostExtraScripts)
|
||||
return env
|
||||
|
||||
@@ -12,231 +12,88 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownBoard
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectOptions
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
from platformio import exception, util
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def _PioPlatform():
|
||||
env = DefaultEnvironment()
|
||||
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
|
||||
@util.memoized
|
||||
def initPioPlatform(name):
|
||||
return PlatformFactory.newPlatform(name)
|
||||
|
||||
|
||||
def PioPlatform(_):
|
||||
return _PioPlatform()
|
||||
def PioPlatform(env):
|
||||
variables = {}
|
||||
for name in env['PIOVARIABLES']:
|
||||
if name in env:
|
||||
variables[name.lower()] = env[name]
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
|
||||
return p
|
||||
|
||||
|
||||
def BoardConfig(env, board=None):
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
try:
|
||||
p = env.PioPlatform()
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
return p.board_config(board)
|
||||
except (AssertionError, UnknownBoard) as exc:
|
||||
sys.stderr.write("Error: %s\n" % str(exc))
|
||||
env.Exit(1)
|
||||
return None
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
try:
|
||||
config = p.board_config(board if board else env['BOARD'])
|
||||
except exception.UnknownBoard as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
env.Exit(1)
|
||||
return config
|
||||
|
||||
|
||||
def GetFrameworkScript(env, framework):
|
||||
p = env.PioPlatform()
|
||||
assert p.frameworks and framework in p.frameworks
|
||||
script_path = env.subst(p.frameworks[framework]["script"])
|
||||
if not os.path.isfile(script_path):
|
||||
script_path = os.path.join(p.get_dir(), script_path)
|
||||
script_path = env.subst(p.frameworks[framework]['script'])
|
||||
if not isfile(script_path):
|
||||
script_path = join(p.get_dir(), script_path)
|
||||
return script_path
|
||||
|
||||
|
||||
def LoadPioPlatform(env):
|
||||
def LoadPioPlatform(env, variables):
|
||||
p = env.PioPlatform()
|
||||
installed_packages = p.get_installed_packages()
|
||||
|
||||
# Ensure real platform name
|
||||
env["PIOPLATFORM"] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
for pkg in p.get_installed_packages():
|
||||
type_ = p.get_package_type(pkg.metadata.name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
# Add toolchains and uploaders to $PATH
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
if type_ not in ("toolchain", "uploader"):
|
||||
continue
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
and os.path.isdir(os.path.join(pkg.path, "lib"))
|
||||
and type_ != "toolchain"
|
||||
):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
|
||||
os.path.join(pkg.path, "lib"),
|
||||
)
|
||||
path = p.get_package_dir(name)
|
||||
if isdir(join(path, "bin")):
|
||||
path = join(path, "bin")
|
||||
env.PrependENVPath("PATH", path)
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if os.path.isdir(os.path.join(p.get_dir(), "ldscripts")):
|
||||
env.Prepend(LIBPATH=[os.path.join(p.get_dir(), "ldscripts")])
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
|
||||
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
|
||||
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
|
||||
# update board manifest with overridden data from INI config
|
||||
board_config = env.BoardConfig()
|
||||
for option, value in env.GetProjectOptions():
|
||||
if not option.startswith("board_"):
|
||||
for k in variables.keys():
|
||||
if (k in env
|
||||
or not any([k.startswith("BOARD_"),
|
||||
k.startswith("UPLOAD_")])):
|
||||
continue
|
||||
option = option.lower()[6:]
|
||||
try:
|
||||
if isinstance(board_config.get(option), bool):
|
||||
value = str(value).lower() in ("1", "yes", "true")
|
||||
elif isinstance(board_config.get(option), int):
|
||||
value = int(value)
|
||||
except KeyError:
|
||||
pass
|
||||
board_config.update(option, value)
|
||||
|
||||
# load default variables from board config
|
||||
for option_meta in ProjectOptions.values():
|
||||
if not option_meta.buildenvvar or option_meta.buildenvvar in env:
|
||||
continue
|
||||
data_path = (
|
||||
option_meta.name[6:]
|
||||
if option_meta.name.startswith("board_")
|
||||
else option_meta.name.replace("_", ".")
|
||||
)
|
||||
try:
|
||||
env[option_meta.buildenvvar] = board_config.get(data_path)
|
||||
except KeyError:
|
||||
pass
|
||||
_opt, _val = k.lower().split("_", 1)
|
||||
if _opt == "board":
|
||||
_opt = "build"
|
||||
if _val in board_config.get(_opt):
|
||||
env.Replace(**{k: board_config.get("%s.%s" % (_opt, _val))})
|
||||
|
||||
if "build.ldscript" in board_config:
|
||||
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
|
||||
|
||||
|
||||
def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
platform = env.PioPlatform()
|
||||
pkg_metadata = PackageItem(platform.get_dir()).metadata
|
||||
board_config = env.BoardConfig() if "BOARD" in env else None
|
||||
|
||||
def _get_configuration_data():
|
||||
return (
|
||||
None
|
||||
if not board_config
|
||||
else [
|
||||
"CONFIGURATION:",
|
||||
"https://docs.platformio.org/page/boards/%s/%s.html"
|
||||
% (platform.name, board_config.id),
|
||||
]
|
||||
)
|
||||
|
||||
def _get_plaform_data():
|
||||
data = [
|
||||
"PLATFORM: %s (%s)"
|
||||
% (
|
||||
platform.title,
|
||||
pkg_metadata.version if pkg_metadata else platform.version,
|
||||
)
|
||||
]
|
||||
if (
|
||||
int(ARGUMENTS.get("PIOVERBOSE", 0))
|
||||
and pkg_metadata
|
||||
and pkg_metadata.spec.external
|
||||
):
|
||||
data.append("(%s)" % pkg_metadata.spec.uri)
|
||||
if board_config:
|
||||
data.extend([">", board_config.get("name")])
|
||||
return data
|
||||
|
||||
def _get_hardware_data():
|
||||
data = ["HARDWARE:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
data.append("%dMHz," % (f_cpu / 1000000))
|
||||
if not board_config:
|
||||
return data
|
||||
ram = board_config.get("upload", {}).get("maximum_ram_size")
|
||||
flash = board_config.get("upload", {}).get("maximum_size")
|
||||
data.append(
|
||||
"%s RAM, %s Flash"
|
||||
% (fs.humanize_file_size(ram), fs.humanize_file_size(flash))
|
||||
)
|
||||
return data
|
||||
|
||||
def _get_debug_data():
|
||||
debug_tools = (
|
||||
board_config.get("debug", {}).get("tools") if board_config else None
|
||||
)
|
||||
if not debug_tools:
|
||||
return None
|
||||
data = [
|
||||
"DEBUG:",
|
||||
"Current",
|
||||
"(%s)"
|
||||
% board_config.get_debug_tool_name(env.GetProjectOption("debug_tool")),
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.extend(["On-board", "(%s)" % ", ".join(sorted(onboard))])
|
||||
if external:
|
||||
data.extend(["External", "(%s)" % ", ".join(sorted(external))])
|
||||
return data
|
||||
|
||||
def _get_packages_data():
|
||||
data = []
|
||||
for item in platform.dump_used_packages():
|
||||
original_version = get_original_version(item["version"])
|
||||
info = "%s @ %s" % (item["name"], item["version"])
|
||||
extra = []
|
||||
if original_version:
|
||||
extra.append(original_version)
|
||||
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
extra.append(item["src_url"])
|
||||
if extra:
|
||||
info += " (%s)" % ", ".join(extra)
|
||||
data.append(info)
|
||||
if not data:
|
||||
return None
|
||||
return ["PACKAGES:"] + ["\n - %s" % d for d in sorted(data)]
|
||||
|
||||
for data in (
|
||||
_get_configuration_data(),
|
||||
_get_plaform_data(),
|
||||
_get_hardware_data(),
|
||||
_get_debug_data(),
|
||||
_get_packages_data(),
|
||||
):
|
||||
if data and len(data) > 1:
|
||||
print(" ".join(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
@@ -246,5 +103,4 @@ def generate(env):
|
||||
env.AddMethod(BoardConfig)
|
||||
env.AddMethod(GetFrameworkScript)
|
||||
env.AddMethod(LoadPioPlatform)
|
||||
env.AddMethod(PrintConfiguration)
|
||||
return env
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.compat import MISSING
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
def GetProjectConfig(env):
|
||||
return ProjectConfig.get_instance(env["PROJECT_CONFIG"])
|
||||
|
||||
|
||||
def GetProjectOptions(env, as_dict=False):
|
||||
return env.GetProjectConfig().items(env=env["PIOENV"], as_dict=as_dict)
|
||||
|
||||
|
||||
def GetProjectOption(env, option, default=MISSING):
|
||||
return env.GetProjectConfig().get("env:" + env["PIOENV"], option, default)
|
||||
|
||||
|
||||
def LoadProjectOptions(env):
|
||||
config = env.GetProjectConfig()
|
||||
section = "env:" + env["PIOENV"]
|
||||
for option in config.options(section):
|
||||
option_meta = config.find_option_meta(section, option)
|
||||
if (
|
||||
not option_meta
|
||||
or not option_meta.buildenvvar
|
||||
or option_meta.buildenvvar in env
|
||||
):
|
||||
continue
|
||||
env[option_meta.buildenvvar] = config.get(section, option)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetProjectConfig)
|
||||
env.AddMethod(GetProjectOptions)
|
||||
env.AddMethod(GetProjectOption)
|
||||
env.AddMethod(LoadProjectOptions)
|
||||
return env
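# Hedged usage sketch (extra script context; option names are illustrative):
# once this tool is loaded, scripts can read values straight from the active
# `[env:...]` section of platformio.ini.
Import("env")

upload_speed = env.GetProjectOption("upload_speed", "115200")
all_options = env.GetProjectOptions(as_dict=True)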
|
||||
@@ -1,266 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import environ, makedirs, remove
|
||||
from os.path import isdir, join, splitdrive
|
||||
|
||||
from elftools.elf.descriptions import describe_sh_flags
|
||||
from elftools.elf.elffile import ELFFile
|
||||
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
def _run_tool(cmd, env, tool_args):
|
||||
sysenv = environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "size-data-longcmd.txt")
|
||||
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(tool_args))
|
||||
|
||||
cmd.append("@" + tmp_file)
|
||||
result = exec_command(cmd, env=sysenv)
|
||||
remove(tmp_file)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _get_symbol_locations(env, elf_path, addrs):
|
||||
if not addrs:
|
||||
return {}
|
||||
cmd = [env.subst("$CC").replace("-gcc", "-addr2line"), "-e", elf_path]
|
||||
result = _run_tool(cmd, env, addrs)
|
||||
locations = [line for line in result["out"].split("\n") if line]
|
||||
assert len(addrs) == len(locations)
|
||||
|
||||
return dict(zip(addrs, [loc.strip() for loc in locations]))
|
||||
|
||||
|
||||
def _get_demangled_names(env, mangled_names):
|
||||
if not mangled_names:
|
||||
return {}
|
||||
result = _run_tool(
|
||||
[env.subst("$CC").replace("-gcc", "-c++filt")], env, mangled_names
|
||||
)
|
||||
demangled_names = [line for line in result["out"].split("\n") if line]
|
||||
assert len(mangled_names) == len(demangled_names)
|
||||
|
||||
return dict(
|
||||
zip(
|
||||
mangled_names,
|
||||
[dn.strip().replace("::__FUNCTION__", "") for dn in demangled_names],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _collect_sections_info(env, elffile):
|
||||
sections = {}
|
||||
for section in elffile.iter_sections():
|
||||
if section.is_null() or section.name.startswith(".debug"):
|
||||
continue
|
||||
|
||||
section_type = section["sh_type"]
|
||||
section_flags = describe_sh_flags(section["sh_flags"])
|
||||
section_size = section.data_size
|
||||
|
||||
section_data = {
|
||||
"name": section.name,
|
||||
"size": section_size,
|
||||
"start_addr": section["sh_addr"],
|
||||
"type": section_type,
|
||||
"flags": section_flags,
|
||||
}
|
||||
|
||||
sections[section.name] = section_data
|
||||
sections[section.name]["in_flash"] = env.pioSizeIsFlashSection(section_data)
|
||||
sections[section.name]["in_ram"] = env.pioSizeIsRamSection(section_data)
|
||||
|
||||
return sections
|
||||
|
||||
|
||||
def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
symbols = []
|
||||
|
||||
symbol_section = elffile.get_section_by_name(".symtab")
|
||||
if symbol_section.is_null():
|
||||
sys.stderr.write("Couldn't find symbol table. Is ELF file stripped?")
|
||||
env.Exit(1)
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
|
||||
symbol_addrs = []
|
||||
mangled_names = []
|
||||
for s in symbol_section.iter_symbols():
|
||||
symbol_info = s.entry["st_info"]
|
||||
symbol_addr = s["st_value"]
|
||||
symbol_size = s["st_size"]
|
||||
symbol_type = symbol_info["type"]
|
||||
|
||||
if not env.pioSizeIsValidSymbol(s.name, symbol_type, symbol_addr):
|
||||
continue
|
||||
|
||||
symbol = {
|
||||
"addr": symbol_addr,
|
||||
"bind": symbol_info["bind"],
|
||||
"name": s.name,
|
||||
"type": symbol_type,
|
||||
"size": symbol_size,
|
||||
"section": env.pioSizeDetermineSection(sections, symbol_addr),
|
||||
}
|
||||
|
||||
if s.name.startswith("_Z"):
|
||||
mangled_names.append(s.name)
|
||||
|
||||
symbol_addrs.append(hex(symbol_addr))
|
||||
symbols.append(symbol)
|
||||
|
||||
symbol_locations = _get_symbol_locations(env, elf_path, symbol_addrs)
|
||||
demangled_names = _get_demangled_names(env, mangled_names)
|
||||
for symbol in symbols:
|
||||
if symbol["name"].startswith("_Z"):
|
||||
symbol["demangled_name"] = demangled_names.get(symbol["name"])
|
||||
location = symbol_locations.get(hex(symbol["addr"]))
|
||||
if not location or "?" in location:
|
||||
continue
|
||||
if IS_WINDOWS:
|
||||
drive, tail = splitdrive(location)
|
||||
location = join(drive.upper(), tail)
|
||||
symbol["file"] = location
|
||||
symbol["line"] = 0
|
||||
if ":" in location:
|
||||
file_, line = location.rsplit(":", 1)
|
||||
if line.isdigit():
|
||||
symbol["file"] = file_
|
||||
symbol["line"] = int(line)
|
||||
return symbols
|
||||
|
||||
|
||||
def pioSizeDetermineSection(_, sections, symbol_addr):
|
||||
for section, info in sections.items():
|
||||
if not info.get("in_flash", False) and not info.get("in_ram", False):
|
||||
continue
|
||||
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
|
||||
return section
|
||||
return "unknown"
|
||||
|
||||
|
||||
def pioSizeIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
|
||||
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
|
||||
|
||||
|
||||
def pioSizeIsRamSection(_, section):
|
||||
return (
|
||||
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
|
||||
and section.get("flags", "") == "WA"
|
||||
)
|
||||
|
||||
|
||||
def pioSizeIsFlashSection(_, section):
|
||||
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
|
||||
|
||||
|
||||
def pioSizeCalculateFirmwareSize(_, sections):
|
||||
flash_size = ram_size = 0
|
||||
for section_info in sections.values():
|
||||
if section_info.get("in_flash", False):
|
||||
flash_size += section_info.get("size", 0)
|
||||
if section_info.get("in_ram", False):
|
||||
ram_size += section_info.get("size", 0)
|
||||
|
||||
return ram_size, flash_size
|
||||
|
||||
|
||||
def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
data = {"device": {}, "memory": {}, "version": 1}
|
||||
|
||||
board = env.BoardConfig()
|
||||
if board:
|
||||
data["device"] = {
|
||||
"mcu": board.get("build.mcu", ""),
|
||||
"cpu": board.get("build.cpu", ""),
|
||||
"frequency": board.get("build.f_cpu"),
|
||||
"flash": int(board.get("upload.maximum_size", 0)),
|
||||
"ram": int(board.get("upload.maximum_ram_size", 0)),
|
||||
}
|
||||
if data["device"]["frequency"] and data["device"]["frequency"].endswith("L"):
|
||||
data["device"]["frequency"] = int(data["device"]["frequency"][0:-1])
|
||||
|
||||
elf_path = env.subst("$PIOMAINPROG")
|
||||
|
||||
with open(elf_path, "rb") as fp:
|
||||
elffile = ELFFile(fp)
|
||||
|
||||
if not elffile.has_dwarf_info():
|
||||
sys.stderr.write("Elf file doesn't contain DWARF information")
|
||||
env.Exit(1)
|
||||
|
||||
sections = _collect_sections_info(env, elffile)
|
||||
firmware_ram, firmware_flash = env.pioSizeCalculateFirmwareSize(sections)
|
||||
data["memory"]["total"] = {
|
||||
"ram_size": firmware_ram,
|
||||
"flash_size": firmware_flash,
|
||||
"sections": sections,
|
||||
}
|
||||
|
||||
files = {}
|
||||
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
file_path = symbol.get("file") or "unknown"
|
||||
if not files.get(file_path, {}):
|
||||
files[file_path] = {"symbols": [], "ram_size": 0, "flash_size": 0}
|
||||
|
||||
symbol_size = symbol.get("size", 0)
|
||||
section = sections.get(symbol.get("section", ""), {})
|
||||
if not section:
|
||||
continue
|
||||
if section.get("in_ram", False):
|
||||
files[file_path]["ram_size"] += symbol_size
|
||||
if section.get("in_flash", False):
|
||||
files[file_path]["flash_size"] += symbol_size
|
||||
|
||||
files[file_path]["symbols"].append(symbol)
|
||||
|
||||
data["memory"]["files"] = []
|
||||
for k, v in files.items():
|
||||
file_data = {"path": k}
|
||||
file_data.update(v)
|
||||
data["memory"]["files"].append(file_data)
|
||||
|
||||
with open(
|
||||
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
|
||||
) as fp:
|
||||
fp.write(json.dumps(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(pioSizeIsRamSection)
|
||||
env.AddMethod(pioSizeIsFlashSection)
|
||||
env.AddMethod(pioSizeCalculateFirmwareSize)
|
||||
env.AddMethod(pioSizeDetermineSection)
|
||||
env.AddMethod(pioSizeIsValidSymbol)
|
||||
env.AddMethod(DumpSizeData)
|
||||
return env
|
||||
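The `DumpSizeData` method above serializes everything it collects into `sizedata.json` inside `$BUILD_DIR`. A minimal sketch of reading that report directly is shown below; the path under `.pio/build/<env>` and the environment name are assumptions for a default project layout, while the keys mirror the dictionary built in `DumpSizeData`.

# Sketch: summarize the sizedata.json report produced by DumpSizeData.
# The report path assumes a default project layout and a hypothetical "uno" env.
import json
from pathlib import Path

data = json.loads(Path(".pio/build/uno/sizedata.json").read_text(encoding="utf8"))

total = data["memory"]["total"]
print("RAM: %d bytes, Flash: %d bytes" % (total["ram_size"], total["flash_size"]))

# Ten biggest flash consumers, aggregated per source file
for item in sorted(data["memory"]["files"], key=lambda f: f["flash_size"], reverse=True)[:10]:
    print("%8d  %s" % (item["flash_size"], item["path"]))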
@@ -1,116 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from SCons.Action import Action # pylint: disable=import-error
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
|
||||
from platformio import compat, fs
|
||||
|
||||
|
||||
def VerboseAction(_, act, actstr):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
return act
|
||||
return Action(act, actstr)
|
||||
|
||||
|
||||
def IsCleanTarget(env):
|
||||
return env.GetOption("clean")
|
||||
|
||||
|
||||
def CleanProject(env, fullclean=False):
|
||||
def _relpath(path):
|
||||
if compat.IS_WINDOWS:
|
||||
prefix = os.getcwd()[:2].lower()
|
||||
if (
|
||||
":" not in prefix
|
||||
or not path.lower().startswith(prefix)
|
||||
or os.path.relpath(path).startswith("..")
|
||||
):
|
||||
return path
|
||||
return os.path.relpath(path)
|
||||
|
||||
def _clean_dir(path):
|
||||
clean_rel_path = _relpath(path)
|
||||
print(f"Removing {clean_rel_path}")
|
||||
fs.rmtree(path)
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
libdeps_dir = env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
|
||||
if os.path.isdir(build_dir):
|
||||
_clean_dir(build_dir)
|
||||
else:
|
||||
print("Build environment is clean")
|
||||
|
||||
if fullclean and os.path.isdir(libdeps_dir):
|
||||
_clean_dir(libdeps_dir)
|
||||
|
||||
print("Done cleaning")
|
||||
|
||||
|
||||
def AddTarget( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
env,
|
||||
name,
|
||||
dependencies,
|
||||
actions,
|
||||
title=None,
|
||||
description=None,
|
||||
group="General",
|
||||
always_build=True,
|
||||
):
|
||||
if "__PIO_TARGETS" not in env:
|
||||
env["__PIO_TARGETS"] = {}
|
||||
assert name not in env["__PIO_TARGETS"]
|
||||
env["__PIO_TARGETS"][name] = dict(
|
||||
name=name, title=title, description=description, group=group
|
||||
)
|
||||
target = env.Alias(name, dependencies, actions)
|
||||
if always_build:
|
||||
AlwaysBuild(target)
|
||||
return target
|
||||
|
||||
|
||||
def AddPlatformTarget(env, *args, **kwargs):
|
||||
return env.AddTarget(group="Platform", *args, **kwargs)
|
||||
|
||||
|
||||
def AddCustomTarget(env, *args, **kwargs):
|
||||
return env.AddTarget(group="Custom", *args, **kwargs)
|
||||
|
||||
|
||||
def DumpTargets(env):
|
||||
targets = env.get("__PIO_TARGETS") or {}
|
||||
# pre-fill default targets if embedded dev-platform
|
||||
if env.PioPlatform().is_embedded() and not any(
|
||||
t["group"] == "Platform" for t in targets.values()
|
||||
):
|
||||
targets["upload"] = dict(name="upload", group="Platform", title="Upload")
|
||||
return list(targets.values())
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(IsCleanTarget)
|
||||
env.AddMethod(CleanProject)
|
||||
env.AddMethod(AddTarget)
|
||||
env.AddMethod(AddPlatformTarget)
|
||||
env.AddMethod(AddCustomTarget)
|
||||
env.AddMethod(DumpTargets)
|
||||
return env
|
||||
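`AddTarget` and its `AddPlatformTarget` / `AddCustomTarget` wrappers are what build scripts use to register the entries later returned by `DumpTargets`. A hedged sketch of an `extra_scripts` file calling this API; the target name, dependency and action are made up.

# extra_script.py -- registers a custom target through the method added by generate().
# "env" is injected by the SCons build context of a PlatformIO build.
Import("env")

env.AddCustomTarget(
    name="ota",                                        # hypothetical target name
    dependencies="$BUILD_DIR/${PROGNAME}.bin",
    actions='echo "Would upload $SOURCE over the air"',
    title="OTA Upload",
    description="Example target registered from extra_scripts",
)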
@@ -1,61 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.test.result import TestSuite
|
||||
from platformio.test.runners.factory import TestRunnerFactory
|
||||
|
||||
|
||||
def ConfigureTestTarget(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST"], # deprecated, use PIO_UNIT_TESTING
|
||||
PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piobuild.SRC_BUILD_EXT],
|
||||
)
|
||||
env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])
|
||||
|
||||
if "PIOTEST_RUNNING_NAME" in env:
|
||||
test_name = env["PIOTEST_RUNNING_NAME"]
|
||||
while True:
|
||||
test_name = os.path.dirname(test_name) # parent dir
|
||||
# skip nested tests (user's side issue?)
|
||||
if not test_name or os.path.basename(test_name).startswith("test_"):
|
||||
break
|
||||
env.Prepend(
|
||||
PIOTEST_SRC_FILTER=[
|
||||
f"+<{test_name}{os.path.sep}*.{ext}>"
|
||||
for ext in piobuild.SRC_BUILD_EXT
|
||||
],
|
||||
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
|
||||
)
|
||||
|
||||
env.Prepend(
|
||||
PIOTEST_SRC_FILTER=[f"+<$PIOTEST_RUNNING_NAME{os.path.sep}>"],
|
||||
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", "$PIOTEST_RUNNING_NAME")],
|
||||
)
|
||||
|
||||
test_runner = TestRunnerFactory.new(
|
||||
TestSuite(env["PIOENV"], env.get("PIOTEST_RUNNING_NAME", "*")),
|
||||
env.GetProjectConfig(),
|
||||
)
|
||||
test_runner.configure_build_env(env)
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConfigureTestTarget)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
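The `while True` loop above climbs the parent directories of the running test and, for every intermediate folder that is not itself a `test_*` folder, prepends an extra source filter and include path, so helpers shared by a group of nested tests get compiled too. A standalone sketch of just that traversal, with a hypothetical nested test name:

import os

def parent_test_dirs(test_name):
    # Mirrors the traversal in ConfigureTestTarget: climb parent directories
    # until the path is exhausted or another test_* folder is reached.
    dirs = []
    while True:
        test_name = os.path.dirname(test_name)
        if not test_name or os.path.basename(test_name).startswith("test_"):
            break
        dirs.append(test_name)
    return dirs

print(parent_test_dirs(os.path.join("test_embedded", "common", "test_calc")))
# e.g. ['test_embedded/common'] on POSIX: sources in that folder are shared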
@@ -12,21 +12,20 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
from os.path import isfile, join
|
||||
from platform import system
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Node.Alias import Alias
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio.device.finder import SerialPortFinder, find_mbed_disk, is_pattern_port
|
||||
from platformio.device.list.util import list_serial_ports
|
||||
from platformio.proc import exec_command
|
||||
from platformio import util
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
@@ -42,24 +41,24 @@ def FlushSerialBuffer(env, port):
|
||||
|
||||
 def TouchSerialPort(env, port, baudrate):
     port = env.subst(port)
-    print("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
+    print "Forcing reset using %dbps open/close on port %s" % (baudrate, port)
     try:
         s = Serial(port=port, baudrate=baudrate)
         s.setDTR(False)
         s.close()
-    except:  # pylint: disable=bare-except
+    except:  # pylint: disable=W0702
         pass
     sleep(0.4)  # DO NOT REMOVE THAT (required by SAM-BA based boards)


 def WaitForNewSerialPort(env, before):
-    print("Waiting for the new upload port...")
+    print "Waiting for the new upload port..."
     prev_port = env.subst("$UPLOAD_PORT")
     new_port = None
     elapsed = 0
-    before = [p["port"] for p in before]
+    before = [p['port'] for p in before]
     while elapsed < 5 and new_port is None:
-        now = [p["port"] for p in list_serial_ports()]
+        now = [p['port'] for p in util.get_serialports()]
         for p in now:
             if p not in before:
                 new_port = p
@@ -81,167 +80,133 @@ def WaitForNewSerialPort(env, before):
|
||||
sleep(1)
|
||||
|
||||
if not new_port:
|
||||
sys.stderr.write(
|
||||
"Error: Couldn't find a board on the selected port. "
|
||||
"Check that you have the correct port selected. "
|
||||
"If it is correct, try pressing the board's reset "
|
||||
"button after initiating the upload.\n"
|
||||
)
|
||||
sys.stderr.write("Error: Couldn't find a board on the selected port. "
|
||||
"Check that you have the correct port selected. "
|
||||
"If it is correct, try pressing the board's reset "
|
||||
"button after initiating the upload.\n")
|
||||
env.Exit(1)
|
||||
|
||||
return new_port
|
||||
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
env = args[0]
|
||||
initial_port = env.subst("$UPLOAD_PORT")
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if initial_port and not is_pattern_port(initial_port):
|
||||
print(env.subst("Using manually specified: $UPLOAD_PORT"))
|
||||
|
||||
def _get_pattern():
|
||||
if "UPLOAD_PORT" not in env:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env['UPLOAD_PORT']):
|
||||
return env['UPLOAD_PORT']
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_mbed_disk():
|
||||
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
|
||||
for item in util.get_logicaldisks():
|
||||
if item['disk'].startswith(
|
||||
"/net") or not _is_match_pattern(item['disk']):
|
||||
continue
|
||||
mbed_pages = [
|
||||
join(item['disk'], n) for n in ("mbed.htm", "mbed.html")
|
||||
]
|
||||
if any([isfile(p) for p in mbed_pages]):
|
||||
return item['disk']
|
||||
if (item['name']
|
||||
and any([l in item['name'].lower() for l in msdlabels])):
|
||||
return item['disk']
|
||||
return None
|
||||
|
||||
def _look_for_serial_port():
|
||||
port = None
|
||||
board_hwids = []
|
||||
if "BOARD" in env and "build.hwids" in env.BoardConfig():
|
||||
board_hwids = env.BoardConfig().get("build.hwids")
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
if not _is_match_pattern(item['port']):
|
||||
continue
|
||||
port = item['port']
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
return port
|
||||
return port
|
||||
|
||||
if "UPLOAD_PORT" in env and not _get_pattern():
|
||||
print env.subst("Use manually specified: $UPLOAD_PORT")
|
||||
return
|
||||
|
||||
if upload_protocol == "mbed" or (
|
||||
"mbed" in env.subst("$PIOFRAMEWORK") and not upload_protocol
|
||||
):
|
||||
env.Replace(UPLOAD_PORT=find_mbed_disk(initial_port))
|
||||
if "mbed" in env.subst("$PIOFRAMEWORK"):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
else:
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as exc:
|
||||
sys.stderr.write("\n%s\n\n" % exc)
|
||||
env.Replace(
|
||||
UPLOAD_PORT=SerialPortFinder(
|
||||
board_config=env.BoardConfig() if "BOARD" in env else None,
|
||||
upload_protocol=upload_protocol,
|
||||
prefer_gdb_port="blackmagic" in upload_protocol,
|
||||
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)),
|
||||
).find(initial_port)
|
||||
)
|
||||
if (system() == "Linux" and not any([
|
||||
isfile("/etc/udev/rules.d/99-platformio-udev.rules"),
|
||||
isfile("/lib/udev/rules.d/99-platformio-udev.rules")
|
||||
])):
|
||||
sys.stderr.write(
|
||||
"\nWarning! Please install `99-platformio-udev.rules` and "
|
||||
"check that your board's PID and VID are listed in the rules."
|
||||
"\n https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/scripts/99-platformio-udev.rules\n")
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
print(env.subst("Auto-detected: $UPLOAD_PORT"))
|
||||
print env.subst("Auto-detected: $UPLOAD_PORT")
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: Please specify `upload_port` for environment or use "
|
||||
"global `--upload-port` option.\n"
|
||||
"For some development platforms it can be a USB flash "
|
||||
"drive (i.e. /media/<user>/<device name>)\n"
|
||||
)
|
||||
"drive (i.e. /media/<user>/<device name>)\n")
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def UploadToDisk(_, target, source, env):
|
||||
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
fpath = os.path.join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not os.path.isfile(fpath):
|
||||
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not isfile(fpath):
|
||||
continue
|
||||
copyfile(
|
||||
fpath, os.path.join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext))
|
||||
)
|
||||
print(
|
||||
"Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)"
|
||||
)
|
||||
copyfile(fpath,
|
||||
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
|
||||
print "Firmware has been successfully uploaded.\n"\
|
||||
"(Some boards may require manual hard reset)"
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env):
|
||||
check_conditions = [
|
||||
env.get("BOARD"),
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD"),
|
||||
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
if max_size == 0 or "SIZETOOL" not in env:
|
||||
return
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
cmd = [
|
||||
env.subst("$SIZETOOL"), "-B",
|
||||
str(source[0] if isinstance(target[0], Alias) else target[0])
|
||||
]
|
||||
if not all(check_conditions):
|
||||
return
|
||||
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
|
||||
if program_max_size == 0:
|
||||
result = util.exec_command(cmd, env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
return
|
||||
print result['out'].strip()
|
||||
|
||||
def _configure_defaults():
|
||||
env.Replace(
|
||||
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
|
||||
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+",
|
||||
)
|
||||
line = result['out'].strip().splitlines()[1]
|
||||
values = [v.strip() for v in line.split("\t")]
|
||||
used_size = int(values[0]) + int(values[1])
|
||||
|
||||
def _get_size_output():
|
||||
cmd = env.get("SIZECHECKCMD")
|
||||
if not cmd:
|
||||
return None
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command(env.subst(cmd), env=sysenv)
|
||||
if result["returncode"] != 0:
|
||||
return None
|
||||
return result["out"].strip()
|
||||
|
||||
def _calculate_size(output, pattern):
|
||||
if not output or not pattern:
|
||||
return -1
|
||||
size = 0
|
||||
regexp = re.compile(pattern)
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
match = regexp.search(line)
|
||||
if not match:
|
||||
continue
|
||||
size += sum(int(value) for value in match.groups())
|
||||
return size
|
||||
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = min(
|
||||
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
|
||||
)
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
|
||||
)
|
||||
|
||||
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
|
||||
_configure_defaults()
|
||||
output = _get_size_output()
|
||||
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
|
||||
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
|
||||
|
||||
print('Advanced Memory Usage is available via "PlatformIO Home > Project Inspect"')
|
||||
if data_max_size and data_size > -1:
|
||||
print("RAM: %s" % _format_availale_bytes(data_size, data_max_size))
|
||||
if program_size > -1:
|
||||
print("Flash: %s" % _format_availale_bytes(program_size, program_max_size))
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
if data_max_size and data_size > data_max_size:
|
||||
sys.stderr.write(
|
||||
"Warning! The data size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
|
||||
)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (program_size, program_max_size)
|
||||
)
|
||||
if used_size > max_size:
|
||||
sys.stderr.write("Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (used_size,
|
||||
max_size))
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def PrintUploadInfo(env):
|
||||
configured = env.subst("$UPLOAD_PROTOCOL")
|
||||
available = [configured] if configured else []
|
||||
if "BOARD" in env:
|
||||
available.extend(env.BoardConfig().get("upload", {}).get("protocols", []))
|
||||
if available:
|
||||
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
|
||||
if configured:
|
||||
print("CURRENT: upload_protocol = %s" % configured)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
@@ -253,5 +218,4 @@ def generate(env):
|
||||
env.AddMethod(AutodetectUploadPort)
|
||||
env.AddMethod(UploadToDisk)
|
||||
env.AddMethod(CheckUploadSize)
|
||||
env.AddMethod(PrintUploadInfo)
|
||||
return env
|
||||
|
||||
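Both variants of `CheckUploadSize` shown in the hunks above parse Berkeley-format output from the toolchain's `size` utility; the newer one sums regex capture groups (`SIZEPROGREGEXP` / `SIZEDATAREGEXP`) instead of splitting a single line on tabs. A self-contained sketch of that regex pass over made-up `size -B -d` output:

import re

# Made-up `arm-none-eabi-size -B -d firmware.elf` output; values are illustrative.
SIZE_OUTPUT = """\
   text    data     bss     dec     hex filename
  25344     356    2212   27912    6d08 firmware.elf"""

# Default patterns from _configure_defaults(): program = text + data, data = data + bss
SIZEPROGREGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"
SIZEDATAREGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"

def calculate_size(output, pattern):
    regexp = re.compile(pattern)
    total = 0
    for line in output.split("\n"):
        match = regexp.search(line.strip())
        if match:
            total += sum(int(value) for value in match.groups())
    return total

print("Flash:", calculate_size(SIZE_OUTPUT, SIZEPROGREGEXP))  # 25344 + 356 = 25700
print("RAM:  ", calculate_size(SIZE_OUTPUT, SIZEDATAREGEXP))  # 356 + 2212 = 2568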
88
platformio/builder/tools/piowinhooks.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from hashlib import md5
|
||||
from os import makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
from platform import system
|
||||
|
||||
# Windows CLI has limit with command length to 8192
|
||||
# Leave 2000 chars for flags and other options
|
||||
MAX_SOURCES_LENGTH = 6000
|
||||
|
||||
|
||||
def long_sources_hook(env, sources):
|
||||
_sources = str(sources).replace("\\", "/")
|
||||
if len(str(_sources)) < MAX_SOURCES_LENGTH:
|
||||
return sources
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _sources.split(".o "):
|
||||
line = line.strip()
|
||||
if not line.endswith(".o"):
|
||||
line += ".o"
|
||||
data.append('"%s"' % line)
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def long_incflags_hook(env, incflags):
|
||||
_incflags = env.subst(incflags).replace("\\", "/")
|
||||
if len(_incflags) < MAX_SOURCES_LENGTH:
|
||||
return incflags
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _incflags.split(" -I"):
|
||||
line = line.strip()
|
||||
if not line.startswith("-I"):
|
||||
line = "-I" + line
|
||||
data.append('-I"%s"' % line[2:])
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "longcmd-%s" % md5(data).hexdigest())
|
||||
if isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, "w") as fp:
|
||||
fp.write(data)
|
||||
return tmp_file
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
if system() != "Windows":
|
||||
return
|
||||
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
env.Replace(_long_incflags_hook=long_incflags_hook)
|
||||
coms = {}
|
||||
for key in ("ARCOM", "LINKCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}")
|
||||
for key in ("_CCCOMCOM", "ASPPCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}")
|
||||
env.Replace(**coms)
|
||||
|
||||
return env
|
||||
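The two hooks above exist because the Windows shell truncates command lines at roughly 8192 characters: when the joined `$SOURCES` or `$_CPPINCFLAGS` string gets too long, it is written to a temporary file and replaced by a single GCC-style `@file` response-file argument. A standalone illustration of the same idea; the object file names are made up, and actually running the link step would additionally require a `gcc` on `PATH`.

import tempfile

# Hypothetical object list far beyond the Windows command-line limit
objects = ["build/module_%03d.o" % i for i in range(500)]

# Put every argument into one file, the way _file_long_data() does,
# then pass the whole list to the linker through a single @file argument.
with tempfile.NamedTemporaryFile("w", suffix=".rsp", delete=False) as rsp:
    rsp.write(" ".join('"%s"' % obj for obj in objects))

cmd = ["gcc", "-o", "firmware.elf", "@" + rsp.name]
print("Would run:", " ".join(cmd))  # hand this to subprocess.run() in a real build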
294
platformio/builder/tools/platformio.py
Normal file
@@ -0,0 +1,294 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
from os import sep, walk
|
||||
from os.path import basename, dirname, isdir, join, realpath
|
||||
|
||||
from SCons.Action import Action
|
||||
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
|
||||
DefaultEnvironment, SConscript)
|
||||
from SCons.Util import case_sensitive_suffixes, is_Sequence
|
||||
|
||||
from platformio.util import glob_escape, pioversion_to_intstr
|
||||
|
||||
SRC_BUILD_EXT = ["c", "cc", "cpp", "S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
|
||||
def _append_pio_macros():
|
||||
env.AppendUnique(CPPDEFINES=[
|
||||
("PLATFORMIO",
|
||||
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())))
|
||||
])
|
||||
|
||||
_append_pio_macros()
|
||||
|
||||
# fix ASM handling under a case-insensitive OS
|
||||
if not case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
if "__debug" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessDebug()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
# remove base flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
# restore PIO macros if it was deleted by framework
|
||||
_append_pio_macros()
|
||||
|
||||
# build dependent libs
|
||||
deplibs = env.BuildProjectLibraries()
|
||||
|
||||
# append specified LD_SCRIPT
|
||||
if ("LDSCRIPT_PATH" in env
|
||||
and not any(["-Wl,-T" in f for f in env['LINKFLAGS']])):
|
||||
env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"'])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if env.get("LIBS", deplibs) and env.GetCompilerType() == "gcc":
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
# Handle SRC_BUILD_FLAGS
|
||||
env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
env.Append(
|
||||
LIBS=deplibs,
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PIOBUILDFILES=env.CollectBuildFiles(
|
||||
"$BUILDSRC_DIR",
|
||||
"$PROJECTSRC_DIR",
|
||||
src_filter=env.get("SRC_FILTER"),
|
||||
duplicate=False))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.Append(PIOBUILDFILES=env.ProcessTest())
|
||||
|
||||
if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
|
||||
checksize_action = Action(env.CheckUploadSize, "Checking program size")
|
||||
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
|
||||
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.AddPostAction(program, checksize_action)
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
for flag in parsed_flags.pop("CPPDEFINES"):
|
||||
if not is_Sequence(flag):
|
||||
env.Append(CPPDEFINES=flag)
|
||||
continue
|
||||
_key, _value = flag[:2]
|
||||
if '\"' in _value:
|
||||
_value = _value.replace('\"', '\\\"')
|
||||
elif _value.isdigit():
|
||||
_value = int(_value)
|
||||
elif _value.replace(".", "", 1).isdigit():
|
||||
_value = float(_value)
|
||||
env.Append(CPPDEFINES=(_key, _value))
|
||||
env.Append(**parsed_flags)
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(env.get(k, [])):
|
||||
if isdir(p):
|
||||
env[k][i] = realpath(p)
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(env.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -D option // Issue #191
|
||||
undefines = [
|
||||
u for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, basestring) and u.startswith("-U")
|
||||
]
|
||||
if undefines:
|
||||
for undef in undefines:
|
||||
env['CCFLAGS'].remove(undef)
|
||||
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
|
||||
|
||||
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
all_flags = []
|
||||
for items in parsed_flags.values():
|
||||
all_flags.extend(items)
|
||||
all_flags = set(all_flags)
|
||||
|
||||
for key in parsed_flags:
|
||||
cur_flags = set(env.Flatten(env.get(key, [])))
|
||||
for item in cur_flags & all_flags:
|
||||
while item in env[key]:
|
||||
env[key].remove(item)
|
||||
|
||||
|
||||
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
|
||||
if basename(file_).startswith("."):
|
||||
return False
|
||||
for e in ext:
|
||||
if file_.endswith(".%s" % e):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
|
||||
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
|
||||
|
||||
def _append_build_item(items, item, src_dir):
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
|
||||
items.add(item.replace(src_dir + sep, ""))
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
if isinstance(src_filter, (list, tuple)):
|
||||
src_filter = " ".join(src_filter)
|
||||
|
||||
matches = set()
|
||||
# correct fs directory separator
|
||||
src_filter = src_filter.replace("/", sep).replace("\\", sep)
|
||||
for (action, pattern) in SRC_FILTER_PATTERNS_RE.findall(src_filter):
|
||||
items = set()
|
||||
for item in glob(join(glob_escape(src_dir), pattern)):
|
||||
if isdir(item):
|
||||
for root, _, files in walk(item, followlinks=True):
|
||||
for f in files:
|
||||
_append_build_item(items, join(root, f), src_dir)
|
||||
else:
|
||||
_append_build_item(items, item, src_dir)
|
||||
if action == "+":
|
||||
matches |= items
|
||||
else:
|
||||
matches -= items
|
||||
return sorted(list(matches))
|
||||
|
||||
|
||||
def CollectBuildFiles(env,
|
||||
variant_dir,
|
||||
src_dir,
|
||||
src_filter=None,
|
||||
duplicate=False):
|
||||
sources = []
|
||||
variants = []
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
if src_dir.endswith(sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter):
|
||||
_reldir = dirname(item)
|
||||
_src_dir = join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
|
||||
if _var_dir not in variants:
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT):
|
||||
sources.append(env.File(join(_var_dir, basename(item))))
|
||||
|
||||
return sources
|
||||
|
||||
|
||||
def BuildFrameworks(env, frameworks):
|
||||
if not frameworks:
|
||||
return
|
||||
|
||||
if "BOARD" not in env:
|
||||
sys.stderr.write("Please specify `board` in `platformio.ini` to use "
|
||||
"with '%s' framework\n" % ", ".join(frameworks))
|
||||
env.Exit(1)
|
||||
|
||||
board_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
if frameworks == ["platformio"]:
|
||||
if board_frameworks:
|
||||
frameworks.insert(0, board_frameworks[0])
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: Please specify `board` in `platformio.ini`\n")
|
||||
env.Exit(1)
|
||||
|
||||
for f in frameworks:
|
||||
if f in ("arduino", "energia"):
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f))
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: This board doesn't support %s framework!\n" % f)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
lib = env.Clone()
|
||||
return lib.StaticLibrary(
|
||||
lib.subst(variant_dir),
|
||||
lib.CollectBuildFiles(variant_dir, src_dir, src_filter=src_filter))
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles(
|
||||
variant_dir, src_dir, src_filter=src_filter))
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(IsFileWithExt)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(BuildFrameworks)
|
||||
env.AddMethod(BuildLibrary)
|
||||
env.AddMethod(BuildSources)
|
||||
return env
|
||||
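`MatchSourceFiles` implements the `src_filter` mini-language: a space-separated sequence of `+<glob>` / `-<glob>` entries applied in order, where `+` adds the matching files to the result set and `-` removes them (with `SRC_FILTER_DEFAULT` excluding `.git` and `svn` folders). A small sketch of just the pattern-splitting step, reusing the same regex on a hypothetical filter string:

import re

SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")

# Hypothetical filter: take everything, then drop tests and one vendor folder
src_filter = "+<*> -<tests/> -<lib/vendor/legacy/>"

for action, pattern in SRC_FILTER_PATTERNS_RE.findall(src_filter):
    verb = "include" if action == "+" else "exclude"
    print("%-7s %s" % (verb, pattern))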
@@ -1,165 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import codecs
|
||||
import hashlib
|
||||
import os
|
||||
from time import time
|
||||
|
||||
from platformio import app, fs
|
||||
from platformio.compat import hashlib_encode_data
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class ContentCache:
|
||||
def __init__(self, namespace=None):
|
||||
self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
|
||||
self._db_path = os.path.join(self.cache_dir, "db.data")
|
||||
self._lockfile = None
|
||||
if not os.path.isdir(self.cache_dir):
|
||||
os.makedirs(self.cache_dir)
|
||||
|
||||
def __enter__(self):
|
||||
# cleanup obsolete items
|
||||
self.delete()
|
||||
return self
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def key_from_args(*args):
|
||||
h = hashlib.sha1()
|
||||
for arg in args:
|
||||
if arg:
|
||||
h.update(hashlib_encode_data(arg))
|
||||
return h.hexdigest()
|
||||
|
||||
def get_cache_path(self, key):
|
||||
assert "/" not in key and "\\" not in key
|
||||
key = str(key)
|
||||
assert len(key) > 3
|
||||
return os.path.join(self.cache_dir, key)
|
||||
|
||||
def get(self, key):
|
||||
cache_path = self.get_cache_path(key)
|
||||
if not os.path.isfile(cache_path):
|
||||
return None
|
||||
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
|
||||
return fp.read()
|
||||
|
||||
def set(self, key, data, valid):
|
||||
if not app.get_setting("enable_cache"):
|
||||
return False
|
||||
cache_path = self.get_cache_path(key)
|
||||
if os.path.isfile(cache_path):
|
||||
self.delete(key)
|
||||
if not data:
|
||||
return False
|
||||
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
|
||||
assert valid.endswith(tuple(tdmap))
|
||||
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
|
||||
|
||||
if not self._lock_dbindex():
|
||||
return False
|
||||
|
||||
if not os.path.isdir(os.path.dirname(cache_path)):
|
||||
os.makedirs(os.path.dirname(cache_path))
|
||||
try:
|
||||
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
with open(self._db_path, mode="a", encoding="utf8") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
|
||||
except UnicodeError:
|
||||
if os.path.isfile(cache_path):
|
||||
try:
|
||||
os.remove(cache_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return self._unlock_dbindex()
|
||||
|
||||
def delete(self, keys=None):
|
||||
"""Keys=None, delete expired items"""
|
||||
if not os.path.isfile(self._db_path):
|
||||
return None
|
||||
if not keys:
|
||||
keys = []
|
||||
if not isinstance(keys, list):
|
||||
keys = [keys]
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if "=" not in line:
|
||||
continue
|
||||
expire, fname = line.split("=")
|
||||
path = os.path.join(self.cache_dir, fname)
|
||||
try:
|
||||
if (
|
||||
time() < int(expire)
|
||||
and os.path.isfile(path)
|
||||
and path not in paths_for_delete
|
||||
):
|
||||
newlines.append(line)
|
||||
continue
|
||||
except ValueError:
|
||||
pass
|
||||
found = True
|
||||
if os.path.isfile(path):
|
||||
try:
|
||||
os.remove(path)
|
||||
if not os.listdir(os.path.dirname(path)):
|
||||
fs.rmtree(os.path.dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
return True
|
||||
|
||||
def clean(self):
|
||||
if not os.path.isdir(self.cache_dir):
|
||||
return
|
||||
fs.rmtree(self.cache_dir)
|
||||
|
||||
def _lock_dbindex(self):
|
||||
self._lockfile = LockFile(self.cache_dir)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except: # pylint: disable=bare-except
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _unlock_dbindex(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
return True
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
def cleanup_content_cache(namespace=None):
|
||||
with ContentCache(namespace) as cc:
|
||||
cc.clean()
|
||||
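`ContentCache` is a small file-backed cache: keys are SHA1 digests of the arguments, values live as individual files under the project cache directory, and a plain-text `db.data` index maps expiry timestamps to file names (the `valid` argument takes a number plus an `s`/`m`/`h`/`d` suffix). A hedged usage sketch; the import path is an assumption based on this hunk, the URL is made up, and `set()` only stores data when the `enable_cache` setting is on.

from platformio.cache import ContentCache  # import path is an assumption

url = "https://api.example.com/boards.json"  # hypothetical cached resource

with ContentCache("http") as cache:          # entering the context purges expired items
    key = cache.key_from_args(url, "GET")
    payload = cache.get(key)
    if payload is None:
        payload = '{"boards": []}'           # pretend this was just fetched
        cache.set(key, payload, valid="1d")  # keep for one day
    print("%d bytes served via cache dir %s" % (len(payload), cache.cache_dir))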
@@ -1,13 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -1,348 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
# pylint: disable=redefined-builtin,too-many-statements
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import app, exception, fs, util
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools import CheckToolFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
|
||||
|
||||
@click.command("check", short_help="Static Code Analysis")
|
||||
@click.option("-e", "--environment", multiple=True)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--pattern", multiple=True, hidden=True)
|
||||
@click.option("-f", "--src-filters", multiple=True)
|
||||
@click.option("--flags", multiple=True)
|
||||
@click.option(
|
||||
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option(
|
||||
"--fail-on-defect",
|
||||
multiple=True,
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
src_filters,
|
||||
pattern,
|
||||
flags,
|
||||
severity,
|
||||
silent,
|
||||
verbose,
|
||||
json_output,
|
||||
fail_on_defect,
|
||||
skip_packages,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# find project directory on upper level
|
||||
if os.path.isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
results = []
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(project_conf)
|
||||
config.validate(environment)
|
||||
|
||||
default_envs = config.default_envs()
|
||||
for envname in config.envs():
|
||||
skipenv = any(
|
||||
[
|
||||
environment and envname not in environment,
|
||||
not environment and default_envs and envname not in default_envs,
|
||||
]
|
||||
)
|
||||
|
||||
env_options = config.items(env=envname, as_dict=True)
|
||||
env_dump = []
|
||||
for k, v in env_options.items():
|
||||
if k not in ("platform", "framework", "board"):
|
||||
continue
|
||||
env_dump.append(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_src_filters = []
|
||||
for d in (
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
):
|
||||
try:
|
||||
default_src_filters.append("+<%s>" % os.path.relpath(d))
|
||||
except ValueError as exc:
|
||||
# On Windows if sources are located on a different logical drive
|
||||
if not json_output and not silent:
|
||||
click.echo(
|
||||
"Error: Project cannot be analyzed! The project folder `%s`"
|
||||
" is located on a different logical drive\n" % d
|
||||
)
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
env_src_filters = (
|
||||
src_filters
|
||||
or pattern
|
||||
or env_options.get(
|
||||
"check_src_filters",
|
||||
env_options.get("check_patterns", default_src_filters),
|
||||
)
|
||||
)
|
||||
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
silent=silent,
|
||||
src_filters=env_src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=(
|
||||
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity")
|
||||
),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
platform_packages=env_options.get("platform_packages"),
|
||||
)
|
||||
|
||||
for tool in config.get("env:" + envname, "check_tool"):
|
||||
if skipenv:
|
||||
results.append({"env": envname, "tool": tool})
|
||||
continue
|
||||
if not silent and not json_output:
|
||||
print_processing_header(tool, envname, env_dump)
|
||||
|
||||
ct = CheckToolFactory.new(
|
||||
tool, os.getcwd(), config, envname, tool_options
|
||||
)
|
||||
|
||||
result = {"env": envname, "tool": tool, "duration": time()}
|
||||
rc = ct.check(
|
||||
on_defect_callback=(
|
||||
None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
)
|
||||
)
|
||||
|
||||
result["defects"] = ct.get_defects()
|
||||
result["duration"] = time() - result["duration"]
|
||||
|
||||
result["succeeded"] = rc == 0
|
||||
if fail_on_defect:
|
||||
result["succeeded"] = rc == 0 and not any(
|
||||
DefectItem.SEVERITY_LABELS[d.severity] in fail_on_defect
|
||||
for d in result["defects"]
|
||||
)
|
||||
result["stats"] = collect_component_stats(result)
|
||||
results.append(result)
|
||||
|
||||
if verbose:
|
||||
click.echo("\n".join(repr(d) for d in result["defects"]))
|
||||
|
||||
if not json_output and not silent:
|
||||
if rc != 0:
|
||||
click.echo(
|
||||
"Error: %s failed to perform check! Please "
|
||||
"examine tool output in verbose mode." % tool
|
||||
)
|
||||
elif not result["defects"]:
|
||||
click.echo("No defects found")
|
||||
print_processing_footer(result)
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(results_to_json(results)))
|
||||
elif not silent:
|
||||
print_check_summary(results, verbose=verbose)
|
||||
|
||||
# Reset custom project config
|
||||
app.set_session_var("custom_project_conf", None)
|
||||
|
||||
command_failed = any(r.get("succeeded") is False for r in results)
|
||||
if command_failed:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
|
||||
|
||||
def results_to_json(raw):
|
||||
results = []
|
||||
for item in raw:
|
||||
if item.get("succeeded") is None:
|
||||
continue
|
||||
item.update(
|
||||
{
|
||||
"succeeded": bool(item.get("succeeded")),
|
||||
"defects": [d.as_dict() for d in item.get("defects", [])],
|
||||
}
|
||||
)
|
||||
results.append(item)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def print_processing_header(tool, envname, envdump):
|
||||
click.echo(
|
||||
"Checking %s > %s (%s)"
|
||||
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
|
||||
)
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
def print_processing_footer(result):
|
||||
is_failed = not result.get("succeeded")
|
||||
util.print_labeled_bar(
|
||||
"[%s] Took %.2f seconds"
|
||||
% (
|
||||
(
|
||||
click.style("FAILED", fg="red", bold=True)
|
||||
if is_failed
|
||||
else click.style("PASSED", fg="green", bold=True)
|
||||
),
|
||||
result["duration"],
|
||||
),
|
||||
is_error=is_failed,
|
||||
)
|
||||
|
||||
|
||||
def collect_component_stats(result):
|
||||
components = {}
|
||||
|
||||
def _append_defect(component, defect):
|
||||
if not components.get(component):
|
||||
components[component] = Counter()
|
||||
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
|
||||
|
||||
for defect in result.get("defects", []):
|
||||
component = os.path.dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = os.path.dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
||||
return components
|
||||
|
||||
|
||||
def print_defects_stats(results):
|
||||
if not results:
|
||||
return
|
||||
|
||||
component_stats = {}
|
||||
for r in results:
|
||||
for k, v in r.get("stats", {}).items():
|
||||
if not component_stats.get(k):
|
||||
component_stats[k] = Counter()
|
||||
component_stats[k].update(r["stats"][k])
|
||||
|
||||
if not component_stats:
|
||||
return
|
||||
|
||||
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
|
||||
severity_labels.reverse()
|
||||
tabular_data = []
|
||||
for k, v in component_stats.items():
|
||||
tool_defect = [v.get(s, 0) for s in severity_labels]
|
||||
tabular_data.append([k] + tool_defect)
|
||||
|
||||
total = ["Total"] + [sum(d) for d in list(zip(*tabular_data))[1:]]
|
||||
tabular_data.sort()
|
||||
tabular_data.append([]) # Empty line as delimiter
|
||||
tabular_data.append(total)
|
||||
|
||||
headers = ["Component"]
|
||||
headers.extend([label.upper() for label in severity_labels])
|
||||
headers = [click.style(h, bold=True) for h in headers]
|
||||
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
|
||||
click.echo()
|
||||
|
||||
|
||||
def print_check_summary(results, verbose=False):
|
||||
click.echo()
|
||||
|
||||
tabular_data = []
|
||||
succeeded_nums = 0
|
||||
failed_nums = 0
|
||||
duration = 0
|
||||
|
||||
print_defects_stats(results)
|
||||
|
||||
for result in results:
|
||||
duration += result.get("duration", 0)
|
||||
if result.get("succeeded") is False:
|
||||
failed_nums += 1
|
||||
status_str = click.style("FAILED", fg="red")
|
||||
elif result.get("succeeded") is None:
|
||||
status_str = "IGNORED"
|
||||
if not verbose:
|
||||
continue
|
||||
else:
|
||||
succeeded_nums += 1
|
||||
status_str = click.style("PASSED", fg="green")
|
||||
|
||||
tabular_data.append(
|
||||
(
|
||||
click.style(result["env"], fg="cyan"),
|
||||
result["tool"],
|
||||
status_str,
|
||||
util.humanize_duration_time(result.get("duration")),
|
||||
)
|
||||
)
|
||||
|
||||
click.echo(
|
||||
tabulate(
|
||||
tabular_data,
|
||||
headers=[
|
||||
click.style(s, bold=True)
|
||||
for s in ("Environment", "Tool", "Status", "Duration")
|
||||
],
|
||||
),
|
||||
err=failed_nums,
|
||||
)
|
||||
|
||||
util.print_labeled_bar(
|
||||
"%s%d succeeded in %s"
|
||||
% (
|
||||
"%d failed, " % failed_nums if failed_nums else "",
|
||||
succeeded_nums,
|
||||
util.humanize_duration_time(duration),
|
||||
),
|
||||
is_error=failed_nums,
|
||||
fg="red" if failed_nums else "green",
|
||||
)
|
||||
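With `--json-output` the command prints the list built by `results_to_json`: one entry per environment/tool pair carrying `succeeded`, `duration`, `stats` and the serialized `defects`. A sketch that shells out to the command and tallies defects per severity; the environment name is hypothetical and the script assumes `pio` is on `PATH` inside a project directory.

import json
import subprocess
from collections import Counter

proc = subprocess.run(
    ["pio", "check", "-e", "uno", "--json-output"],  # "uno" is a made-up env name
    capture_output=True, text=True, check=False,
)
results = json.loads(proc.stdout)

severities = Counter()
for result in results:
    for defect in result["defects"]:
        severities[defect["severity"]] += 1
print(dict(severities))  # e.g. {'medium': 3, 'low': 12}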
@@ -1,95 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
# pylint: disable=too-many-instance-attributes, redefined-builtin
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
|
||||
class DefectItem:
|
||||
SEVERITY_HIGH = 1
|
||||
SEVERITY_MEDIUM = 2
|
||||
SEVERITY_LOW = 4
|
||||
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
|
||||
|
||||
def __init__( # pylint: disable=too-many-positional-arguments
|
||||
self,
|
||||
severity,
|
||||
category,
|
||||
message,
|
||||
file=None,
|
||||
line=0,
|
||||
column=0,
|
||||
id=None,
|
||||
callstack=None,
|
||||
cwe=None,
|
||||
):
|
||||
assert severity in (self.SEVERITY_HIGH, self.SEVERITY_MEDIUM, self.SEVERITY_LOW)
|
||||
self.severity = severity
|
||||
self.category = category
|
||||
self.message = message
|
||||
self.line = int(line)
|
||||
self.column = int(column)
|
||||
self.callstack = callstack
|
||||
self.cwe = cwe
|
||||
self.id = id
|
||||
self.file = file or "unknown"
|
||||
if file.lower().startswith(get_project_dir().lower()):
|
||||
self.file = os.path.relpath(file, get_project_dir())
|
||||
|
||||
def __repr__(self):
|
||||
defect_color = None
|
||||
if self.severity == self.SEVERITY_HIGH:
|
||||
defect_color = "red"
|
||||
elif self.severity == self.SEVERITY_MEDIUM:
|
||||
defect_color = "yellow"
|
||||
|
||||
format_str = "{file}:{line}: [{severity}:{category}] {message} {id}"
|
||||
return format_str.format(
|
||||
severity=click.style(self.SEVERITY_LABELS[self.severity], fg=defect_color),
|
||||
category=click.style(self.category.lower(), fg=defect_color),
|
||||
file=click.style(self.file, bold=True),
|
||||
message=self.message,
|
||||
line=self.line,
|
||||
id="%s" % "[%s]" % self.id if self.id else "",
|
||||
)
|
||||
|
||||
def __or__(self, defect):
|
||||
return self.severity | defect.severity
|
||||
|
||||
@staticmethod
|
||||
def severity_to_int(label):
|
||||
for key, value in DefectItem.SEVERITY_LABELS.items():
|
||||
if label == value:
|
||||
return key
|
||||
raise PlatformioException("Unknown severity label -> %s" % label)
|
||||
|
||||
def as_dict(self):
|
||||
return {
|
||||
"severity": self.SEVERITY_LABELS[self.severity],
|
||||
"category": self.category,
|
||||
"message": self.message,
|
||||
"file": os.path.abspath(self.file),
|
||||
"line": self.line,
|
||||
"column": self.column,
|
||||
"callstack": self.callstack,
|
||||
"id": self.id,
|
||||
"cwe": self.cwe,
|
||||
}
|
||||
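`DefectItem` stores severity as a small bitmask (`high=1`, `medium=2`, `low=4`), which is what the `SEVERITY_LABELS` lookups and the `__or__` composition above rely on. A minimal construction sketch; the import path is the one used by the `check` command earlier in this diff, and the file and message are invented.

from platformio.check.defect import DefectItem

defect = DefectItem(
    severity=DefectItem.SEVERITY_MEDIUM,
    category="style",
    message="variable 'tmp' is assigned but never used",
    file="src/main.cpp",    # hypothetical source file
    line=42,
    id="unusedVariable",
)
print(repr(defect))                  # src/main.cpp:42: [medium:style] ... [unusedVariable]
print(defect.as_dict()["severity"])  # "medium"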
@@ -1,33 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import exception
|
||||
from platformio.check.tools.clangtidy import ClangtidyCheckTool
|
||||
from platformio.check.tools.cppcheck import CppcheckCheckTool
|
||||
from platformio.check.tools.pvsstudio import PvsStudioCheckTool
|
||||
|
||||
|
||||
class CheckToolFactory:
|
||||
@staticmethod
|
||||
def new(tool, project_dir, config, envname, options):
|
||||
cls = None
|
||||
if tool == "cppcheck":
|
||||
cls = CppcheckCheckTool
|
||||
elif tool == "clangtidy":
|
||||
cls = ClangtidyCheckTool
|
||||
elif tool == "pvs-studio":
|
||||
cls = PvsStudioCheckTool
|
||||
else:
|
||||
raise exception.PlatformioException("Unknown check tool `%s`" % tool)
|
||||
return cls(project_dir, config, envname, options)
|
||||
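A hypothetical call-site sketch for the factory above. The module path platformio.check.tools and the exact option keys are assumptions inferred from CheckToolBase, not taken from this diff:

from platformio.check.tools import CheckToolFactory  # assumed module path
from platformio.project.config import ProjectConfig

options = {
    "severity": ["high", "medium"],          # cast to numeric ids by CheckToolBase
    "flags": ["cppcheck: --inline-suppr"],
    "src_filters": ["+<src/>"],
    "platform_packages": None,
    "skip_packages": False,
    "verbose": False,
}
tool = CheckToolFactory.new("cppcheck", ".", ProjectConfig(), "uno", options)
failed = tool.check(on_defect_callback=lambda defect: print(repr(defect)))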
@@ -1,267 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs, proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.helpers import load_build_metadata
|
||||
|
||||
|
||||
class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, project_dir, config, envname, options):
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
self.options = options
|
||||
self.project_dir = project_dir
|
||||
self.cc_flags = []
|
||||
self.cxx_flags = []
|
||||
self.cpp_includes = []
|
||||
self.cpp_defines = []
|
||||
self.toolchain_defines = []
|
||||
self._tmp_files = []
|
||||
self.cc_path = None
|
||||
self.cxx_path = None
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data()
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
self.options["severity"] = [
|
||||
DefectItem.SEVERITY_LOW,
|
||||
DefectItem.SEVERITY_MEDIUM,
|
||||
DefectItem.SEVERITY_HIGH,
|
||||
]
|
||||
# cast severity labels to numeric ids
|
||||
self.options["severity"] = [
|
||||
s if isinstance(s, int) else DefectItem.severity_to_int(s)
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self):
|
||||
data = load_build_metadata(self.project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = data.get("cc_flags", [])
|
||||
self.cxx_flags = data.get("cxx_flags", [])
|
||||
self.cpp_includes = self._dump_includes(data.get("includes", {}))
|
||||
self.cpp_defines = data.get("defines", [])
|
||||
self.cc_path = data.get("cc_path")
|
||||
self.cxx_path = data.get("cxx_path")
|
||||
self.toolchain_defines = self._get_toolchain_defines()
|
||||
|
||||
def get_tool_dir(self, pkg_name):
|
||||
for spec in self.options["platform_packages"] or []:
|
||||
spec = PackageSpec(spec)
|
||||
if spec.name == pkg_name:
|
||||
return get_core_package_dir(pkg_name, spec=spec)
|
||||
return get_core_package_dir(pkg_name)
|
||||
|
||||
def get_flags(self, tool):
|
||||
result = []
|
||||
flags = self.options.get("flags") or []
|
||||
for flag in flags:
|
||||
if ":" not in flag or flag.startswith("-"):
|
||||
result.extend([f for f in flag.split(" ") if f])
|
||||
elif flag.startswith("%s:" % tool):
|
||||
result.extend([f for f in flag.split(":", 1)[1].split(" ") if f])
|
||||
|
||||
return result
|
||||
|
||||
def _get_toolchain_defines(self):
|
||||
def _extract_defines(language, includes_file):
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
defines = []
|
||||
cmd = "echo | %s -x %s %s %s -dM -E -" % (
|
||||
self.cc_path,
|
||||
language,
|
||||
" ".join(
|
||||
[f for f in build_flags if f.startswith(("-m", "-f", "-std"))]
|
||||
),
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
|
||||
if result["returncode"] != 0:
|
||||
click.echo("Warning: Failed to extract toolchain defines!")
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
defines.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
defines.append(tokens[1])
|
||||
|
||||
return defines
|
||||
|
||||
incflags_file = self._long_includes_hook(self.cpp_includes)
|
||||
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
|
||||
def _long_includes_hook(self, includes):
|
||||
data = []
|
||||
for inc in includes:
|
||||
data.append('-I"%s"' % fs.to_unix_path(inc))
|
||||
|
||||
return '@"%s"' % self._create_tmp_file(" ".join(data))
|
||||
|
||||
@staticmethod
|
||||
def _dump_includes(includes_map):
|
||||
result = []
|
||||
for includes in includes_map.values():
|
||||
for include in includes:
|
||||
if include not in result:
|
||||
result.append(include)
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def is_flag_set(flag, flags):
|
||||
return any(flag in f for f in flags)
|
||||
|
||||
def get_defects(self):
|
||||
return self._defects
|
||||
|
||||
def configure_command(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def on_tool_output(self, line):
|
||||
line = self.tool_output_filter(line)
|
||||
if not line:
|
||||
return
|
||||
|
||||
defect = self.parse_defect(line)
|
||||
|
||||
if not isinstance(defect, DefectItem):
|
||||
if self.options.get("verbose"):
|
||||
click.echo(line)
|
||||
return
|
||||
|
||||
if defect.severity not in self.options["severity"]:
|
||||
return
|
||||
|
||||
self._defects.append(defect)
|
||||
if self._on_defect_callback:
|
||||
self._on_defect_callback(defect)
|
||||
|
||||
@staticmethod
|
||||
def tool_output_filter(line):
|
||||
return line
|
||||
|
||||
@staticmethod
|
||||
def parse_defect(raw_line):
|
||||
return raw_line
|
||||
|
||||
def clean_up(self):
|
||||
for f in self._tmp_files:
|
||||
if os.path.isfile(f):
|
||||
os.remove(f)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return cmd_result["returncode"] == 0
|
||||
|
||||
def execute_check_cmd(self, cmd):
|
||||
result = proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
if not self.is_check_successful(result):
|
||||
click.echo(
|
||||
"\nError: Failed to execute check command! Exited with code %d."
|
||||
% result["returncode"]
|
||||
)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(project_dir, src_filters):
|
||||
c_extension = (".c",)
|
||||
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
|
||||
header_extensions = (".h", ".hh", ".hpp", ".hxx")
|
||||
|
||||
result = {"c": [], "c++": [], "headers": []}
|
||||
|
||||
def _add_file(path):
|
||||
if path.endswith(header_extensions):
|
||||
result["headers"].append(os.path.abspath(path))
|
||||
elif path.endswith(c_extension):
|
||||
result["c"].append(os.path.abspath(path))
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.abspath(path))
|
||||
|
||||
src_filters = normalize_src_filters(src_filters)
|
||||
for f in fs.match_src_files(project_dir, src_filters):
|
||||
_add_file(f)
|
||||
|
||||
return result
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
cmd = self.configure_command()
|
||||
if cmd:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
click.echo("Error: Couldn't configure command")
|
||||
self._bad_input = True
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
def normalize_src_filters(src_filters):
|
||||
def _normalize(src_filters):
|
||||
return (
|
||||
src_filters
|
||||
if src_filters.startswith(("+<", "-<"))
|
||||
else "+<%s>" % src_filters
|
||||
)
|
||||
|
||||
if isinstance(src_filters, (list, tuple)):
|
||||
return " ".join([_normalize(f) for f in src_filters])
|
||||
|
||||
return _normalize(src_filters)
|
||||
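Two conventions from the base class above are worth spelling out: check_flags entries may be scoped to a single tool with a "tool:" prefix (get_flags), and bare paths are wrapped into src_filter syntax (normalize_src_filters). A small illustration, assuming only that the helper stays importable from platformio.check.tools.base:

from platformio.check.tools.base import normalize_src_filters

print(normalize_src_filters("src"))                    # -> "+<src>"
print(normalize_src_filters(["+<src/>", "-<lib/>"]))   # -> "+<src/> -<lib/>"

# For get_flags(), a platformio.ini entry such as
#   check_flags =
#       -DNO_TELEMETRY
#       cppcheck: --inline-suppr
#       clangtidy: --fix-errors
# yields ["-DNO_TELEMETRY", "--inline-suppr"] for "cppcheck" and
# ["-DNO_TELEMETRY", "--fix-errors"] for "clangtidy".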
@@ -1,90 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
from os.path import join
|
||||
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
|
||||
|
||||
class ClangtidyCheckTool(CheckToolBase):
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
|
||||
return ""
|
||||
|
||||
if "[CommonOptionsParser]" in line:
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
if any(d in line for d in ("note: ", "error: ", "warning: ")):
|
||||
return line
|
||||
|
||||
return ""
|
||||
|
||||
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
|
||||
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
|
||||
if not match:
|
||||
return raw_line
|
||||
|
||||
file_, line, column, category, message, defect_id = match.groups()
|
||||
|
||||
severity = DefectItem.SEVERITY_LOW
|
||||
if category == "error":
|
||||
severity = DefectItem.SEVERITY_HIGH
|
||||
elif category == "warning":
|
||||
severity = DefectItem.SEVERITY_MEDIUM
|
||||
|
||||
return DefectItem(severity, category, message, file_, line, column, defect_id)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Note: Clang-Tidy returns 1 for non-critical compilation errors,
|
||||
# so 0 and 1 are the only acceptable values
|
||||
return cmd_result["returncode"] < 2
|
||||
|
||||
def configure_command(self):
|
||||
tool_path = join(self.get_tool_dir("tool-clangtidy"), "clang-tidy")
|
||||
|
||||
cmd = [tool_path, "--quiet"]
|
||||
flags = self.get_flags("clangtidy")
|
||||
if not (
|
||||
self.is_flag_set("--checks", flags) or self.is_flag_set("--config", flags)
|
||||
):
|
||||
cmd.append("--checks=*")
|
||||
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
|
||||
src_files = []
|
||||
for items in project_files.values():
|
||||
src_files.extend(items)
|
||||
|
||||
cmd.extend(flags + src_files + ["--"])
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
|
||||
)
|
||||
|
||||
includes = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
includes.append(inc)
|
||||
|
||||
cmd.extend(["-I%s" % inc for inc in includes])
|
||||
|
||||
return cmd
|
||||
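A worked example of the regular expression used by parse_defect() above, run against a typical clang-tidy diagnostic line (the sample line is invented):

import re

line = "src/main.cpp:42:10: warning: unused variable 'x' [clang-diagnostic-unused-variable]"
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", line)
file_, line_no, column, category, message, defect_id = match.groups()
# file_     -> "src/main.cpp"
# line_no   -> "42", column -> "10"
# category  -> "warning"   (mapped to DefectItem.SEVERITY_MEDIUM above)
# message   -> "unused variable 'x' "
# defect_id -> "clang-diagnostic-unused-variable"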
@@ -1,271 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
|
||||
|
||||
class CppcheckCheckTool(CheckToolBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._field_delimiter = "<&PIO&>"
|
||||
self._buffer = ""
|
||||
self.defect_fields = [
|
||||
"severity",
|
||||
"message",
|
||||
"file",
|
||||
"line",
|
||||
"column",
|
||||
"callstack",
|
||||
"cwe",
|
||||
"id",
|
||||
]
|
||||
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if (
|
||||
not self.options.get("verbose")
|
||||
and "--suppress=unmatchedSuppression:" in line
|
||||
):
|
||||
return ""
|
||||
|
||||
if any(
|
||||
msg in line
|
||||
for msg in (
|
||||
"No C or C++ source files found",
|
||||
"unrecognized command line option",
|
||||
"there was an internal error",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
|
||||
return line
|
||||
|
||||
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
|
||||
if self._field_delimiter not in raw_line:
|
||||
return None
|
||||
|
||||
self._buffer += raw_line
|
||||
if any(f not in self._buffer for f in self.defect_fields):
|
||||
return None
|
||||
|
||||
args = {}
|
||||
for field in self._buffer.split(self._field_delimiter):
|
||||
field = field.strip().replace('"', "")
|
||||
name, value = field.split("=", 1)
|
||||
args[name] = value
|
||||
|
||||
args["category"] = args["severity"]
|
||||
if args["severity"] == "error":
|
||||
args["severity"] = DefectItem.SEVERITY_HIGH
|
||||
elif args["severity"] == "warning":
|
||||
args["severity"] = DefectItem.SEVERITY_MEDIUM
|
||||
else:
|
||||
args["severity"] = DefectItem.SEVERITY_LOW
|
||||
|
||||
# Skip defects found in third-party software, but keep in mind that such defects
|
||||
# might break the checking process, so defects from project files are not reported
|
||||
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
|
||||
if (
|
||||
args.get("file", "")
|
||||
.lower()
|
||||
.startswith(self.config.get("platformio", "packages_dir").lower())
|
||||
):
|
||||
if args["id"] in breaking_defect_ids:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(
|
||||
"Error: Found a breaking defect '%s' in %s:%s\n"
|
||||
"Please note: check results might not be valid!\n"
|
||||
"Try adding --skip-packages"
|
||||
% (args.get("message"), args.get("file"), args.get("line"))
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
return DefectItem(**args)
|
||||
|
||||
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(self.get_tool_dir("tool-cppcheck"), "cppcheck")
|
||||
|
||||
cmd = [
|
||||
tool_path,
|
||||
"--addon-python=%s" % proc.get_pythonexe_path(),
|
||||
"--error-exitcode=3",
|
||||
"--verbose" if self.options.get("verbose") else "--quiet",
|
||||
]
|
||||
|
||||
cmd.append(
|
||||
'--template="%s"'
|
||||
% self._field_delimiter.join(
|
||||
["{0}={{{0}}}".format(f) for f in self.defect_fields]
|
||||
)
|
||||
)
|
||||
|
||||
flags = self.get_flags("cppcheck")
|
||||
if not flags:
|
||||
# by default user can suppress reporting individual defects
|
||||
# directly in code // cppcheck-suppress warningID
|
||||
cmd.append("--inline-suppr")
|
||||
if not self.is_flag_set("--platform", flags):
|
||||
cmd.append("--platform=unspecified")
|
||||
if not self.is_flag_set("--enable", flags):
|
||||
enabled_checks = [
|
||||
"warning",
|
||||
"style",
|
||||
"performance",
|
||||
"portability",
|
||||
"unusedFunction",
|
||||
]
|
||||
cmd.append("--enable=%s" % ",".join(enabled_checks))
|
||||
|
||||
if not self.is_flag_set("--language", flags):
|
||||
cmd.append("--language=" + language)
|
||||
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
|
||||
if not self.is_flag_set("--std", flags):
|
||||
# Try to guess the standard version from the build flags
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
cmd.append("-" + self.convert_language_standard(flag))
|
||||
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
|
||||
)
|
||||
|
||||
cmd.extend(flags)
|
||||
|
||||
cmd.extend(
|
||||
"--include=" + inc
|
||||
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
|
||||
)
|
||||
cmd.append("--includes-file=%s" % self._generate_inc_file())
|
||||
cmd.append('"%s"' % src_file)
|
||||
|
||||
return cmd
|
||||
|
||||
@staticmethod
|
||||
def get_forced_includes(build_flags, includes):
|
||||
def _extract_filepath(flag, include_options, build_flags):
|
||||
path = ""
|
||||
for option in include_options:
|
||||
if not flag.startswith(option):
|
||||
continue
|
||||
if flag.split(option)[1].strip():
|
||||
path = flag.split(option)[1].strip()
|
||||
elif build_flags.index(flag) + 1 < len(build_flags):
|
||||
path = build_flags[build_flags.index(flag) + 1]
|
||||
return path
|
||||
|
||||
def _search_include_dir(filepath, include_paths):
|
||||
for inc_path in include_paths:
|
||||
path = os.path.join(inc_path, filepath)
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
return ""
|
||||
|
||||
result = []
|
||||
include_options = ("-include", "-imacros")
|
||||
for f in build_flags:
|
||||
if f.startswith(include_options):
|
||||
filepath = _extract_filepath(f, include_options, build_flags)
|
||||
if not os.path.isabs(filepath):
|
||||
filepath = _search_include_dir(filepath, includes)
|
||||
if os.path.isfile(filepath):
|
||||
result.append(filepath)
|
||||
|
||||
return result
|
||||
|
||||
def _generate_src_file(self, src_files):
|
||||
return self._create_tmp_file("\n".join(src_files))
|
||||
|
||||
def _generate_inc_file(self):
|
||||
result = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
result.append(inc)
|
||||
return self._create_tmp_file("\n".join(result))
|
||||
|
||||
def clean_up(self):
|
||||
super().clean_up()
|
||||
|
||||
# delete temporary dump files generated by addons
|
||||
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
|
||||
return
|
||||
|
||||
for files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).values():
|
||||
for f in files:
|
||||
dump_file = f + ".dump"
|
||||
if os.path.isfile(dump_file):
|
||||
os.remove(dump_file)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Cppcheck is configured to return '3' if a defect is found
|
||||
return cmd_result["returncode"] in (0, 3)
|
||||
|
||||
@staticmethod
|
||||
def convert_language_standard(flag):
|
||||
cpp_standards_map = {
|
||||
"0x": "11",
|
||||
"1y": "14",
|
||||
"1z": "17",
|
||||
"2a": "20",
|
||||
}
|
||||
|
||||
standard = flag[-2:]
|
||||
# Note: GNU extensions are not supported and are converted to the regular standards
|
||||
return flag.replace("gnu", "c").replace(
|
||||
standard, cpp_standards_map.get(standard, standard)
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
src_files_scope = ("c", "c++")
|
||||
if not any(project_files[t] for t in src_files_scope):
|
||||
click.echo("Error: Nothing to check.")
|
||||
return True
|
||||
|
||||
for scope, files in project_files.items():
|
||||
if scope not in src_files_scope:
|
||||
continue
|
||||
for src_file in files:
|
||||
cmd = self.configure_command(scope, src_file)
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
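convert_language_standard() above maps GNU dialect flags to the plain standards cppcheck understands; since it is a staticmethod, its behaviour can be checked in isolation (the import path matches the factory module earlier in this diff):

from platformio.check.tools.cppcheck import CppcheckCheckTool

assert CppcheckCheckTool.convert_language_standard("-std=gnu++1z") == "-std=c++17"
assert CppcheckCheckTool.convert_language_standard("-std=gnu++0x") == "-std=c++11"
assert CppcheckCheckTool.convert_language_standard("-std=gnu11") == "-std=c11"
# configure_command() prefixes one more "-", producing e.g. "--std=c++17" for cppcheck.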
@@ -1,251 +0,0 @@
|
||||
# Copyright (c) 2020-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from xml.etree.ElementTree import fromstring
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._tmp_dir = tempfile.mkdtemp(prefix="piocheck")
|
||||
self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i"
|
||||
self._tmp_output_file = self._generate_tmp_file_path() + ".pvs"
|
||||
self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg"
|
||||
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
|
||||
self.tool_path = os.path.join(
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"x64" if IS_WINDOWS else "bin",
|
||||
"pvs-studio",
|
||||
)
|
||||
|
||||
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"exclude-path = "
|
||||
+ self.config.get("platformio", "packages_dir").replace("\\", "/")
|
||||
)
|
||||
|
||||
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
" ".join(
|
||||
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
|
||||
)
|
||||
)
|
||||
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if any(
|
||||
err_msg in line.lower()
|
||||
for err_msg in (
|
||||
"license was not entered",
|
||||
"license information is incorrect",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
def _process_defects(self, defects):
|
||||
for defect in defects:
|
||||
if not isinstance(defect, DefectItem):
|
||||
return
|
||||
if defect.severity not in self.options["severity"]:
|
||||
return
|
||||
self._defects.append(defect)
|
||||
if self._on_defect_callback:
|
||||
self._on_defect_callback(defect)
|
||||
|
||||
def _demangle_report(self, output_file):
|
||||
converter_tool = os.path.join(
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
|
||||
)
|
||||
|
||||
cmd = (
|
||||
converter_tool,
|
||||
"-t",
|
||||
"xml",
|
||||
output_file,
|
||||
"-m",
|
||||
"cwe",
|
||||
"-m",
|
||||
"misra",
|
||||
"-a",
|
||||
# Enable all possible analyzers and defect levels
|
||||
"GA:1,2,3;64:1,2,3;OP:1,2,3;CS:1,2,3;MISRA:1,2,3",
|
||||
"--cerr",
|
||||
)
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
if result["returncode"] != 0:
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result["err"]
|
||||
|
||||
def parse_defects(self, output_file):
|
||||
defects = []
|
||||
|
||||
report = self._demangle_report(output_file)
|
||||
if not report:
|
||||
self._bad_input = True
|
||||
return []
|
||||
|
||||
try:
|
||||
defects_data = fromstring(report)
|
||||
except: # pylint: disable=bare-except
|
||||
click.echo("Error: Couldn't decode generated report!")
|
||||
self._bad_input = True
|
||||
return []
|
||||
|
||||
for table in defects_data.iter("PVS-Studio_Analysis_Log"):
|
||||
message = table.find("Message").text
|
||||
category = table.find("ErrorType").text
|
||||
line = table.find("Line").text
|
||||
file_ = table.find("File").text
|
||||
defect_id = table.find("ErrorCode").text
|
||||
cwe = table.find("CWECode")
|
||||
cwe_id = None
|
||||
if cwe is not None:
|
||||
cwe_id = cwe.text.lower().replace("cwe-", "")
|
||||
misra = table.find("MISRA")
|
||||
if misra is not None:
|
||||
message += " [%s]" % misra.text
|
||||
|
||||
severity = DefectItem.SEVERITY_LOW
|
||||
if category == "error":
|
||||
severity = DefectItem.SEVERITY_HIGH
|
||||
elif category == "warning":
|
||||
severity = DefectItem.SEVERITY_MEDIUM
|
||||
|
||||
defects.append(
|
||||
DefectItem(
|
||||
severity, category, message, file_, line, id=defect_id, cwe=cwe_id
|
||||
)
|
||||
)
|
||||
|
||||
return defects
|
||||
|
||||
def configure_command(self, src_file): # pylint: disable=arguments-differ
|
||||
if os.path.isfile(self._tmp_output_file):
|
||||
os.remove(self._tmp_output_file)
|
||||
|
||||
if not os.path.isfile(self._tmp_preprocessed_file):
|
||||
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
|
||||
return ""
|
||||
|
||||
cmd = [
|
||||
self.tool_path,
|
||||
"--skip-cl-exe",
|
||||
"yes",
|
||||
"--language",
|
||||
"C" if src_file.endswith(".c") else "C++",
|
||||
"--preprocessor",
|
||||
"gcc",
|
||||
"--cfg",
|
||||
self._tmp_cfg_file,
|
||||
"--source-file",
|
||||
src_file,
|
||||
"--i-file",
|
||||
self._tmp_preprocessed_file,
|
||||
"--output-file",
|
||||
self._tmp_output_file,
|
||||
]
|
||||
|
||||
flags = self.get_flags("pvs-studio")
|
||||
if not self.is_flag_set("--platform", flags):
|
||||
cmd.append("--platform=arm")
|
||||
cmd.extend(flags)
|
||||
|
||||
return cmd
|
||||
|
||||
def _generate_tmp_file_path(self):
|
||||
# pylint: disable=protected-access
|
||||
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
|
||||
|
||||
def _prepare_preprocessed_file(self, src_file):
|
||||
if os.path.isfile(self._tmp_preprocessed_file):
|
||||
os.remove(self._tmp_preprocessed_file)
|
||||
|
||||
flags = self.cxx_flags
|
||||
compiler = self.cxx_path
|
||||
if src_file.endswith(".c"):
|
||||
flags = self.cc_flags
|
||||
compiler = self.cc_path
|
||||
|
||||
cmd = [
|
||||
compiler,
|
||||
'"%s"' % src_file,
|
||||
"-E",
|
||||
"-o",
|
||||
'"%s"' % self._tmp_preprocessed_file,
|
||||
]
|
||||
cmd.extend([f for f in flags if f])
|
||||
cmd.extend(['"-D%s"' % d.replace('"', '\\"') for d in self.cpp_defines])
|
||||
cmd.append('@"%s"' % self._tmp_cmd_file)
|
||||
|
||||
# Explicitly specify C++ as the language used in .ino files
|
||||
if src_file.endswith(".ino"):
|
||||
cmd.insert(1, "-xc++")
|
||||
|
||||
result = proc.exec_command(" ".join(cmd), shell=True)
|
||||
if result["returncode"] != 0 or result["err"]:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
def clean_up(self):
|
||||
super().clean_up()
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return (
|
||||
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
for src_file in files:
|
||||
self._prepare_preprocessed_file(src_file)
|
||||
cmd = self.configure_command(src_file)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = self.execute_check_cmd(cmd)
|
||||
if result["returncode"] != 0:
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
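parse_defects() above walks the XML produced by plog-converter. A minimal sketch of the element names it expects; the sample report and its root element are invented:

from xml.etree.ElementTree import fromstring

report = """<NewDataSet>
  <PVS-Studio_Analysis_Log>
    <ErrorType>error</ErrorType>
    <ErrorCode>V501</ErrorCode>
    <Message>Identical sub-expressions on both sides of the operator.</Message>
    <File>src/main.c</File>
    <Line>12</Line>
    <CWECode>CWE-571</CWECode>
  </PVS-Studio_Analysis_Log>
</NewDataSet>"""

for node in fromstring(report).iter("PVS-Studio_Analysis_Log"):
    print(node.find("ErrorCode").text, node.find("File").text, node.find("Line").text)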
@@ -1,110 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.Group):
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._pio_root_path = Path(__file__).parent
|
||||
self._pio_cmd_aliases = dict(package="pkg")
|
||||
|
||||
def _find_pio_commands(self):
|
||||
def _to_module_path(p):
|
||||
return (
|
||||
"platformio." + ".".join(p.relative_to(self._pio_root_path).parts)[:-3]
|
||||
)
|
||||
|
||||
result = {}
|
||||
for p in self._pio_root_path.rglob("cli.py"):
|
||||
# skip this module
|
||||
if p.parent == self._pio_root_path:
|
||||
continue
|
||||
cmd_name = p.parent.name
|
||||
result[self._pio_cmd_aliases.get(cmd_name, cmd_name)] = _to_module_path(p)
|
||||
|
||||
# find legacy commands
|
||||
for p in (self._pio_root_path / "commands").iterdir():
|
||||
if p.name.startswith("_"):
|
||||
continue
|
||||
if (p / "command.py").is_file():
|
||||
result[p.name] = _to_module_path(p / "command.py")
|
||||
elif p.name.endswith(".py"):
|
||||
result[p.name[:-3]] = _to_module_path(p)
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def in_silence():
|
||||
args = PlatformioCLI.leftover_args
|
||||
return args and any(
|
||||
[
|
||||
args[0] == "debug" and "--interpreter" in " ".join(args),
|
||||
args[0] == "upgrade",
|
||||
"--json-output" in args,
|
||||
"--version" in args,
|
||||
]
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def reveal_cmd_path_args(cls, ctx):
|
||||
result = []
|
||||
group = ctx.command
|
||||
args = cls.leftover_args[::]
|
||||
while args:
|
||||
cmd_name = args.pop(0)
|
||||
next_group = group.get_command(ctx, cmd_name)
|
||||
if next_group:
|
||||
group = next_group
|
||||
result.append(cmd_name)
|
||||
if not hasattr(group, "get_command"):
|
||||
break
|
||||
return result
|
||||
|
||||
def invoke(self, ctx):
|
||||
PlatformioCLI.leftover_args = ctx.args
|
||||
if hasattr(ctx, "protected_args"):
|
||||
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
|
||||
return super().invoke(ctx)
|
||||
|
||||
def list_commands(self, ctx): # pylint: disable=unused-argument
|
||||
return sorted(list(self._find_pio_commands()))
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
commands = self._find_pio_commands()
|
||||
if cmd_name not in commands:
|
||||
return self._handle_obsolate_command(ctx, cmd_name)
|
||||
module = importlib.import_module(commands[cmd_name])
|
||||
return getattr(module, "cli")
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(ctx, cmd_name):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if cmd_name == "init":
|
||||
from platformio.project.commands.init import project_init_cmd
|
||||
|
||||
return project_init_cmd
|
||||
|
||||
if cmd_name == "package":
|
||||
from platformio.package.cli import cli
|
||||
|
||||
return cli
|
||||
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
|
||||
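PlatformioCLI above discovers cli.py modules at runtime and imports a subcommand only when it is requested. A generic, self-contained sketch of the same lazy-loading click pattern (simplified; not PlatformIO's actual module layout):

import importlib

import click


class LazyGroup(click.Group):
    def __init__(self, *args, command_map=None, **kwargs):
        super().__init__(*args, **kwargs)
        # command name -> dotted module path that exposes a `cli` attribute
        self._command_map = command_map or {}

    def list_commands(self, ctx):
        return sorted(self._command_map)

    def get_command(self, ctx, cmd_name):
        if cmd_name not in self._command_map:
            return None
        module = importlib.import_module(self._command_map[cmd_name])
        return module.cli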
72
platformio/commands/account.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PIO Account")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("register", short_help="Create new PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
def account_register(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("login", short_help="Log in to PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
@click.option("-p", "--password")
|
||||
def account_login(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("logout", short_help="Log out of PIO Account")
|
||||
def account_logout():
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
@click.option("--old-password")
|
||||
@click.option("--new-password")
|
||||
def account_password(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password")
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("forgot", short_help="Forgot password")
|
||||
@click.option("-u", "--username")
|
||||
def account_forgot(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("show", short_help="PIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
@@ -13,16 +13,13 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import shutil
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.managers.platform import PlatformManager
|
||||
|
||||
|
||||
@click.command("boards", short_help="Board Explorer")
|
||||
@click.command("boards", short_help="Embedded Board Explorer")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--installed", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@@ -32,54 +29,64 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
|
||||
grpboards = {}
|
||||
for board in _get_boards(installed):
|
||||
if query and not any(
|
||||
query.lower() in str(board.get(k, "")).lower()
|
||||
for k in ("id", "name", "mcu", "vendor", "platform", "frameworks")
|
||||
):
|
||||
if query and query.lower() not in json.dumps(board).lower():
|
||||
continue
|
||||
if board["platform"] not in grpboards:
|
||||
grpboards[board["platform"]] = []
|
||||
grpboards[board["platform"]].append(board)
|
||||
if board['platform'] not in grpboards:
|
||||
grpboards[board['platform']] = []
|
||||
grpboards[board['platform']].append(board)
|
||||
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
for platform, boards in sorted(grpboards.items()):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
click.echo("=" * terminal_width)
|
||||
click.echo("-" * terminal_width)
|
||||
print_boards(boards)
|
||||
return True
|
||||
|
||||
|
||||
def print_boards(boards):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
|
||||
" {flash:<7} {ram:<6} {name}")
|
||||
click.echo(
|
||||
tabulate(
|
||||
[
|
||||
(
|
||||
click.style(b["id"], fg="cyan"),
|
||||
b["mcu"],
|
||||
"%dMHz" % (b["fcpu"] / 1000000),
|
||||
fs.humanize_file_size(b["rom"]),
|
||||
fs.humanize_file_size(b["ram"]),
|
||||
b["name"],
|
||||
)
|
||||
for b in boards
|
||||
],
|
||||
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"],
|
||||
)
|
||||
)
|
||||
click.echo(BOARDLIST_TPL.format(
|
||||
type=click.style("ID", fg="cyan"),
|
||||
mcu="MCU",
|
||||
frequency="Frequency",
|
||||
flash="Flash",
|
||||
ram="RAM",
|
||||
name="Name"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for board in boards:
|
||||
ram_size = board['ram']
|
||||
if ram_size >= 1024:
|
||||
if ram_size % 1024:
|
||||
ram_size = "%.1fkB" % (ram_size / 1024.0)
|
||||
else:
|
||||
ram_size = "%dkB" % (ram_size / 1024)
|
||||
else:
|
||||
ram_size = "%dB" % ram_size
|
||||
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style(board['id'], fg="cyan"),
|
||||
mcu=board['mcu'],
|
||||
frequency="%dMhz" % (board['fcpu'] / 1000000),
|
||||
flash="%dkB" % (board['rom'] / 1024),
|
||||
ram=ram_size,
|
||||
name=board['name']))
|
||||
|
||||
|
||||
def _get_boards(installed=False):
|
||||
pm = PlatformPackageManager()
|
||||
pm = PlatformManager()
|
||||
return pm.get_installed_boards() if installed else pm.get_all_boards()
|
||||
|
||||
|
||||
def _print_boards_json(query, installed=False):
|
||||
result = []
|
||||
for board in _get_boards(installed):
|
||||
if query:
|
||||
search_data = "%s %s" % (board["id"], json.dumps(board).lower())
|
||||
search_data = "%s %s" % (board['id'], json.dumps(board).lower())
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
result.append(board)
|
||||
|
||||
@@ -12,18 +12,19 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from glob import glob
|
||||
from os import getenv, makedirs, remove
|
||||
from os.path import abspath, basename, expanduser, isdir, isfile, join
|
||||
from shutil import copyfile, copytree
|
||||
from tempfile import mkdtemp
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio import app, util
|
||||
from platformio.commands.init import cli as cmd_init
|
||||
from platformio.commands.init import validate_boards
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
from platformio.exception import CIBuildEnvsEmpty
|
||||
from platformio.project.commands.init import project_init_cmd, validate_boards
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.run.cli import cli as cmd_run
|
||||
|
||||
|
||||
def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@@ -31,71 +32,69 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
value = list(value)
|
||||
for i, p in enumerate(value):
|
||||
if p.startswith("~"):
|
||||
value[i] = fs.expanduser(p)
|
||||
value[i] = os.path.abspath(value[i])
|
||||
if not glob.glob(value[i], recursive=True):
|
||||
value[i] = expanduser(p)
|
||||
value[i] = abspath(value[i])
|
||||
if not glob(value[i]):
|
||||
invalid_path = p
|
||||
break
|
||||
try:
|
||||
assert invalid_path is None
|
||||
return value
|
||||
except AssertionError as exc:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path) from exc
|
||||
except AssertionError:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path)
|
||||
|
||||
|
||||
@click.command("ci", short_help="Continuous Integration")
|
||||
@click.argument("src", nargs=-1, callback=validate_path)
|
||||
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
|
||||
@click.option(
|
||||
"-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
|
||||
@click.option("--exclude", multiple=True)
|
||||
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=tempfile.mkdtemp,
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
default=mkdtemp,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@click.option(
|
||||
"-c",
|
||||
"-C",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
|
||||
ctx,
|
||||
src,
|
||||
lib,
|
||||
exclude,
|
||||
board,
|
||||
build_dir,
|
||||
keep_build_dir,
|
||||
project_conf,
|
||||
project_option,
|
||||
environments,
|
||||
verbose,
|
||||
):
|
||||
if not src and os.getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
def cli( # pylint: disable=too-many-arguments
|
||||
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
|
||||
project_option, verbose):
|
||||
|
||||
if not src and getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
if not src:
|
||||
raise click.BadParameter("Missing argument 'src'")
|
||||
|
||||
try:
|
||||
if not keep_build_dir and os.path.isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
app.set_session_var("force_option", True)
|
||||
_clean_dir(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).items():
|
||||
for dir_name, patterns in dict(lib=lib, src=src).iteritems():
|
||||
if not patterns:
|
||||
continue
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob.glob(p, recursive=True)
|
||||
_copy_contents(os.path.join(build_dir, dir_name), contents)
|
||||
contents += glob(p)
|
||||
_copy_contents(join(build_dir, dir_name), contents)
|
||||
|
||||
if project_conf and os.path.isfile(project_conf):
|
||||
if project_conf and isfile(project_conf):
|
||||
_copy_project_conf(build_dir, project_conf)
|
||||
elif not board:
|
||||
raise CIBuildEnvsEmpty()
|
||||
@@ -105,70 +104,66 @@ def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, to
|
||||
|
||||
# initialise project
|
||||
ctx.invoke(
|
||||
project_init_cmd,
|
||||
cmd_init,
|
||||
project_dir=build_dir,
|
||||
boards=board,
|
||||
project_options=project_option,
|
||||
)
|
||||
board=board,
|
||||
project_option=project_option)
|
||||
|
||||
# process project
|
||||
ctx.invoke(
|
||||
cmd_run, project_dir=build_dir, environment=environments, verbose=verbose
|
||||
)
|
||||
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
|
||||
finally:
|
||||
if not keep_build_dir:
|
||||
fs.rmtree(build_dir)
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
|
||||
def _clean_dir(dirpath):
|
||||
util.rmtree_(dirpath)
|
||||
makedirs(dirpath)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents):
|
||||
items = {"dirs": set(), "files": set()}
|
||||
|
||||
for path in contents:
|
||||
if os.path.isdir(path):
|
||||
items["dirs"].add(path)
|
||||
elif os.path.isfile(path):
|
||||
items["files"].add(path)
|
||||
if isdir(path):
|
||||
items['dirs'].add(path)
|
||||
elif isfile(path):
|
||||
items['files'].add(path)
|
||||
|
||||
dst_dir_name = os.path.basename(dst_dir)
|
||||
dst_dir_name = basename(dst_dir)
|
||||
|
||||
if dst_dir_name == "src" and len(items["dirs"]) == 1:
|
||||
if not os.path.isdir(dst_dir):
|
||||
shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
|
||||
if dst_dir_name == "src" and len(items['dirs']) == 1:
|
||||
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
|
||||
else:
|
||||
if not os.path.isdir(dst_dir):
|
||||
os.makedirs(dst_dir)
|
||||
for d in items["dirs"]:
|
||||
src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
|
||||
if not os.path.isdir(src_dst_dir):
|
||||
shutil.copytree(d, src_dst_dir, symlinks=True)
|
||||
makedirs(dst_dir)
|
||||
for d in items['dirs']:
|
||||
copytree(d, join(dst_dir, basename(d)), symlinks=True)
|
||||
|
||||
if not items["files"]:
|
||||
if not items['files']:
|
||||
return
|
||||
|
||||
if dst_dir_name == "lib":
|
||||
dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))
|
||||
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
|
||||
|
||||
for f in items["files"]:
|
||||
dst_file = os.path.join(dst_dir, os.path.basename(f))
|
||||
if f == dst_file:
|
||||
continue
|
||||
shutil.copyfile(f, dst_file)
|
||||
for f in items['files']:
|
||||
copyfile(f, join(dst_dir, basename(f)))
|
||||
|
||||
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
|
||||
contents += glob(join(util.glob_escape(dst_dir), p))
|
||||
for path in contents:
|
||||
path = os.path.abspath(path)
|
||||
if os.path.isdir(path):
|
||||
fs.rmtree(path)
|
||||
elif os.path.isfile(path):
|
||||
os.remove(path)
|
||||
path = abspath(path)
|
||||
if isdir(path):
|
||||
util.rmtree_(path)
|
||||
elif isfile(path):
|
||||
remove(path)
|
||||
|
||||
|
||||
def _copy_project_conf(build_dir, project_conf):
|
||||
config = ProjectConfig(project_conf, parse_extra=False)
|
||||
config = util.load_project_config(project_conf)
|
||||
if config.has_section("platformio"):
|
||||
config.remove_section("platformio")
|
||||
config.save(os.path.join(build_dir, "platformio.ini"))
|
||||
with open(join(build_dir, "platformio.ini"), "w") as fp:
|
||||
config.write(fp)
|
||||
|
||||
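When no src argument is given, the ci command above falls back to the colon-separated PLATFORMIO_CI_SRC variable and expands each entry with glob. A small sketch of that fallback (paths are examples only):

import glob
import os

os.environ["PLATFORMIO_CI_SRC"] = "examples/blink/*.ino:src"
patterns = os.environ["PLATFORMIO_CI_SRC"].split(":")
matches = [p for pattern in patterns for p in glob.glob(pattern, recursive=True)]
print(matches)  # everything that would be copied into the temporary build dir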
42
platformio/commands/debug.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="PIO Unified Debugger")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
174
platformio/commands/device.py
Normal file
@@ -0,0 +1,174 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
@click.group(short_help="Monitor device or list existing")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("list", short_help="List devices")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def device_list(json_output):
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(util.get_serialports()))
|
||||
return
|
||||
|
||||
for item in util.get_serialports():
|
||||
click.secho(item['port'], fg="cyan")
|
||||
click.echo("-" * len(item['port']))
|
||||
click.echo("Hardware ID: %s" % item['hwid'])
|
||||
click.echo("Description: %s" % item['description'])
|
||||
click.echo("")
|
||||
|
||||
|
||||
@cli.command("monitor", short_help="Monitor device (Serial)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option(
|
||||
"--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option(
|
||||
"--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, resolve_path=True))
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment")
|
||||
def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
try:
|
||||
project_options = get_project_options(kwargs['project_dir'],
|
||||
kwargs['environment'])
|
||||
monitor_options = {k: v for k, v in project_options or []}
|
||||
if monitor_options:
|
||||
for k in ("port", "baud", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if kwargs[k] is None and k2 in monitor_options:
|
||||
kwargs[k] = monitor_options[k2]
|
||||
if k != "port":
|
||||
kwargs[k] = int(kwargs[k])
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
|
||||
if not kwargs['port']:
|
||||
ports = util.get_serialports(filter_hwid=True)
|
||||
if len(ports) == 1:
|
||||
kwargs['port'] = ports[0]['port']
|
||||
|
||||
sys.argv = ["monitor"]
|
||||
for k, v in kwargs.iteritems():
|
||||
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
if isinstance(v, bool):
|
||||
if v:
|
||||
sys.argv.append(k)
|
||||
elif isinstance(v, tuple):
|
||||
for i in v:
|
||||
sys.argv.extend([k, i])
|
||||
else:
|
||||
sys.argv.extend([k, str(v)])
|
||||
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs['port'],
|
||||
default_baudrate=kwargs['baud'] or 9600,
|
||||
default_rts=kwargs['rts'],
|
||||
default_dtr=kwargs['dtr'])
|
||||
except Exception as e:
|
||||
raise exception.MinitermException(e)
|
||||
|
||||
|
||||
def get_project_options(project_dir, environment):
|
||||
config = util.load_project_config(project_dir)
|
||||
if not config.sections():
|
||||
return
|
||||
|
||||
known_envs = [s[4:] for s in config.sections() if s.startswith("env:")]
|
||||
if environment:
|
||||
if environment in known_envs:
|
||||
return config.items("env:%s" % environment)
|
||||
raise exception.UnknownEnvNames(environment, ", ".join(known_envs))
|
||||
|
||||
if not known_envs:
|
||||
return
|
||||
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = config.get("platformio",
|
||||
"env_default").split(", ")[0].strip()
|
||||
if env_default and env_default in known_envs:
|
||||
return config.items("env:%s" % env_default)
|
||||
|
||||
return config.items("env:%s" % known_envs[0])
|
||||
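get_project_options() above resolves the environment (honouring env_default) and hands its monitor_* options back to device_monitor(). A sketch of the same lookup with plain configparser; the ini content is an example:

import configparser

config = configparser.ConfigParser()
config.read_string("""
[platformio]
env_default = uno

[env:uno]
monitor_port = /dev/ttyACM0
monitor_baud = 115200
""")

env = config.get("platformio", "env_default").split(", ")[0].strip()
options = dict(config.items("env:%s" % env))
# device_monitor() maps monitor_port/monitor_baud/monitor_rts/monitor_dtr onto the
# --port/--baud/--rts/--dtr defaults before handing control to miniterm.
print(options)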
@@ -12,19 +12,16 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio.device.list.command import device_list_cmd
|
||||
from platformio.device.monitor.command import device_monitor_cmd
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.group(
|
||||
"device",
|
||||
commands=[
|
||||
device_list_cmd,
|
||||
device_monitor_cmd,
|
||||
],
|
||||
short_help="Device manager & Serial/Socket monitor",
|
||||
)
|
||||
def cli():
|
||||
pass
|
||||
@click.command("home", short_help="PIO Home")
|
||||
@click.option(
|
||||
"--port", "-p", type=int, default=8008, help="HTTP port, default=8008")
|
||||
@click.option("--no-open", is_flag=True)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
134
platformio/commands/init.py
Normal file
@@ -0,0 +1,134 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from os import getcwd

import click

from platformio import exception
from platformio.managers.platform import PlatformManager
from platformio.project.generator import ProjectGenerator


def validate_boards(ctx, param, value):  # pylint: disable=unused-argument
    pm = PlatformManager()
    for id_ in value:
        try:
            pm.board_config(id_)
        except exception.UnknownBoard:
            raise click.BadParameter(
                "`%s`. Please search for board ID using `platformio boards` "
                "command" % id_)
    return value


@click.command(
    "init", short_help="Initialize PlatformIO project or update existing")
@click.option(
    "--project-dir",
    "-d",
    default=getcwd,
    type=click.Path(
        exists=True,
        file_okay=False,
        dir_okay=True,
        writable=True,
        resolve_path=True))
@click.option(
    "-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("-p", "--platform")
@click.option("-f", "--framework")
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ide()))
@click.option("--vcs", type=click.Choice(ProjectGenerator.get_supported_vcs()))
@click.option("--ci", type=click.Choice(ProjectGenerator.get_supported_ci()))
@click.option(
    "-L", "--list-templates", help="List available source code templates")
@click.option("-t", "--template")
@click.option("-T", "--template-var", multiple=True)
@click.option("--env-prefix", default="")
@click.option("-E", "--env-option", multiple=True)
@click.option(
    "-O",
    "--project-option",
    multiple=True,
    help="Deprecated. Use `--env-option` instead")
@click.option("-s", "--silent", is_flag=True)
def cli(  # pylint: disable=too-many-arguments,too-many-locals
        project_dir, board, platform, framework, ide, vcs, ci, list_templates,
        template, template_var, env_prefix, env_option, project_option,
        silent):
    if not silent:
        if project_dir == getcwd():
            click.secho(
                "\nThe current working directory", fg="yellow", nl=False)
            click.secho(" %s " % project_dir, fg="cyan", nl=False)
            click.secho(
                "will be used for project.\n"
                "You can specify another project directory via\n"
                "`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
                fg="yellow")
            click.echo("")

        click.echo("The next files/directories have been created in %s" %
                   click.style(project_dir, fg="cyan"))
        click.echo("%s - Project Configuration File" % click.style(
            "platformio.ini", fg="cyan"))
        click.echo(
            "%s - Put your source files here" % click.style("src", fg="cyan"))
        click.echo("%s - Put here project specific (private) libraries" %
                   click.style("lib", fg="cyan"))

    pg = ProjectGenerator(
        project_dir,
        dict(
            boards=list(board),
            platform=platform,
            framework=framework,
            ide=ide,
            template=template,
            template_vars=list(template_var),
            env_prefix=env_prefix,
            env_options=list(env_option) + list(project_option),
            vcs=vcs,
            ci=ci))

    if ide:
        # install development platform before (show progress)
        pm = PlatformManager()
        for name in pg.project_config.get_env_names():
            platform = pg.project_config.env_get(name, "platform")
            framework = pg.project_config.env_get(name, "framework")
            if not platform:
                continue
            if framework:
                pm.install(
                    platform,
                    with_packages=["framework-%s" % framework],
                    silent=True)
            else:
                pm.install(platform, silent=True)

    pg.generate()

    if not silent:
        click.secho(
            "\nProject has been successfully initialized!\nUseful commands:\n"
            "`platformio run` - process/build project from the current "
            "directory\n"
            "`platformio run --target upload` or `platformio run -t upload` "
            "- upload firmware to embedded board\n"
            "`platformio run --target clean` - clean project (remove compiled "
            "files)\n"
            "`platformio run --help` - additional information",
            fg="green")
@@ -15,185 +15,97 @@
# pylint: disable=too-many-branches, too-many-locals

import json
import logging
import os
from os.path import isdir, join
from time import sleep
from urllib import quote

import arrow
import click

from platformio import exception, fs
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.exception import NotGlobalLibDir
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.proc import is_ci
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
|
||||
CTX_META_INPUT_DIRS_KEY = __name__ + ".input_dirs"
|
||||
CTX_META_PROJECT_ENVIRONMENTS_KEY = __name__ + ".project_environments"
|
||||
CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs"
|
||||
CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
|
||||
from platformio import exception, util
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
from platformio.util import get_api_result
|
||||
|
||||
|
||||
def get_project_global_lib_dir():
|
||||
return ProjectConfig.get_instance().get("platformio", "globallib_dir")
|
||||
|
||||
|
||||
def invoke_command(ctx, cmd, **kwargs):
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
cmd_kwargs = kwargs.copy()
|
||||
if is_platformio_project(input_dir):
|
||||
cmd_kwargs["project_dir"] = input_dir
|
||||
cmd_kwargs["environments"] = project_environments
|
||||
else:
|
||||
cmd_kwargs["global"] = True
|
||||
cmd_kwargs["storage_dir"] = input_dir
|
||||
ctx.invoke(cmd, **cmd_kwargs)
|
||||
|
||||
|
||||
@click.group(short_help="Library manager", hidden=True)
|
||||
@click.group(short_help="Library Manager")
|
||||
@click.option(
|
||||
"-g",
|
||||
"--global",
|
||||
is_flag=True,
|
||||
help="Manage global PlatformIO"
|
||||
" library storage `%s`" % join(util.get_home_dir(), "lib"))
|
||||
@click.option(
|
||||
"-d",
|
||||
"--storage-dir",
|
||||
multiple=True,
|
||||
default=None,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
help="Manage custom library storage",
|
||||
)
|
||||
@click.option(
|
||||
"-g", "--global", is_flag=True, help="Manage global PlatformIO library storage"
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
multiple=True,
|
||||
help=(
|
||||
"Manage libraries for the specific project build environments "
|
||||
"declared in `platformio.ini`"
|
||||
),
|
||||
)
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True),
|
||||
help="Manage custom library storage")
|
||||
@click.pass_context
|
||||
def cli(ctx, **options):
|
||||
in_silence = PlatformioCLI.in_silence()
|
||||
storage_cmds = ("install", "uninstall", "update", "list")
|
||||
non_storage_cmds = ("search", "show", "register", "stats", "builtin")
|
||||
# skip commands that don't need storage folder
|
||||
if ctx.invoked_subcommand not in storage_cmds or (
|
||||
len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")
|
||||
):
|
||||
if ctx.invoked_subcommand in non_storage_cmds or \
|
||||
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
|
||||
return
|
||||
storage_dirs = list(options["storage_dir"])
|
||||
if options["global"]:
|
||||
storage_dirs.append(get_project_global_lib_dir())
|
||||
if not storage_dirs:
|
||||
if is_platformio_project():
|
||||
storage_dirs = [get_project_dir()]
|
||||
elif is_ci():
|
||||
storage_dirs = [get_project_global_lib_dir()]
|
||||
storage_dir = options['storage_dir']
|
||||
if not storage_dir:
|
||||
if options['global']:
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
elif util.is_platformio_project():
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
elif util.is_ci():
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
click.secho(
|
||||
"Warning! Global library storage is used automatically. "
|
||||
"Please use `platformio lib --global %s` command to remove "
|
||||
"this warning." % ctx.invoked_subcommand,
|
||||
fg="yellow",
|
||||
)
|
||||
fg="yellow")
|
||||
elif util.is_platformio_project(storage_dir):
|
||||
with util.cd(storage_dir):
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
|
||||
if not storage_dirs:
|
||||
raise NotGlobalLibDir(
|
||||
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
|
||||
)
|
||||
if not storage_dir and not util.is_platformio_project():
|
||||
raise exception.NotGlobalLibDir(util.get_project_dir(),
|
||||
join(util.get_home_dir(), "lib"),
|
||||
ctx.invoked_subcommand)
|
||||
|
||||
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options["environment"]
|
||||
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY] = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not is_platformio_project(storage_dir):
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
continue
|
||||
with fs.cd(storage_dir):
|
||||
config = ProjectConfig.get_instance(
|
||||
os.path.join(storage_dir, "platformio.ini")
|
||||
)
|
||||
config.validate(options["environment"], silent=in_silence)
|
||||
libdeps_dir = config.get("platformio", "libdeps_dir")
|
||||
for env in config.envs():
|
||||
if options["environment"] and env not in options["environment"]:
|
||||
continue
|
||||
storage_dir = os.path.join(libdeps_dir, env)
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
|
||||
"env:" + env, "lib_deps", []
|
||||
)
|
||||
ctx.obj = LibraryManager(storage_dir)
|
||||
if "--json-output" not in ctx.args:
|
||||
click.echo("Library Storage: " + storage_dir)
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install library")
|
||||
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
|
||||
# @click.option(
|
||||
# "--save",
|
||||
# is_flag=True,
|
||||
# help="Save installed libraries into the project's platformio.ini "
|
||||
# "library dependencies")
|
||||
@click.option(
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Save installed libraries into the `platformio.ini` dependency list"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
"-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"--interactive",
|
||||
is_flag=True,
|
||||
help="Deprecated! Please use a strict dependency specification (owner/libname)",
|
||||
)
|
||||
@click.option(
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg install` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_install_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
force=force,
|
||||
silent=silent,
|
||||
)
|
||||
help="Allow to make a choice for all prompts")
|
||||
@click.pass_obj
|
||||
def lib_install(lm, libraries, silent, interactive):
|
||||
# @TODO "save" option
|
||||
for library in libraries:
|
||||
lm.install(library, silent=silent, interactive=interactive)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Remove libraries")
|
||||
@cli.command("uninstall", short_help="Uninstall libraries")
|
||||
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.option(
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Remove libraries from the `platformio.ini` dependency list and save changes"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.pass_context
|
||||
def lib_uninstall(ctx, libraries, save, silent):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
invoke_command(
|
||||
ctx,
|
||||
package_uninstall_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
silent=silent,
|
||||
)
|
||||
@click.pass_obj
|
||||
def lib_uninstall(lm, libraries):
|
||||
for library in libraries:
|
||||
lm.uninstall(library)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed libraries")
|
||||
@@ -202,106 +114,73 @@ def lib_uninstall(ctx, libraries, save, silent):
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead",
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
help="Do not update, only check for new version")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_update( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
if only_check and not json_output:
|
||||
raise exception.UserSideException(
|
||||
"This command is deprecated, please use `pio pkg outdated` instead"
|
||||
)
|
||||
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg update` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_update_cmd,
|
||||
libraries=libraries,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, [])
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
_libraries = libraries or lib_deps or lm.get_installed()
|
||||
@click.pass_obj
|
||||
def lib_update(lm, libraries, only_check, json_output):
|
||||
if not libraries:
|
||||
libraries = [manifest['__pkg_dir'] for manifest in lm.get_installed()]
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in _libraries:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(library, PackageItem):
|
||||
pkg = library
|
||||
else:
|
||||
spec = PackageSpec(library)
|
||||
pkg = lm.get_package(spec)
|
||||
if not pkg:
|
||||
for library in libraries:
|
||||
pkg_dir = library if isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_input(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
outdated = lm.outdated(pkg, spec)
|
||||
if not outdated.is_outdated(allow_incompatible=True):
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
continue
|
||||
manifest = lm.legacy_load_manifest(pkg)
|
||||
manifest["versionWanted"] = (
|
||||
str(outdated.wanted) if outdated.wanted else None
|
||||
)
|
||||
manifest["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest['versionLatest'] = latest
|
||||
result.append(manifest)
|
||||
|
||||
json_result[storage_dir] = result
|
||||
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
for library in libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_list(ctx, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg list` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(ctx, package_list_cmd, only_libraries=True)
|
||||
def print_lib_item(item):
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("=" * len(item['name']))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item['id'], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
click.echo()
|
||||
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
json_result[storage_dir] = lm.legacy_get_installed()
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), item[key]))
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo("Compatible %s: %s" % (key, ", ".join(
|
||||
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo("Authors: %s" % ", ".join(
|
||||
item.get("authornames",
|
||||
[a.get("name", "") for a in item.get("authors", [])])))
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item['__src_url'])
|
||||
click.echo()
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for a library")
|
||||
@click.argument("query", required=False, nargs=-1)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--page", type=click.INT, default=1)
|
||||
@click.option("--id", multiple=True)
|
||||
@click.option("-o", "--owner", multiple=True)
|
||||
@click.option("-n", "--name", multiple=True)
|
||||
@click.option("-a", "--author", multiple=True)
|
||||
@click.option("-k", "--keyword", multiple=True)
|
||||
@@ -311,149 +190,281 @@ def lib_list(ctx, json_output):
|
||||
@click.option(
|
||||
"--noninteractive",
|
||||
is_flag=True,
|
||||
help="Do not prompt, automatically paginate with delay",
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_search( # pylint: disable=unused-argument
|
||||
ctx, query, json_output, page, noninteractive, **filters
|
||||
):
|
||||
help="Do not prompt, automatically paginate with delay")
|
||||
def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
if not query:
|
||||
query = []
|
||||
if not isinstance(query, list):
|
||||
query = list(query)
|
||||
|
||||
for key, values in filters.items():
|
||||
for key, values in filters.iteritems():
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
query.append("type:library")
|
||||
return ctx.invoke(package_search_cmd, query=" ".join(query), page=page)
|
||||
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data(
|
||||
"get",
|
||||
result = get_api_result(
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=page),
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
return click.echo(json.dumps(result))
|
||||
dict(query=" ".join(query), page=page),
|
||||
cache_valid="3d")
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(result))
|
||||
return
|
||||
|
||||
if result['total'] == 0:
|
||||
click.secho(
|
||||
"Nothing has been found by your request\n"
|
||||
"Try a less-specific search or use truncation (or wildcard) "
|
||||
"operator",
|
||||
fg="yellow",
|
||||
nl=False)
|
||||
click.secho(" *", fg="green")
|
||||
click.secho("For example: DS*, PCA*, DHT* and etc.\n", fg="yellow")
|
||||
click.echo("For more examples and advanced search syntax, "
|
||||
"please use documentation:")
|
||||
click.secho(
|
||||
"http://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
fg="cyan")
|
||||
return
|
||||
|
||||
click.secho(
|
||||
"Found %d libraries:\n" % result['total'],
|
||||
fg="green" if result['total'] else "yellow")
|
||||
|
||||
while True:
|
||||
for item in result['items']:
|
||||
print_lib_item(item)
|
||||
|
||||
if (int(result['page']) * int(result['perpage']) >=
|
||||
int(result['total'])):
|
||||
break
|
||||
|
||||
if noninteractive:
|
||||
click.echo()
|
||||
click.secho(
|
||||
"Loading next %d libraries... Press Ctrl+C to stop!" %
|
||||
result['perpage'],
|
||||
fg="yellow")
|
||||
click.echo()
|
||||
sleep(5)
|
||||
elif not click.confirm("Show next libraries?"):
|
||||
break
|
||||
result = get_api_result(
|
||||
"/v2/lib/search",
|
||||
{"query": " ".join(query),
|
||||
"page": int(result['page']) + 1},
|
||||
cache_valid="3d")
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_list(lm, json_output):
|
||||
items = lm.get_installed()
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
if not items:
|
||||
return
|
||||
|
||||
for item in sorted(items, key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
|
||||
|
||||
@util.memoized
|
||||
def get_builtin_libs(storage_names=None):
|
||||
items = []
|
||||
storage_names = storage_names or []
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
|
||||
for storage in p.get_lib_storages():
|
||||
if storage_names and storage['name'] not in storage_names:
|
||||
continue
|
||||
lm = LibraryManager(storage['path'])
|
||||
items.append({
|
||||
"name": storage['name'],
|
||||
"path": storage['path'],
|
||||
"items": lm.get_installed()
|
||||
})
|
||||
return items
|
||||
|
||||
|
||||
@cli.command("builtin", short_help="List built-in libraries")
|
||||
@click.option("--storage", multiple=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_builtin(storage, json_output):
|
||||
items = LibraryPackageManager.get_builtin_libs(storage)
|
||||
items = get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
for storage_ in items:
|
||||
if not storage_["items"]:
|
||||
if not storage_['items']:
|
||||
continue
|
||||
click.secho(storage_["name"], fg="green")
|
||||
click.echo("*" * len(storage_["name"]))
|
||||
click.secho(storage_['name'], fg="green")
|
||||
click.echo("*" * len(storage_['name']))
|
||||
click.echo()
|
||||
|
||||
for item in sorted(storage_["items"], key=lambda i: i["name"]):
|
||||
for item in sorted(storage_['items'], key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show detailed info about a library")
|
||||
@click.argument("library", metavar="[LIBRARY]")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_show(ctx, library, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg show` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_show_cmd, pkg_type="library", spec=library)
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_input(library)
|
||||
lib_id = lm.get_pkg_id_by_name(
|
||||
name, requirements, silent=json_output, interactive=not json_output)
|
||||
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
lm = LibraryPackageManager()
|
||||
lm.set_log_level(logging.ERROR if json_output else logging.DEBUG)
|
||||
lib_id = lm.reveal_registry_package_id(library)
|
||||
regclient = lm.get_registry_client_instance()
|
||||
lib = regclient.fetch_json_data(
|
||||
"get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h"
|
||||
)
|
||||
return click.echo(json.dumps(lib))
|
||||
click.secho(lib['name'], fg="cyan")
|
||||
click.echo("=" * len(lib['name']))
|
||||
click.secho("#ID: %d" % lib['id'], bold=True)
|
||||
click.echo(lib['description'])
|
||||
click.echo()
|
||||
|
||||
click.echo("Version: %s, released %s" %
|
||||
(lib['version']['name'],
|
||||
arrow.get(lib['version']['released']).humanize()))
|
||||
click.echo("Manifest: %s" % lib['confurl'])
|
||||
for key in ("homepage", "repository", "license"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
if isinstance(lib[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(lib[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), lib[key]))
|
||||
|
||||
blocks = []
|
||||
|
||||
_authors = []
|
||||
for author in lib.get("authors", []):
|
||||
_data = []
|
||||
for key in ("name", "email", "url", "maintainer"):
|
||||
if not author[key]:
|
||||
continue
|
||||
if key == "email":
|
||||
_data.append("<%s>" % author[key])
|
||||
elif key == "maintainer":
|
||||
_data.append("(maintainer)")
|
||||
else:
|
||||
_data.append(author[key])
|
||||
_authors.append(" ".join(_data))
|
||||
if _authors:
|
||||
blocks.append(("Authors", _authors))
|
||||
|
||||
blocks.append(("Keywords", lib['keywords']))
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
blocks.append(("Compatible %s" % key, [i['title'] for i in lib[key]]))
|
||||
blocks.append(("Headers", lib['headers']))
|
||||
blocks.append(("Examples", lib['examples']))
|
||||
blocks.append(("Versions", [
|
||||
"%s, released %s" % (v['name'], arrow.get(v['released']).humanize())
|
||||
for v in lib['versions']
|
||||
]))
|
||||
blocks.append(("Unique Downloads", [
|
||||
"Today: %s" % lib['dlstats']['day'],
|
||||
"Week: %s" % lib['dlstats']['week'],
|
||||
"Month: %s" % lib['dlstats']['month']
|
||||
]))
|
||||
|
||||
for (title, rows) in blocks:
|
||||
click.echo()
|
||||
click.secho(title, bold=True)
|
||||
click.echo("-" * len(title))
|
||||
for row in rows:
|
||||
click.echo(row)
|
||||
|
||||
|
||||
@cli.command("register", short_help="Deprecated")
|
||||
@cli.command("register", short_help="Register a new library")
|
||||
@click.argument("config_url")
|
||||
def lib_register(config_url): # pylint: disable=unused-argument
|
||||
raise exception.UserSideException(
|
||||
"This command is deprecated. Please use `pio pkg publish` command."
|
||||
)
|
||||
def lib_register(config_url):
|
||||
if (not config_url.startswith("http://")
|
||||
and not config_url.startswith("https://")):
|
||||
raise exception.InvalidLibConfURL(config_url)
|
||||
|
||||
result = get_api_result("/lib/register", data=dict(config_url=config_url))
|
||||
if "message" in result and result['message']:
|
||||
click.secho(
|
||||
result['message'],
|
||||
fg="green"
|
||||
if "successed" in result and result['successed'] else "red")
|
||||
|
||||
|
||||
@cli.command("stats", short_help="Library Registry Statistics")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit "
|
||||
"https://registry.platformio.org\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return None
|
||||
result = get_api_result("/lib/stats", cache_valid="1h")
|
||||
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data("get", "/v2/lib/stats", x_cache_valid="1h")
|
||||
return click.echo(json.dumps(result))
|
||||
if json_output:
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
printitem_tpl = "{name:<33} {url}"
|
||||
printitemdate_tpl = "{name:<33} {date:23} {url}"
|
||||
|
||||
def print_lib_item(item):
|
||||
click.secho(item["name"], fg="cyan")
|
||||
click.echo("=" * len(item["name"]))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item["id"], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
def _print_title(title):
|
||||
click.secho(title.upper(), bold=True)
|
||||
click.echo("*" * len(title))
|
||||
|
||||
def _print_header(with_date=False):
|
||||
click.echo((printitemdate_tpl if with_date else printitem_tpl).format(
|
||||
name=click.style("Name", fg="cyan"),
|
||||
date="Date",
|
||||
url=click.style("Url", fg="blue")))
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
def _print_lib_item(item):
|
||||
click.echo((printitemdate_tpl
|
||||
if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"),
|
||||
date=str(
|
||||
arrow.get(item['date']).humanize()
|
||||
if "date" in item else ""),
|
||||
url=click.style(
|
||||
"http://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])),
|
||||
fg="blue")))
|
||||
|
||||
def _print_tag_item(name):
|
||||
click.echo(
|
||||
printitem_tpl.format(
|
||||
name=click.style(name, fg="cyan"),
|
||||
url=click.style(
|
||||
"http://platformio.org/lib/search?query=" + quote(
|
||||
"keyword:%s" % name),
|
||||
fg="blue")))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
_print_title("Recently " + key)
|
||||
_print_header(with_date=True)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
_print_title("Recent keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("lastkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.capitalize(), ", ".join(item[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.capitalize(), item[key]))
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo(
|
||||
"Compatible %s: %s"
|
||||
% (
|
||||
key,
|
||||
", ".join(
|
||||
[i["title"] if isinstance(i, dict) else i for i in item[key]]
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo(
|
||||
"Authors: %s"
|
||||
% ", ".join(
|
||||
item.get(
|
||||
"authornames", [a.get("name", "") for a in item.get("authors", [])]
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item["__src_url"])
|
||||
_print_title("Popular keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("topkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
|
||||
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
|
||||
"Month")):
|
||||
_print_title("Featured: " + title)
|
||||
_print_header(with_date=False)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
@@ -13,44 +13,159 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from os.path import dirname, isdir
|
||||
|
||||
import click
|
||||
|
||||
from platformio.exception import UserSideException
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio import app, exception, util
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
|
||||
|
||||
@click.group(short_help="Platform manager", hidden=True)
|
||||
@click.group(short_help="Platform Manager")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
def _print_platforms(platforms):
|
||||
for platform in platforms:
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(platform['name'], fg="cyan"),
|
||||
title=platform['title']))
|
||||
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
|
||||
click.echo(platform['description'])
|
||||
click.echo()
|
||||
if "homepage" in platform:
|
||||
click.echo("Home: %s" % platform['homepage'])
|
||||
if "frameworks" in platform and platform['frameworks']:
|
||||
click.echo("Frameworks: %s" % ", ".join(platform['frameworks']))
|
||||
if "packages" in platform:
|
||||
click.echo("Packages: %s" % ", ".join(platform['packages']))
|
||||
if "version" in platform:
|
||||
click.echo("Version: " + platform['version'])
|
||||
click.echo()
|
||||
|
||||
|
||||
def _get_registry_platforms():
|
||||
platforms = util.get_api_result("/platforms", cache_valid="30d")
|
||||
pm = PlatformManager()
|
||||
for platform in platforms or []:
|
||||
platform['versions'] = pm.get_all_repo_versions(platform['name'])
|
||||
return platforms
|
||||
|
||||
|
||||
def _original_version(version):
|
||||
if version.count(".") != 2:
|
||||
return None
|
||||
_, y = version.split(".")[:2]
|
||||
if int(y) < 100:
|
||||
return None
|
||||
if len(y) % 2 != 0:
|
||||
y = "0" + y
|
||||
parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) / 2)]
|
||||
return ".".join(parts)
|
||||
|
||||
|
||||
def _get_platform_data(*args, **kwargs):
|
||||
try:
|
||||
return _get_installed_platform_data(*args, **kwargs)
|
||||
except exception.UnknownPlatform:
|
||||
return _get_registry_platform_data(*args, **kwargs)
|
||||
|
||||
|
||||
def _get_installed_platform_data(platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
p = PlatformFactory.newPlatform(platform)
|
||||
data = dict(
|
||||
name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
repository=p.repository_url,
|
||||
url=p.vendor_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(p.frameworks.keys() if p.frameworks else []),
|
||||
packages=p.packages.keys() if p.packages else [])
|
||||
|
||||
# if dump to API
|
||||
# del data['version']
|
||||
# return data
|
||||
|
||||
# overwrite VCS version and add extra fields
|
||||
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
|
||||
assert manifest
|
||||
for key in manifest:
|
||||
if key == "version" or key.startswith("__"):
|
||||
data[key] = manifest[key]
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
|
||||
|
||||
if not data['packages'] or not expose_packages:
|
||||
return data
|
||||
|
||||
data['packages'] = []
|
||||
installed_pkgs = p.get_installed_packages()
|
||||
for name, opts in p.packages.items():
|
||||
item = dict(
|
||||
name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=opts.get("version"),
|
||||
optional=opts.get("optional") is True)
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key not in ("url", "version", "description"):
|
||||
continue
|
||||
item[key] = value
|
||||
if key == "version":
|
||||
item["originalVersion"] = _original_version(value)
|
||||
data['packages'].append(item)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _get_registry_platform_data( # pylint: disable=unused-argument
|
||||
platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
_data = None
|
||||
for p in _get_registry_platforms():
|
||||
if p['name'] == platform:
|
||||
_data = p
|
||||
break
|
||||
|
||||
if not _data:
|
||||
return None
|
||||
|
||||
data = dict(
|
||||
name=_data['name'],
|
||||
title=_data['title'],
|
||||
description=_data['description'],
|
||||
homepage=_data['homepage'],
|
||||
repository=_data['repository'],
|
||||
url=_data['url'],
|
||||
license=_data['license'],
|
||||
forDesktop=_data['forDesktop'],
|
||||
frameworks=_data['frameworks'],
|
||||
packages=_data['packages'],
|
||||
versions=_data['versions'])
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [
|
||||
board for board in PlatformManager().get_registered_boards()
|
||||
if board['platform'] == _data['name']
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_search(ctx, query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
query = query or ""
|
||||
return ctx.invoke(package_search_cmd, query=f"type:platform {query}".strip())
|
||||
|
||||
def platform_search(query, json_output):
|
||||
platforms = []
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
@@ -60,89 +175,117 @@ def platform_search(ctx, query, json_output):
|
||||
continue
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform["name"], with_boards=False, expose_packages=False
|
||||
)
|
||||
)
|
||||
click.echo(json.dumps(platforms))
|
||||
return None
|
||||
platform['name'], with_boards=False, expose_packages=False))
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit https://docs.platformio.org"
|
||||
"/en/latest/frameworks/index.html\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return
|
||||
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
frameworks = []
|
||||
for framework in regclient.fetch_json_data(
|
||||
"get", "/v2/frameworks", x_cache_valid="1d"
|
||||
):
|
||||
for framework in util.get_api_result("/frameworks", cache_valid="30d"):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
||||
framework["platforms"] = [
|
||||
platform["name"]
|
||||
for platform in _get_registry_platforms()
|
||||
if framework["name"] in platform["frameworks"]
|
||||
framework['homepage'] = (
|
||||
"http://platformio.org/frameworks/" + framework['name'])
|
||||
framework['platforms'] = [
|
||||
platform['name'] for platform in _get_registry_platforms()
|
||||
if framework['name'] in platform['frameworks']
|
||||
]
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||
click.echo(json.dumps(frameworks))
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_list(ctx, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg list` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_list_cmd, **{"global": True, "only_platforms": True})
|
||||
|
||||
def platform_list(json_output):
|
||||
platforms = []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
platforms.append(
|
||||
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
|
||||
)
|
||||
_get_installed_platform_data(
|
||||
manifest['__pkg_dir'],
|
||||
with_boards=False,
|
||||
expose_packages=False))
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
|
||||
click.echo(json.dumps(platforms))
|
||||
return None
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about development platform")
|
||||
@click.argument("platform")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branches
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg show` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_show_cmd, pkg_type="platform", spec=platform)
|
||||
|
||||
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
data = _get_platform_data(platform)
|
||||
if not data:
|
||||
raise UnknownPlatform(platform)
|
||||
return click.echo(json.dumps(data))
|
||||
raise exception.UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data))
|
||||
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(data['name'], fg="cyan"), title=data['title']))
|
||||
click.echo("=" * (3 + len(data['name'] + data['title'])))
|
||||
click.echo(data['description'])
|
||||
click.echo()
|
||||
if "version" in data:
|
||||
click.echo("Version: %s" % data['version'])
|
||||
if data['homepage']:
|
||||
click.echo("Home: %s" % data['homepage'])
|
||||
if data['repository']:
|
||||
click.echo("Repository: %s" % data['repository'])
|
||||
if data['url']:
|
||||
click.echo("Vendor: %s" % data['url'])
|
||||
if data['license']:
|
||||
click.echo("License: %s" % data['license'])
|
||||
if data['frameworks']:
|
||||
click.echo("Frameworks: %s" % ", ".join(data['frameworks']))
|
||||
|
||||
if not data['packages']:
|
||||
return
|
||||
|
||||
if not isinstance(data['packages'][0], dict):
|
||||
click.echo("Packages: %s" % ", ".join(data['packages']))
|
||||
else:
|
||||
click.echo()
|
||||
click.secho("Packages", bold=True)
|
||||
click.echo("--------")
|
||||
for item in data['packages']:
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(item['name'], fg="yellow"))
|
||||
click.echo("-" * (8 + len(item['name'])))
|
||||
if item['type']:
|
||||
click.echo("Type: %s" % item['type'])
|
||||
click.echo("Requirements: %s" % item['requirements'])
|
||||
click.echo("Installed: %s" %
|
||||
("Yes" if item.get("version") else "No (optional)"))
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item['version'])
|
||||
if "originalVersion" in item:
|
||||
click.echo("Original version: %s" % item['originalVersion'])
|
||||
if "description" in item:
|
||||
click.echo("Description: %s" % item['description'])
|
||||
|
||||
if data['boards']:
|
||||
click.echo()
|
||||
click.secho("Boards", bold=True)
|
||||
click.echo("------")
|
||||
print_boards(data['boards'])
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install new development platform")
|
||||
@@ -150,242 +293,85 @@ def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branc
|
||||
@click.option("--with-package", multiple=True)
|
||||
@click.option("--without-package", multiple=True)
|
||||
@click.option("--skip-default-package", is_flag=True)
|
||||
@click.option("--with-all-packages", is_flag=True)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
@click.pass_context
|
||||
def platform_install( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx,
|
||||
platforms,
|
||||
with_package,
|
||||
without_package,
|
||||
skip_default_package,
|
||||
with_all_packages,
|
||||
silent,
|
||||
force,
|
||||
):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg install` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
ctx.invoke(
|
||||
package_install_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
"skip_dependencies": (
|
||||
not with_all_packages
|
||||
and (with_package or without_package or skip_default_package)
|
||||
),
|
||||
"silent": silent,
|
||||
"force": force,
|
||||
},
|
||||
)
|
||||
def platform_install(platforms, with_package, without_package,
|
||||
skip_default_package):
|
||||
pm = PlatformManager()
|
||||
for platform in platforms:
|
||||
if pm.install(
|
||||
name=platform,
|
||||
with_packages=with_package,
|
||||
without_packages=without_package,
|
||||
skip_default_package=skip_default_package):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of packages will be installed automatically "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green")
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
@click.pass_context
|
||||
def platform_uninstall(ctx, platforms):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
ctx.invoke(
|
||||
package_uninstall_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
},
|
||||
)
|
||||
def platform_uninstall(platforms):
|
||||
pm = PlatformManager()
|
||||
for platform in platforms:
|
||||
if pm.uninstall(platform):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully "
|
||||
"uninstalled!" % platform,
|
||||
fg="green")
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed development platforms")
|
||||
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
|
||||
@click.option(
|
||||
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
|
||||
)
|
||||
"-p",
|
||||
"--only-packages",
|
||||
is_flag=True,
|
||||
help="Update only the platform packages")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead",
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
help="Do not update, only check for a new version")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_update( # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
|
||||
ctx, platforms, only_check, dry_run, silent, json_output, **_
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
def platform_update(platforms, only_packages, only_check, json_output):
|
||||
pm = PlatformManager()
|
||||
pkg_dir_to_name = {}
|
||||
if not platforms:
|
||||
platforms = []
|
||||
for manifest in pm.get_installed():
|
||||
platforms.append(manifest['__pkg_dir'])
|
||||
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
|
||||
"title", manifest['name'])
|
||||
|
||||
if only_check and not json_output:
|
||||
raise UserSideException(
|
||||
"This command is deprecated, please use `pio pkg outdated` instead"
|
||||
)
|
||||
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg update` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(
|
||||
package_update_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
"silent": silent,
|
||||
},
|
||||
)
|
||||
|
||||
pm = PlatformPackageManager()
|
||||
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
platforms = platforms or pm.get_installed()
|
||||
result = []
|
||||
for platform in platforms:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(platform, PackageItem):
|
||||
pkg = platform
|
||||
else:
|
||||
spec = PackageSpec(platform)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = pm.outdated(pkg, spec)
|
||||
if (
|
||||
not outdated.is_outdated(allow_incompatible=True)
|
||||
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||
):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg, with_boards=False, expose_packages=False
|
||||
)
|
||||
if outdated.is_outdated(allow_incompatible=True):
|
||||
data["versionLatest"] = str(outdated.latest) if outdated.latest else None
|
||||
result.append(data)
|
||||
click.echo(json.dumps(result))
|
||||
return True
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
def _get_registry_platforms():
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
return regclient.fetch_json_data("get", "/v2/platforms", x_cache_valid="1d")
|
||||
|
||||
|
||||
def _get_platform_data(*args, **kwargs):
|
||||
try:
|
||||
return _get_installed_platform_data(*args, **kwargs)
|
||||
except UnknownPlatform:
|
||||
return _get_registry_platform_data(*args, **kwargs)
|
||||
|
||||
|
||||
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
|
||||
p = PlatformFactory.new(platform)
|
||||
data = dict(
|
||||
name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
url=p.homepage,
|
||||
repository=p.repository_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
|
||||
packages=list(p.packages) if p.packages else [],
|
||||
)
|
||||
|
||||
# if dump to API
|
||||
# del data['version']
|
||||
# return data
|
||||
|
||||
# overwrite VCS version and add extra fields
|
||||
manifest = PlatformPackageManager().legacy_load_manifest(
|
||||
os.path.dirname(p.manifest_path)
|
||||
)
|
||||
assert manifest
|
||||
for key in manifest:
|
||||
if key == "version" or key.startswith("__"):
|
||||
data[key] = manifest[key]
|
||||
|
||||
if with_boards:
|
||||
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]
|
||||
|
||||
if not data["packages"] or not expose_packages:
|
||||
return data
|
||||
|
||||
data["packages"] = []
|
||||
installed_pkgs = {
|
||||
pkg.metadata.name: p.pm.load_manifest(pkg) for pkg in p.get_installed_packages()
|
||||
}
|
||||
for name, options in p.packages.items():
|
||||
item = dict(
|
||||
name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=options.get("version"),
|
||||
optional=options.get("optional") is True,
|
||||
)
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key not in ("url", "version", "description"):
|
||||
continue
|
||||
item[key] = value
|
||||
if key == "version":
|
||||
item["originalVersion"] = get_original_version(value)
|
||||
data["packages"].append(item)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _get_registry_platform_data( # pylint: disable=unused-argument
|
||||
platform, with_boards=True, expose_packages=True
|
||||
):
|
||||
_data = None
|
||||
for p in _get_registry_platforms():
|
||||
if p["name"] == platform:
|
||||
_data = p
|
||||
break
|
||||
|
||||
if not _data:
|
||||
return None
|
||||
|
||||
data = dict(
|
||||
ownername=_data.get("ownername"),
|
||||
name=_data["name"],
|
||||
title=_data["title"],
|
||||
description=_data["description"],
|
||||
homepage=_data["homepage"],
|
||||
repository=_data["repository"],
|
||||
url=_data["url"],
|
||||
license=_data["license"],
|
||||
forDesktop=_data["forDesktop"],
|
||||
frameworks=_data["frameworks"],
|
||||
packages=_data["packages"],
|
||||
versions=_data.get("versions"),
|
||||
)
|
||||
|
||||
if with_boards:
|
||||
data["boards"] = [
|
||||
board
|
||||
for board in PlatformPackageManager().get_registered_boards()
|
||||
if board["platform"] == _data["name"]
|
||||
]
|
||||
|
||||
return data
|
||||
    if only_check and json_output:
        result = []
        for platform in platforms:
            pkg_dir = platform if isdir(platform) else None
            requirements = None
            url = None
            if not pkg_dir:
                name, requirements, url = pm.parse_pkg_input(platform)
                pkg_dir = pm.get_package_dir(name, requirements, url)
            if not pkg_dir:
                continue
            latest = pm.outdated(pkg_dir, requirements)
            if (not latest and not PlatformFactory.newPlatform(pkg_dir)
                    .are_outdated_packages()):
                continue
            data = _get_installed_platform_data(
                pkg_dir, with_boards=False, expose_packages=False)
            if latest:
                data['versionLatest'] = latest
            result.append(data)
        return click.echo(json.dumps(result))
    else:
        # cleanup cached board and platform lists
        app.clean_cache()
        for platform in platforms:
            click.echo("Platform %s" % click.style(
                pkg_dir_to_name.get(platform, platform), fg="cyan"))
            click.echo("--------")
            pm.update(
                platform, only_packages=only_packages, only_check=only_check)
            click.echo()
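In the --only-check/--json-output branch above, a platform is reported only when either the platform package itself or at least one of its tool packages is outdated. A condensed, hypothetical restatement of that predicate (helper name and arguments are illustrative):

def platform_needs_update(pm, pkg_dir, requirements):
    # pm.outdated() returns the newer version string, or a falsy value when
    # the platform package itself is up to date.
    latest = pm.outdated(pkg_dir, requirements)
    if latest:
        return True
    return PlatformFactory.newPlatform(pkg_dir).are_outdated_packages()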
209
platformio/commands/remote.py
Normal file
@@ -0,0 +1,209 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import threading
from os import getcwd
from os.path import isfile, join
from tempfile import mkdtemp
from time import sleep

import click

from platformio import exception, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.managers.core import pioplus_call

# pylint: disable=unused-argument


@click.group("remote", short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
def cli(**kwargs):
    pass


@cli.group("agent", short_help="Start new agent or list active")
def remote_agent():
    pass


@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option(
    "-d",
    "--working-dir",
    envvar="PLATFORMIO_REMOTE_AGENT_DIR",
    type=click.Path(
        file_okay=False, dir_okay=True, writable=True, resolve_path=True))
def remote_agent_start(**kwargs):
    pioplus_call(sys.argv[1:])


@remote_agent.command("reload", short_help="Reload agents")
def remote_agent_reload():
    pioplus_call(sys.argv[1:])


@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
    pioplus_call(sys.argv[1:])


@cli.command(
    "update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
    "-c",
    "--only-check",
    is_flag=True,
    help="Do not update, only check for new version")
def remote_update(only_check):
    pioplus_call(sys.argv[1:])


@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
    "-d",
    "--project-dir",
    default=getcwd,
    type=click.Path(
        exists=True,
        file_okay=True,
        dir_okay=True,
        writable=True,
        resolve_path=True))
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def remote_run(**kwargs):
    pioplus_call(sys.argv[1:])


@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
    "-d",
    "--project-dir",
    default=getcwd,
    type=click.Path(
        exists=True,
        file_okay=False,
        dir_okay=True,
        writable=True,
        resolve_path=True))
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def remote_test(**kwargs):
    pioplus_call(sys.argv[1:])


@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
    pass


@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
def device_list(json_output):
    pioplus_call(sys.argv[1:])


@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option(
    "--baud", "-b", type=int, default=9600, help="Set baud rate, default=9600")
@click.option(
    "--parity",
    default="N",
    type=click.Choice(["N", "E", "O", "S", "M"]),
    help="Set parity, default=N")
@click.option(
    "--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
    "--xonxoff",
    is_flag=True,
    help="Enable software flow control, default=Off")
@click.option(
    "--rts",
    default=None,
    type=click.IntRange(0, 1),
    help="Set initial RTS line state")
@click.option(
    "--dtr",
    default=None,
    type=click.IntRange(0, 1),
    help="Set initial DTR line state")
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
    "--encoding",
    default="UTF-8",
    help="Set the encoding for the serial port (e.g. hexlify, "
    "Latin1, UTF-8), default: UTF-8")
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
    "--eol",
    default="CRLF",
    type=click.Choice(["CR", "LF", "CRLF"]),
    help="End of line mode, default=CRLF")
@click.option(
    "--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
    "--exit-char",
    type=int,
    default=3,
    help="ASCII code of special character that is used to exit "
    "the application, default=3 (Ctrl+C)")
@click.option(
    "--menu-char",
    type=int,
    default=20,
    help="ASCII code of special character that is used to "
    "control miniterm (menu), default=20 (DEC)")
@click.option(
    "--quiet",
    is_flag=True,
    help="Diagnostics: suppress non-error messages, default=Off")
@click.pass_context
def device_monitor(ctx, **kwargs):

    def _tx_target(sock_dir):
        try:
            pioplus_call(sys.argv[1:] + ["--sock", sock_dir])
        except exception.ReturnErrorCode:
            pass

    sock_dir = mkdtemp(suffix="pioplus")
    sock_file = join(sock_dir, "sock")
    try:
        t = threading.Thread(target=_tx_target, args=(sock_dir, ))
        t.start()
        while t.is_alive() and not isfile(sock_file):
            sleep(0.1)
        if not t.is_alive():
            return
        kwargs['port'] = open(sock_file).read()
        ctx.invoke(cmd_device_monitor, **kwargs)
        t.join(2)
    finally:
        util.rmtree_(sock_dir)
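For context, `remote device monitor` above bridges the PIO Plus agent and the local monitor through a temporary socket file: a helper thread launches pioplus with --sock, the command polls until the file appears, then reads the proxied port from it. A standalone sketch of that handshake pattern, with hypothetical names and no PIO Plus dependency:

import os
import tempfile
import threading
import time


def read_handshake_file(produce, timeout=10.0, poll=0.1):
    # produce(path) runs in a background thread and is expected to create
    # `path` with the payload (here: the forwarded port name).
    work_dir = tempfile.mkdtemp(suffix="handshake")
    payload_file = os.path.join(work_dir, "sock")
    worker = threading.Thread(target=produce, args=(payload_file,))
    worker.start()
    deadline = time.time() + timeout
    while worker.is_alive() and not os.path.isfile(payload_file):
        if time.time() > deadline:
            return None
        time.sleep(poll)
    if not os.path.isfile(payload_file):
        return None
    with open(payload_file) as fp:
        return fp.read()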
419
platformio/commands/run.py
Normal file
@@ -0,0 +1,419 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
from hashlib import sha1
from os import getcwd, makedirs, walk
from os.path import getmtime, isdir, isfile, join
from time import time

import click

from platformio import __version__, exception, telemetry, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.commands.lib import lib_install as cmd_lib_install
from platformio.commands.lib import get_builtin_libs
from platformio.commands.platform import \
    platform_install as cmd_platform_install
from platformio.managers.lib import LibraryManager
from platformio.managers.platform import PlatformFactory

# pylint: disable=too-many-arguments,too-many-locals,too-many-branches


@click.command("run", short_help="Process project environments")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
    "-d",
    "--project-dir",
    default=getcwd,
    type=click.Path(
        exists=True,
        file_okay=True,
        dir_okay=True,
        writable=True,
        resolve_path=True))
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--disable-auto-clean", is_flag=True)
@click.pass_context
def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
        disable_auto_clean):
    # find project directory on upper level
    if isfile(project_dir):
        project_dir = util.find_project_dir_above(project_dir)

    if not util.is_platformio_project(project_dir):
        raise exception.NotPlatformIOProject(project_dir)

    with util.cd(project_dir):
        # clean obsolete .pioenvs dir
        if not disable_auto_clean:
            try:
                _clean_pioenvs_dir(util.get_projectpioenvs_dir())
            except:  # pylint: disable=bare-except
                click.secho(
                    "Can not remove temporary directory `%s`. Please remove "
                    "`.pioenvs` directory from the project manually to avoid "
                    "build issues" % util.get_projectpioenvs_dir(force=True),
                    fg="yellow")

        config = util.load_project_config()
        check_project_defopts(config)
        assert check_project_envs(config, environment)

        env_default = None
        if config.has_option("platformio", "env_default"):
            env_default = util.parse_conf_multi_values(
                config.get("platformio", "env_default"))

        results = []
        start_time = time()
        for section in config.sections():
            if not section.startswith("env:"):
                continue

            envname = section[4:]
            skipenv = any([
                environment and envname not in environment, not environment
                and env_default and envname not in env_default
            ])
            if skipenv:
                results.append((envname, None))
                continue

            if not silent and results:
                click.echo()

            options = {}
            for k, v in config.items(section):
                options[k] = v
            if "piotest" not in options and "piotest" in ctx.meta:
                options['piotest'] = ctx.meta['piotest']

            ep = EnvironmentProcessor(ctx, envname, options, target,
                                      upload_port, silent, verbose)
            result = (envname, ep.process())
            results.append(result)
            if result[1] and "monitor" in ep.get_build_targets() and \
                    "nobuild" not in ep.get_build_targets():
                ctx.invoke(cmd_device_monitor)

        found_error = any([status is False for (_, status) in results])

        if (found_error or not silent) and len(results) > 1:
            click.echo()
            print_summary(results, start_time)

        if found_error:
            raise exception.ReturnErrorCode(1)
        return True
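The environment-selection rule in the loop above is easy to misread: an env is skipped when it is missing from an explicit -e/--environment list, or, when no -e was given, when env_default is set and does not include it. The same predicate isolated as a hypothetical helper:

def should_skip_env(envname, requested, env_default):
    # requested: values from -e/--environment; env_default: parsed
    # [platformio] env_default list, or None when the option is absent.
    return any([
        requested and envname not in requested,
        not requested and env_default and envname not in env_default,
    ])

# should_skip_env("uno", (), ["nodemcu"]) -> True (env_default wins)
# should_skip_env("uno", ("uno",), ["nodemcu"]) -> False (-e overrides)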
class EnvironmentProcessor(object):

    KNOWN_OPTIONS = ("platform", "framework", "board", "board_mcu",
                     "board_f_cpu", "board_f_flash", "board_flash_mode",
                     "build_flags", "src_build_flags", "build_unflags",
                     "src_filter", "extra_scripts", "targets", "upload_port",
                     "upload_protocol", "upload_speed", "upload_flags",
                     "upload_resetmethod", "lib_deps", "lib_ignore",
                     "lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
                     "lib_archive", "piotest", "test_transport", "test_filter",
                     "test_ignore", "test_port", "debug_tool", "debug_port",
                     "debug_init_cmds", "debug_extra_cmds", "debug_server",
                     "debug_init_break", "debug_load_cmd", "monitor_port",
                     "monitor_baud", "monitor_rts", "monitor_dtr")

    IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
                            "test_port", "debug_tool", "debug_port",
                            "debug_init_cmds", "debug_extra_cmds",
                            "debug_server", "debug_init_break",
                            "debug_load_cmd", "monitor_port", "monitor_baud",
                            "monitor_rts", "monitor_dtr")

    REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}

    RENAMED_OPTIONS = {
        "lib_use": "lib_deps",
        "lib_force": "lib_deps",
        "extra_script": "extra_scripts"
    }

    RENAMED_PLATFORMS = {"espressif": "espressif8266"}

    def __init__(
            self,  # pylint: disable=R0913
            cmd_ctx,
            name,
            options,
            targets,
            upload_port,
            silent,
            verbose):
        self.cmd_ctx = cmd_ctx
        self.name = name
        self.options = options
        self.targets = targets
        self.upload_port = upload_port
        self.silent = silent
        self.verbose = verbose

    def process(self):
        terminal_width, _ = click.get_terminal_size()
        start_time = time()

        for k, v in self.options.items():
            self.options[k] = self.options[k].strip()

        if not self.silent:
            click.echo(
                "[%s] Processing %s (%s)" %
                (datetime.now().strftime("%c"),
                 click.style(self.name, fg="cyan", bold=True), "; ".join([
                     "%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v)))
                     for k, v in self.options.items()
                 ])))
            click.secho("-" * terminal_width, bold=True)

        self.options = self._validate_options(self.options)
        result = self._run()
        is_error = result['returncode'] != 0

        if self.silent and not is_error:
            return True

        if is_error or "piotest_processor" not in self.cmd_ctx.meta:
            print_header(
                "[%s] Took %.2f seconds" %
                ((click.style("ERROR", fg="red", bold=True)
                  if is_error else click.style(
                      "SUCCESS", fg="green", bold=True)), time() - start_time),
                is_error=is_error)

        return not is_error

    def _validate_options(self, options):
        result = {}
        for k, v in options.items():
            # process obsolete options
            if k in self.RENAMED_OPTIONS:
                click.secho(
                    "Warning! `%s` option is deprecated and will be "
                    "removed in the next release! Please use "
                    "`%s` instead." % (k, self.RENAMED_OPTIONS[k]),
                    fg="yellow")
                k = self.RENAMED_OPTIONS[k]
            # process renamed platforms
            if k == "platform" and v in self.RENAMED_PLATFORMS:
                click.secho(
                    "Warning! Platform `%s` is deprecated and will be "
                    "removed in the next release! Please use "
                    "`%s` instead." % (v, self.RENAMED_PLATFORMS[v]),
                    fg="yellow")
                v = self.RENAMED_PLATFORMS[v]

            # warn about unknown options
            if k not in self.KNOWN_OPTIONS:
                click.secho(
                    "Detected non-PlatformIO `%s` option in `[env:%s]` section"
                    % (k, self.name),
                    fg="yellow")
            result[k] = v
        return result

    def get_build_variables(self):
        variables = {"pioenv": self.name}
        if self.upload_port:
            variables['upload_port'] = self.upload_port
        for k, v in self.options.items():
            if k in self.REMAPED_OPTIONS:
                k = self.REMAPED_OPTIONS[k]
            if k in self.IGNORE_BUILD_OPTIONS:
                continue
            if k == "targets" or (k == "upload_port" and self.upload_port):
                continue
            variables[k] = v
        return variables

    def get_build_targets(self):
        targets = []
        if self.targets:
            targets = [t for t in self.targets]
        elif "targets" in self.options:
            targets = self.options['targets'].split(", ")
        return targets

    def _run(self):
        if "platform" not in self.options:
            raise exception.UndefinedEnvPlatform(self.name)

        build_vars = self.get_build_variables()
        build_targets = self.get_build_targets()

        telemetry.on_run_environment(self.options, build_targets)

        # skip monitor target, we call it above
        if "monitor" in build_targets:
            build_targets.remove("monitor")
        if "nobuild" not in build_targets:
            # install dependent libraries
            if "lib_install" in self.options:
                _autoinstall_libdeps(self.cmd_ctx, [
                    int(d.strip())
                    for d in self.options['lib_install'].split(",")
                    if d.strip()
                ], self.verbose)
            if "lib_deps" in self.options:
                _autoinstall_libdeps(
                    self.cmd_ctx,
                    util.parse_conf_multi_values(self.options['lib_deps']),
                    self.verbose)

        try:
            p = PlatformFactory.newPlatform(self.options['platform'])
        except exception.UnknownPlatform:
            self.cmd_ctx.invoke(
                cmd_platform_install,
                platforms=[self.options['platform']],
                skip_default_package=True)
            p = PlatformFactory.newPlatform(self.options['platform'])

        return p.run(build_vars, build_targets, self.silent, self.verbose)
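A rough sketch of how one [env:...] section is driven through this class, assuming an active Click context (ctx) and option values already read from platformio.ini; all concrete values below are illustrative:

ep = EnvironmentProcessor(
    cmd_ctx=ctx,  # the Click context passed into cli()
    name="uno",
    options={"platform": "atmelavr", "framework": "arduino", "board": "uno"},
    targets=("upload",),
    upload_port=None,
    silent=False,
    verbose=True,
)
ok = ep.process()  # True on success, False when the platform run fails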
def _autoinstall_libdeps(ctx, libraries, verbose=False):
    if not libraries:
        return
    storage_dir = util.get_projectlibdeps_dir()
    ctx.obj = LibraryManager(storage_dir)
    if verbose:
        click.echo("Library Storage: " + storage_dir)
    for lib in libraries:
        try:
            ctx.invoke(cmd_lib_install, libraries=[lib], silent=not verbose)
        except exception.LibNotFound as e:
            if not _is_builtin_lib(lib):
                click.secho("Warning! %s" % e, fg="yellow")


def _is_builtin_lib(lib_name):
    for storage in get_builtin_libs():
        if any([l.get("name") == lib_name for l in storage['items']]):
            return True
    return False


def _clean_pioenvs_dir(pioenvs_dir):
    structhash_file = join(pioenvs_dir, "structure.hash")
    proj_hash = calculate_project_hash()

    # if project's config is modified
    if (isdir(pioenvs_dir)
            and getmtime(join(util.get_project_dir(), "platformio.ini")) >
            getmtime(pioenvs_dir)):
        util.rmtree_(pioenvs_dir)

    # check project structure
    if isdir(pioenvs_dir) and isfile(structhash_file):
        with open(structhash_file) as f:
            if f.read() == proj_hash:
                return
        util.rmtree_(pioenvs_dir)

    if not isdir(pioenvs_dir):
        makedirs(pioenvs_dir)

    with open(structhash_file, "w") as f:
        f.write(proj_hash)


def print_header(label, is_error=False):
    terminal_width, _ = click.get_terminal_size()
    width = len(click.unstyle(label))
    half_line = "=" * ((terminal_width - width - 2) / 2)
    click.echo("%s %s %s" % (half_line, label, half_line), err=is_error)


def print_summary(results, start_time):
    print_header("[%s]" % click.style("SUMMARY"))

    envname_max_len = 0
    for (envname, _) in results:
        if len(envname) > envname_max_len:
            envname_max_len = len(envname)

    successed = True
    for (envname, status) in results:
        status_str = click.style("SUCCESS", fg="green")
        if status is False:
            successed = False
            status_str = click.style("ERROR", fg="red")
        elif status is None:
            status_str = click.style("SKIP", fg="yellow")

        format_str = (
            "Environment {0:<" + str(envname_max_len + 9) + "}\t[{1}]")
        click.echo(
            format_str.format(click.style(envname, fg="cyan"), status_str),
            err=status is False)

    print_header(
        "[%s] Took %.2f seconds" %
        ((click.style("SUCCESS", fg="green", bold=True)
          if successed else click.style("ERROR", fg="red", bold=True)),
         time() - start_time),
        is_error=not successed)


def check_project_defopts(config):
    if not config.has_section("platformio"):
        return True
    known = ("env_default", "home_dir", "lib_dir", "libdeps_dir", "src_dir",
             "envs_dir", "data_dir", "test_dir", "boards_dir",
             "lib_extra_dirs")
    unknown = set([k for k, _ in config.items("platformio")]) - set(known)
    if not unknown:
        return True
    click.secho(
        "Warning! Ignore unknown `%s` option from `[platformio]` section" %
        ", ".join(unknown),
        fg="yellow")
    return False


def check_project_envs(config, environments):
    if not config.sections():
        raise exception.ProjectEnvsNotAvailable()

    known = set([s[4:] for s in config.sections() if s.startswith("env:")])
    unknown = set(environments) - known
    if unknown:
        raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
    return True


def calculate_project_hash():
    structure = [__version__]
    for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
        if not isdir(d):
            continue
        for root, _, files in walk(d):
            for f in files:
                path = join(root, f)
                if not any([s in path for s in (".git", ".svn", ".pioenvs")]):
                    structure.append(path)
    return sha1(",".join(sorted(structure))).hexdigest() if structure else ""
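calculate_project_hash() fingerprints only the project structure (the sorted file paths under src/ and lib/ plus the PlatformIO version), not file contents, so .pioenvs is rebuilt when files are added, removed or renamed but not when they are merely edited. A self-contained, Python 3 flavored equivalent of the same idea (hypothetical helper; note the explicit encode before hashing):

import hashlib
import os


def structure_hash(dirs, version):
    entries = [version]
    for d in dirs:
        if not os.path.isdir(d):
            continue
        for root, _, files in os.walk(d):
            for f in files:
                path = os.path.join(root, f)
                if not any(s in path for s in (".git", ".svn", ".pioenvs")):
                    entries.append(path)
    return hashlib.sha1(",".join(sorted(entries)).encode()).hexdigest()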
@@ -13,21 +13,11 @@
# limitations under the License.

import click
from tabulate import tabulate

from platformio import app
from platformio.compat import string_types


def format_value(raw):
    if isinstance(raw, bool):
        return "Yes" if raw else "No"
    if isinstance(raw, string_types):
        return raw
    return str(raw)


@click.group(short_help="Manage system settings")
@click.group(short_help="Manage PlatformIO settings")
def cli():
    pass

@@ -35,31 +25,41 @@ def cli():
@cli.command("get", short_help="Get existing setting/-s")
@click.argument("name", required=False)
def settings_get(name):
    tabular_data = []
    for key, options in sorted(app.DEFAULT_SETTINGS.items()):
        if name and name != key:
            continue
        raw_value = app.get_setting(key)
        formatted_value = format_value(raw_value)

        if raw_value != options["value"]:
            default_formatted_value = format_value(options["value"])
            formatted_value += "%s" % (
                "\n" if len(default_formatted_value) > 10 else " "
            )
            formatted_value += "[%s]" % click.style(
                default_formatted_value, fg="yellow"
            )

        tabular_data.append(
            (click.style(key, fg="cyan"), formatted_value, options["description"])
        )
    list_tpl = "{name:<40} {value:<35} {description}"
    terminal_width, _ = click.get_terminal_size()

    click.echo(
        tabulate(
            tabular_data, headers=["Name", "Current value [Default]", "Description"]
        )
    )
        list_tpl.format(
            name=click.style("Name", fg="cyan"),
            value=(click.style("Value", fg="green") + click.style(
                " [Default]", fg="yellow")),
            description="Description"))
    click.echo("-" * terminal_width)

    for _name, _data in sorted(app.DEFAULT_SETTINGS.items()):
        if name and name != _name:
            continue
        _value = app.get_setting(_name)

        _value_str = str(_value)
        if isinstance(_value, bool):
            _value_str = "Yes" if _value else "No"
        _value_str = click.style(_value_str, fg="green")

        if _value != _data['value']:
            _defvalue_str = str(_data['value'])
            if isinstance(_data['value'], bool):
                _defvalue_str = "Yes" if _data['value'] else "No"
            _value_str += click.style(" [%s]" % _defvalue_str, fg="yellow")
        else:
            _value_str += click.style(" ", fg="yellow")

        click.echo(
            list_tpl.format(
                name=click.style(_name, fg="cyan"),
                value=_value_str,
                description=_data['description']))


@cli.command("set", short_help="Set new value for the setting")
@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
    app.reset_settings()
    click.secho("The settings have been reset!", fg="green")
    click.secho("The settings have been reseted!", fg="green")
    ctx.invoke(settings_get)
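The newer settings_get shown in this hunk collects one (name, value, description) row per setting and lets tabulate handle column alignment, replacing the manual list_tpl formatting of the older version. A reduced illustration of that rendering path, using made-up sample rows:

import click
from tabulate import tabulate

rows = [
    (click.style("auto_update_libraries", fg="cyan"), "No",
     "Sample description for a boolean setting"),
    (click.style("check_libraries_interval", fg="cyan"), "7",
     "Sample description for a numeric setting"),
]
click.echo(tabulate(rows, headers=["Name", "Current value [Default]", "Description"]))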
67
platformio/commands/test.py
Normal file
@@ -0,0 +1,67 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from os import getcwd

import click

from platformio.managers.core import pioplus_call


@click.command("test", short_help="Local Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option(
    "--filter",
    "-f",
    multiple=True,
    metavar="<pattern>",
    help="Filter tests by a pattern")
@click.option(
    "--ignore",
    "-i",
    multiple=True,
    metavar="<pattern>",
    help="Ignore tests by a pattern")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
    "-d",
    "--project-dir",
    default=getcwd,
    type=click.Path(
        exists=True,
        file_okay=False,
        dir_okay=True,
        writable=True,
        resolve_path=True))
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option(
    "--no-reset",
    is_flag=True,
    help="Disable software reset via Serial.DTR/RST")
@click.option(
    "--monitor-rts",
    default=None,
    type=click.IntRange(0, 1),
    help="Set initial RTS line state for Serial Monitor")
@click.option(
    "--monitor-dtr",
    default=None,
    type=click.IntRange(0, 1),
    help="Set initial DTR line state for Serial Monitor")
@click.option("--verbose", "-v", is_flag=True)
def cli(*args, **kwargs):  # pylint: disable=unused-argument
    pioplus_call(sys.argv[1:])
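Both this local `test` command and the `remote` commands earlier in the diff are thin shims: they forward the user's original command line to the PIO Plus binary via pioplus_call(sys.argv[1:]) rather than implementing the logic in the core. The delegation pattern in isolation, with a hypothetical helper and tool path:

import subprocess
import sys


def delegate_to_tool(tool_path, argv):
    # Re-run the caller's arguments under an external helper binary and
    # propagate its exit status back to the shell.
    return subprocess.call([tool_path] + list(argv))

# e.g. sys.exit(delegate_to_tool("/usr/local/bin/pioplus", sys.argv[1:]))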
Some files were not shown because too many files have changed in this diff