Compare commits


1 Commit

Author:  Ivan Kravets
SHA1:    4ee4315c04
Message: Fixed an issue with preprocessing of `*.ino` when macros were not handled // Resolve #2972
Date:    2019-09-02 23:22:42 +03:00
188 changed files with 5628 additions and 15032 deletions

.appveyor.yml (new file, +28 lines)

@@ -0,0 +1,28 @@
build: off
platform:
- x64
environment:
matrix:
- TOXENV: "py27"
PLATFORMIO_BUILD_CACHE_DIR: C:/Temp/PIO_Build_Cache_P2_{build}
- TOXENV: "py36"
PLATFORMIO_BUILD_CACHE_DIR: C:/Temp/PIO_Build_Cache_P3_{build}
install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=C:\MinGW\bin;%PATH%
- cmd: pip install --force-reinstall tox
test_script:
- cmd: tox
notifications:
- provider: Slack
incoming_webhook:
secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
on_build_success: false
on_build_failure: true
on_build_status_changed: true


@@ -1,42 +0,0 @@
name: Core
on: [push]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [2.7, 3.7]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
git submodule update --init --recursive
python -m pip install --upgrade pip
pip install tox
- name: Python Lint
run: |
tox -e lint
- name: Integration Tests
env:
PLATFORMIO_TEST_ACCOUNT_LOGIN: ${{ secrets.PLATFORMIO_TEST_ACCOUNT_LOGIN }}
PLATFORMIO_TEST_ACCOUNT_PASSWORD: ${{ secrets.PLATFORMIO_TEST_ACCOUNT_PASSWORD }}
run: |
tox -e testcore
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Core*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}


@@ -1,31 +0,0 @@
name: Docs
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install dependencies
run: |
git submodule update --init --recursive
python -m pip install --upgrade pip
pip install tox
- name: Build docs
run: |
tox -e docs
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Docs*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}


@@ -1,62 +0,0 @@
name: Examples
on: [push]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-16.04, windows-latest, macos-latest]
python-version: [2.7, 3.7]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
git submodule update --init --recursive
python -m pip install --upgrade pip
pip install tox
- name: Run on Linux
if: startsWith(matrix.os, 'ubuntu')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51,aceinna_imu"
run: |
# ChipKIT issue: install 32-bit support for GCC PIC32
sudo apt-get install libc6-i386
# Free space
sudo apt clean
docker rmi $(docker image ls -aq)
df -h
# Run
tox -e testexamples
- name: Run on macOS
if: startsWith(matrix.os, 'macos')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,gd32v,nuclei"
run: |
df -h
tox -e testexamples
- name: Run on Windows
if: startsWith(matrix.os, 'windows')
env:
PLATFORMIO_CORE_DIR: C:/pio
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
run: |
tox -e testexamples
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Examples*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}


@@ -1,3 +1,3 @@
[settings]
line_length=88
known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope
line_length=79
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial,twisted,autobahn,jsonrpc,tabulate


@@ -1,7 +1,5 @@
[MESSAGES CONTROL]
disable=
bad-continuation,
bad-whitespace,
missing-docstring,
ungrouped-imports,
invalid-name,
@@ -11,5 +9,4 @@ disable=
too-few-public-methods,
useless-object-inheritance,
useless-import-alias,
fixme,
bad-option-value
fixme


@@ -1,12 +0,0 @@
# See https://docs.readthedocs.io/en/stable/config-file/index.html
version: 2
sphinx:
configuration: docs/conf.py
formats:
- pdf
submodules:
include: all

.style.yapf (new file, +3 lines)

@@ -0,0 +1,3 @@
[style]
blank_line_before_nested_class_or_def = true
allow_multiline_lambdas = true

.travis.yml (new file, +39 lines)

@@ -0,0 +1,39 @@
language: python
matrix:
include:
- os: linux
sudo: false
python: 2.7
env: TOX_ENV=docs
- os: linux
sudo: required
python: 2.7
env: TOX_ENV=py27 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: linux
sudo: required
python: 3.6
env: TOX_ENV=py36 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: osx
language: generic
env: TOX_ENV=skipexamples
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- pip install -U tox
# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
script:
- tox -e $TOX_ENV
notifications:
email: false
slack:
rooms:
secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
on_failure: always
on_success: change


@@ -1,151 +1,18 @@
Release Notes
=============
.. _release_notes_4:
.. _release_notes_4_0:
PlatformIO Core 4
-----------------
PlatformIO Core 4.0
-------------------
4.3.2 (2020-04-28)
4.1.0 (2019-??-??)
~~~~~~~~~~~~~~~~~~
* New `Account Management System <https://docs.platformio.org/page/plus/pio-account.html>`__ (preview)
* Open source `PIO Remote <http://docs.platformio.org/page/plus/pio-remote.html>`__ client
* Improved `PIO Check <http://docs.platformio.org/page/plus/pio-check.html>`__ with more accurate project processing
* Echo what is typed when ``send_on_enter`` `device monitor filter <https://docs.platformio.org/page/projectconf/section_env_monitor.html#monitor-filters>`__ is used (`issue #3452 <https://github.com/platformio/platformio-core/issues/3452>`_)
* Fixed PIO Unit Testing for Zephyr RTOS
* Fixed UnicodeDecodeError on Windows when network drive (NAS) is used (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
* Fixed an issue when saving libraries in new project results in error "No option 'lib_deps' in section" (`issue #3442 <https://github.com/platformio/platformio-core/issues/3442>`_)
* Fixed an incorrect node path used for pattern matching when processing middleware nodes
* Fixed an issue with missing ``lib_extra_dirs`` option in SRC_LIST for CLion (`issue #3460 <https://github.com/platformio/platformio-core/issues/3460>`_)
4.3.1 (2020-03-20)
~~~~~~~~~~~~~~~~~~
* Fixed a SyntaxError "'return' with argument inside generator" for PIO Unified Debugger when Python 2.7 is used
* Fixed an issue when ``lib_archive = no`` was not honored in `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__
* Fixed a TypeError "super(type, obj): obj must be an instance or subtype of type" when device monitor is used with a custom dev-platform filter (`issue #3431 <https://github.com/platformio/platformio-core/issues/3431>`_)
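For context on the ``lib_archive`` item above, the option is set per project environment in "platformio.ini". A minimal sketch, with a placeholder environment name and dev-platform (not part of this changeset):
  [env:native_example]   ; hypothetical environment name
  platform = native      ; placeholder dev-platform
  ; link library objects directly instead of archiving them into *.a files
  lib_archive = no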
4.3.0 (2020-03-19)
~~~~~~~~~~~~~~~~~~
* Initial support for an official `PlatformIO for CLion IDE <https://docs.platformio.org/page/integration/ide/clion.html>`__ plugin:
- Smart C and C++ editor
- Code refactoring
- On-the-fly code analysis
- "New PlatformIO Project" wizard
- Building, Uploading, Testing
- Integrated debugger (inline variable view, conditional breakpoints, expressions, watchpoints, peripheral registers, multi-thread support, etc.)
* `Device Monitor 2.0 <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html>`__
- Added **PlatformIO Device Monitor Filter API** (dev-platforms can extend base device monitor with a custom functionality, such as exception decoding) (`pull #3383 <https://github.com/platformio/platformio-core/pull/3383>`_)
- Configure project device monitor with `monitor_filters <https://docs.platformio.org/page/projectconf/section_env_monitor.html#monitor-filters>`__ option
- `Capture device monitor output to a file <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#capture-output-to-a-file>`__ with ``log2file`` filter (`issue #670 <https://github.com/platformio/platformio-core/issues/670>`_)
- Show a timestamp for each new line with ``time`` filter (`issue #981 <https://github.com/platformio/platformio-core/issues/981>`_)
- Send a text to device on ENTER with ``send_on_enter`` filter (`issue #926 <https://github.com/platformio/platformio-core/issues/926>`_)
- Show a hexadecimal representation of the data (code point of each character) with ``hexlify`` filter
* New standalone (1-script) `PlatformIO Core Installer <https://github.com/platformio/platformio-core-installer>`_
* Initial support for `Renode <https://docs.platformio.org/page/plus/debug-tools/renode.html>`__ simulation framework (`issue #3401 <https://github.com/platformio/platformio-core/issues/3401>`_)
* Added support for Arm Mbed "module.json" ``dependencies`` field (`issue #3400 <https://github.com/platformio/platformio-core/issues/3400>`_)
* Improved support for Arduino "library.properties" ``depends`` field
* Fixed an issue when quitting from PlatformIO IDE does not shutdown PIO Home server
* Fixed an issue "the JSON object must be str, not 'bytes'" when PIO Home is used with Python 3.5 (`issue #3396 <https://github.com/platformio/platformio-core/issues/3396>`_)
* Fixed an issue when Python 2 does not keep encoding when converting ".ino" (`issue #3393 <https://github.com/platformio/platformio-core/issues/3393>`_)
* Fixed an issue when ``"libArchive": false`` in "library.json" does not work (`issue #3403 <https://github.com/platformio/platformio-core/issues/3403>`_)
* Fixed an issue when not all commands in `compilation database "compile_commands.json" <https://docs.platformio.org/page/integration/compile_commands.html>`__ use absolute paths (`pull #3415 <https://github.com/platformio/platformio-core/pull/3415>`_)
* Fixed an issue when unknown transport is used for `PIO Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html>`__ engine (`issue #3422 <https://github.com/platformio/platformio-core/issues/3422>`_)
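As a rough illustration of the Device Monitor 2.0 items above, the ``monitor_filters`` option can chain several of the built-in filters in a single "platformio.ini" environment. This is only a sketch; the environment name, platform, and board are placeholders, not part of this changeset:
  [env:my_board]        ; hypothetical environment name
  platform = atmelavr   ; placeholder dev-platform
  board = uno           ; placeholder board
  framework = arduino
  ; timestamp each line, capture the session to a log file,
  ; and show a hex dump of incoming data (filters named in the notes above)
  monitor_filters = time, log2file, hexlify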
4.2.1 (2020-02-17)
~~~~~~~~~~~~~~~~~~
* Improved VSCode template with special ``forceInclude`` field for direct includes via ``-include`` flag (`issue #3379 <https://github.com/platformio/platformio-core/issues/3379>`_)
* Improved support of PIO Home on card-sized PC (Raspberry Pi, etc.) (`issue #3313 <https://github.com/platformio/platformio-core/issues/3313>`_)
* Froze "marshmallow" dependency to 2.X for Python 2 (`issue #3380 <https://github.com/platformio/platformio-core/issues/3380>`_)
* Fixed "TypeError: unsupported operand type(s)" when system environment variable is used by project configuration parser (`issue #3377 <https://github.com/platformio/platformio-core/issues/3377>`_)
* Fixed an issue when Library Dependency Finder (LDF) ignores custom "libLDFMode" and "libCompatMode" options in `library.json <http://docs.platformio.org/page/librarymanager/config.html>`__
* Fixed an issue when generating of compilation database "compile_commands.json" does not work with Python 2.7 (`issue #3378 <https://github.com/platformio/platformio-core/issues/3378>`_)
4.2.0 (2020-02-12)
~~~~~~~~~~~~~~~~~~
* `PlatformIO Home 3.1 <http://docs.platformio.org/page/home/index.html>`__:
- Project Manager
- Project Configuration UI for `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__
* `PIO Check <http://docs.platformio.org/page/plus/pio-check.html>`__ automated code analysis without hassle:
- Added support for `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ static code analyzer
* Initial support for `Project Manager <https://docs.platformio.org/page/userguide/project/index.html>`_ CLI:
- Show computed project configuration with a new `platformio project config <https://docs.platformio.org/page/userguide/project/cmd_config.html>`_ command or dump to JSON with ``platformio project config --json-output`` (`issue #3335 <https://github.com/platformio/platformio-core/issues/3335>`_)
- Moved ``platformio init`` command to `platformio project init <https://docs.platformio.org/page/userguide/project/cmd_init.html>`_
* Generate `compilation database "compile_commands.json" <https://docs.platformio.org/page/integration/compile_commands.html>`__ (`issue #2990 <https://github.com/platformio/platformio-core/issues/2990>`_)
* Control debug flags and optimization level with a new `debug_build_flags <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-build-flags>`__ option
* Install a dev-platform with ALL declared packages using a new ``--with-all-packages`` option for `pio platform install <https://docs.platformio.org/page/userguide/platforms/cmd_install.html>`__ command (`issue #3345 <https://github.com/platformio/platformio-core/issues/3345>`_)
* Added support for "pythonPackages" in `platform.json <https://docs.platformio.org/page/platforms/creating_platform.html#manifest-file-platform-json>`__ manifest (PlatformIO Package Manager will install dependent Python packages from PyPi registry automatically when dev-platform is installed)
* Handle project configuration (monitor, test, and upload options) for PIO Remote commands (`issue #2591 <https://github.com/platformio/platformio-core/issues/2591>`_)
* Added support for Arduino's library.properties ``depends`` field (`issue #2781 <https://github.com/platformio/platformio-core/issues/2781>`_)
* Autodetect monitor port for boards with specified HWIDs (`issue #3349 <https://github.com/platformio/platformio-core/issues/3349>`_)
* Updated SCons tool to 3.1.2
* Updated Unity tool to 2.5.0
* Made package ManifestSchema compatible with marshmallow >= 3 (`issue #3296 <https://github.com/platformio/platformio-core/issues/3296>`_)
* Warn about broken library manifest when scanning dependencies (`issue #3268 <https://github.com/platformio/platformio-core/issues/3268>`_)
* Do not overwrite custom items in VSCode's "extensions.json" (`issue #3374 <https://github.com/platformio/platformio-core/issues/3374>`_)
* Fixed an issue when ``env.BoardConfig()`` does not work for custom boards in extra scripts of libraries (`issue #3264 <https://github.com/platformio/platformio-core/issues/3264>`_)
* Fixed an issue with "start-group/end-group" linker flags on Native development platform (`issue #3282 <https://github.com/platformio/platformio-core/issues/3282>`_)
* Fixed default PIO Unified Debugger configuration for `J-Link probe <http://docs.platformio.org/page/plus/debug-tools/jlink.html>`__
* Fixed an issue with LDF when header files not found if "libdeps_dir" is within a subdirectory of "lib_extra_dirs" (`issue #3311 <https://github.com/platformio/platformio-core/issues/3311>`_)
* Fixed an issue "Import of non-existent variable 'projenv''" when development platform does not call "env.BuildProgram()" (`issue #3315 <https://github.com/platformio/platformio-core/issues/3315>`_)
* Fixed an issue when invalid CLI command does not return non-zero exit code
* Fixed an issue when Project Inspector crashes when flash use > 100% (`issue #3368 <https://github.com/platformio/platformio-core/issues/3368>`_)
* Fixed a "UnicodeDecodeError" when listing built-in libraries on macOS with Python 2.7 (`issue #3370 <https://github.com/platformio/platformio-core/issues/3370>`_)
* Fixed an issue with improperly handled compiler flags with space symbols in VSCode template (`issue #3364 <https://github.com/platformio/platformio-core/issues/3364>`_)
* Fixed an issue when no error is raised if referred parameter (interpolation) is missing in a project configuration file (`issue #3279 <https://github.com/platformio/platformio-core/issues/3279>`_)
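A minimal, hedged sketch of how two of the 4.2.0 additions above fit into "platformio.ini" (the environment name, board, and flag values are illustrative only, not defaults):
  [env:debug_sketch]      ; hypothetical environment name
  platform = ststm32      ; placeholder dev-platform
  board = nucleo_f401re   ; placeholder board
  framework = stm32cube
  ; override debugger optimization level and symbols via the new option
  debug_build_flags = -O0 -g3 -ggdb
The resulting computed configuration can then be inspected with ``platformio project config``, or dumped as JSON with ``platformio project config --json-output``, as described in the Project Manager CLI item above.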
4.1.0 (2019-11-07)
~~~~~~~~~~~~~~~~~~
* `PIO Check <http://docs.platformio.org/page/plus/pio-check.html>`__ automated code analysis without hassle:
- Potential NULL pointer dereferences
- Possible indexing beyond array bounds
- Suspicious assignments
- Reads of potentially uninitialized objects
- Unused variables or functions
- Out of scope memory usage.
* `PlatformIO Home 3.0 <http://docs.platformio.org/page/home/index.html>`__:
- Project Inspection
- Static Code Analysis
- Firmware File Explorer
- Firmware Memory Inspection
- Firmware Sections & Symbols Viewer.
* Added support for `Build Middlewares <http://docs.platformio.org/page/projectconf/advanced_scripting.html#build-middlewares>`__: configure custom build flags per specific file, skip any build nodes from a framework, replace build file with another on-the-fly, etc.
* Extend project environment configuration in "platformio.ini" with other sections using a new `extends <http://docs.platformio.org/page/projectconf/section_env_advanced.html#extends>`__ option (`issue #2953 <https://github.com/platformio/platformio-core/issues/2953>`_)
* Generate ``.ccls`` LSP file for `Emacs <https://docs.platformio.org/page/ide/emacs.html>`__ cross references, hierarchies, completion and semantic highlighting
* Added ``--no-ansi`` flag for `PIO Core <http://docs.platformio.org/page/userguide/index.html>`__ to disable ANSI control characters
* Added ``--shutdown-timeout`` option to `PIO Home Server <http://docs.platformio.org/page/userguide/cmd_home.html>`__
* Fixed an issue with project generator for `CLion IDE <http://docs.platformio.org/page/ide/clion.html>`__ when 2 environments were used (`issue #2824 <https://github.com/platformio/platformio-core/issues/2824>`_)
* Fixed default PIO Unified Debugger configuration for `J-Link probe <http://docs.platformio.org/page/plus/debug-tools/jlink.html>`__
* Fixed an issue when configuration file options partly ignored when using custom ``--project-conf`` (`issue #3034 <https://github.com/platformio/platformio-core/issues/3034>`_)
* Fixed an issue when installing a package using custom Git tag and submodules were not updated correctly (`issue #3060 <https://github.com/platformio/platformio-core/issues/3060>`_)
* Fixed an issue with linking process when ``$LDSCRIPT`` contains a space in path
* Fixed security issue when extracting items from TAR archive (`issue #2995 <https://github.com/platformio/platformio-core/issues/2995>`_)
* Fixed an issue with project generator when ``src_build_flags`` were not respected (`issue #3137 <https://github.com/platformio/platformio-core/issues/3137>`_)
* Fixed an issue when booleans in "platformio.ini" are not parsed properly (`issue #3022 <https://github.com/platformio/platformio-core/issues/3022>`_)
* Fixed an issue with invalid encoding when generating project for Visual Studio (`issue #3183 <https://github.com/platformio/platformio-core/issues/3183>`_)
* Fixed an issue when Project Config Parser does not remove in-line comments when Python 3 is used (`issue #3213 <https://github.com/platformio/platformio-core/issues/3213>`_)
* Fixed an issue with a GCC Linter for PlatformIO IDE for Atom (`issue #3218 <https://github.com/platformio/platformio-core/issues/3218>`_)
* Fixed default PIO Unified Debugger configuration for `J-Link probe <http://docs.platformio.org/en/latest/plus/debug-tools/jlink.html>`__
* Fixed an issue with preprocessing of ``*.ino`` files when macros were not handled (`issue #2972 <https://github.com/platformio/platformio-core/issues/2972>`_)
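The ``extends`` option mentioned above lets an environment inherit options from another section of "platformio.ini". A minimal sketch, assuming a user-defined ``[common]`` section (the section name, flag, and library are made up for illustration):
  [common]
  build_flags = -D APP_VERSION=123   ; hypothetical flag
  lib_deps = ArduinoJson             ; hypothetical dependency

  [env:node_a]               ; hypothetical environment name
  extends = common
  platform = espressif8266   ; placeholder dev-platform
  board = nodemcuv2          ; placeholder board
  framework = arduino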
4.0.3 (2019-08-30)
~~~~~~~~~~~~~~~~~~
@@ -245,8 +112,8 @@ PlatformIO Core 4
- Fixed "systemd-udevd" warnings in `99-platformio-udev.rules <http://docs.platformio.org/page/faq.html#platformio-udev-rules>`__ (`issue #2442 <https://github.com/platformio/platformio-core/issues/2442>`_)
- Fixed an issue when package cache (Library Manager) expires too fast (`issue #2559 <https://github.com/platformio/platformio-core/issues/2559>`_)
PlatformIO Core 3
-----------------
PlatformIO Core 3.0
-------------------
3.6.7 (2019-04-23)
~~~~~~~~~~~~~~~~~~
@@ -846,8 +713,8 @@ PlatformIO Core 3
(`issue #742 <https://github.com/platformio/platformio-core/issues/742>`_)
* Stopped supporting Python 2.6
PlatformIO Core 2
-----------------
PlatformIO Core 2.0
--------------------
2.11.2 (2016-08-02)
~~~~~~~~~~~~~~~~~~~
@@ -1632,8 +1499,8 @@ PlatformIO Core 2
* Fixed bug with creating copies of source files
(`issue #177 <https://github.com/platformio/platformio-core/issues/177>`_)
PlatformIO Core 1
-----------------
PlatformIO Core 1.0
-------------------
1.5.0 (2015-05-15)
~~~~~~~~~~~~~~~~~~
@@ -1823,8 +1690,8 @@ PlatformIO Core 1
error (`issue #81 <https://github.com/platformio/platformio-core/issues/81>`_)
* Several bug fixes, increased stability and performance improvements
PlatformIO Core Preview
-----------------------
PlatformIO Core 0.0
-------------------
0.10.2 (2015-01-06)
~~~~~~~~~~~~~~~~~~~


@@ -1 +0,0 @@
include LICENSE


@@ -5,14 +5,13 @@ isort:
isort -rc ./platformio
isort -rc ./tests
format:
black --target-version py27 ./platformio
black --target-version py27 ./tests
yapf:
yapf --recursive --in-place platformio/
test:
py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
py.test --verbose --capture=no --exitfirst -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
before-commit: isort format lint
before-commit: isort yapf lint test
clean-docs:
rm -rf docs/_build


@@ -1,15 +1,12 @@
PlatformIO
==========
.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
:target: https://docs.platformio.org/page/core/index.html
:alt: CI Build for PlatformIO Core
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
:target: https://github.com/platformio/platformio-examples
:alt: CI Build for dev-platform examples
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
:target: https://docs.platformio.org?utm_source=github&utm_medium=core
:alt: CI Build for Docs
.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
:target: https://travis-ci.org/platformio/platformio-core
:alt: Travis.CI Build Status
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
:alt: AppVeyor.CI Build Status
.. image:: https://img.shields.io/pypi/v/platformio.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: Latest Version
@@ -37,37 +34,34 @@ PlatformIO
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is a new generation ecosystem for embedded development
* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
* Static Code Analyzer and Remote Unit Testing
* Multi-platform and Multi-architecture Build System
* Firmware File Explorer and Memory Inspection.
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is an open source ecosystem for IoT
development. Cross-platform IDE and unified debugger. Remote unit testing and
firmware updates.
Get Started
-----------
* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
Instruments
Open Source
-----------
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
Professional
------------
PIO Plus
--------
* `PIO Check <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Cloud IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
Registry
--------
@@ -95,11 +89,10 @@ Development Platforms
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `Nuclei <https://platformio.org/platforms/nuclei?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `RISC-V GAP <https://platformio.org/platforms/riscv_gap?utm_source=github&utm_medium=core>`_
* `Shakti <https://platformio.org/platforms/shakti?utm_source=github&utm_medium=core>`_
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `ST STM8 <https://platformio.org/platforms/ststm8?utm_source=github&utm_medium=core>`_
@@ -112,25 +105,23 @@ Frameworks
----------
* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
* `ARTIK SDK <https://platformio.org/frameworks/artik-sdk?utm_source=github&utm_medium=core>`_
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
* `Energia <https://platformio.org/frameworks/energia?utm_source=github&utm_medium=core>`_
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `Freedom E SDK <https://platformio.org/frameworks/freedom-e-sdk?utm_source=github&utm_medium=core>`_
* `GigaDevice GD32V SDK <https://platformio.org/frameworks/gd32vf103-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte Standalone SDK <https://platformio.org/frameworks/kendryte-standalone-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte FreeRTOS SDK <https://platformio.org/frameworks/kendryte-freertos-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `Mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `Nuclei SDK <https://platformio.org/frameworks/nuclei-sdk?utm_source=github&utm_medium=core>`_
* `mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `PULP OS <https://platformio.org/frameworks/pulp-os?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Shakti SDK <https://platformio.org/frameworks/shakti-sdk?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
* `Tizen RT <https://platformio.org/frameworks/tizenrt?utm_source=github&utm_medium=core>`_
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
* `Zephyr <https://platformio.org/frameworks/zephyr?utm_source=github&utm_medium=core>`_
Contributing
------------
@@ -143,8 +134,8 @@ Telemetry / Privacy Policy
Share minimal diagnostics and usage information to help us make PlatformIO better.
It is enabled by default. For more information see:
* `Telemetry Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `SSL Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
License
-------

docs (submodule, 2 changed lines)
Submodule docs updated: bff1fc845b...083a75dbe3


@@ -12,19 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 3, 2)
VERSION = (4, 1, "0a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A new generation ecosystem for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"RISC-V, FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3"
)
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
@@ -34,5 +31,3 @@ __license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
__pioaccount_api__ = "https://api.accounts.platformio.org"
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"


@@ -23,42 +23,22 @@ from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN
@click.command(
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
)
@click.command(cls=PlatformioCLI,
context_settings=dict(help_option_names=["-h", "--help"]))
@click.version_option(__version__, prog_name="PlatformIO")
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.option("--force",
"-f",
is_flag=True,
help="Force to accept any confirmation prompts.")
@click.option("--caller", "-c", help="Caller ID (service).")
@click.pass_context
def cli(ctx, force, caller, no_ansi):
try:
if (
no_ansi
or str(
os.getenv("PLATFORMIO_NO_ANSI", os.getenv("PLATFORMIO_DISABLE_COLOR"))
).lower()
== "true"
):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: False
elif (
str(
os.getenv("PLATFORMIO_FORCE_ANSI", os.getenv("PLATFORMIO_FORCE_COLOR"))
).lower()
== "true"
):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
except: # pylint: disable=bare-except
pass
def cli(ctx, force, caller):
maintenance.on_platformio_start(ctx, force, caller)
@cli.resultcallback()
@click.pass_context
def process_result(ctx, result, *_, **__):
def process_result(ctx, result, force, caller): # pylint: disable=W0613
maintenance.on_platformio_end(ctx, result)
@@ -70,12 +50,21 @@ def configure():
# https://urllib3.readthedocs.org
# /en/latest/security.html#insecureplatformwarning
try:
import urllib3 # pylint: disable=import-outside-toplevel
import urllib3
urllib3.disable_warnings()
except (AttributeError, ImportError):
pass
try:
if str(os.getenv("PLATFORMIO_DISABLE_COLOR", "")).lower() == "true":
# pylint: disable=protected-access
click._compat.isatty = lambda stream: False
elif str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
except: # pylint: disable=bare-except
pass
# Handle IOError issue with VSCode's Terminal (Windows)
click_echo_origin = [click.echo, click.secho]
@@ -84,8 +73,7 @@ def configure():
click_echo_origin[origin](*args, **kwargs)
except IOError:
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
"%s\n" % (args[0] if args else "")
)
"%s\n" % (args[0] if args else ""))
click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
@@ -99,10 +87,9 @@ def main(argv=None):
sys.argv = argv
try:
configure()
cli() # pylint: disable=no-value-for-parameter
except SystemExit as e:
if e.code and str(e.code).isdigit():
exit_code = int(e.code)
cli(None, None, None)
except SystemExit:
pass
except Exception as e: # pylint: disable=broad-except
if not isinstance(e, exception.ReturnErrorCode):
maintenance.on_platformio_exception(e)


@@ -13,83 +13,91 @@
# limitations under the License.
import codecs
import getpass
import hashlib
import os
import platform
import socket
import uuid
from os import environ, getenv, listdir, remove
from os.path import dirname, isdir, isfile, join, realpath
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time
import requests
from platformio import __version__, exception, fs, lockfile
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio import exception, fs, lockfile
from platformio.compat import (WINDOWS, dump_json_to_unicode,
hashlib_encode_data)
from platformio.proc import is_ci
from platformio.project.helpers import (
get_default_projects_dir,
get_project_cache_dir,
get_project_core_dir,
)
from platformio.project.helpers import (get_project_cache_dir,
get_project_core_dir)
def get_default_projects_dir():
docs_dir = join(expanduser("~"), "Documents")
try:
assert WINDOWS
import ctypes.wintypes
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
ctypes.windll.shell32.SHGetFolderPathW(None, 5, None, 0, buf)
docs_dir = buf.value
except: # pylint: disable=bare-except
pass
return join(docs_dir, "PlatformIO", "Projects")
def projects_dir_validate(projects_dir):
assert isdir(projects_dir)
return realpath(projects_dir)
return abspath(projects_dir)
DEFAULT_SETTINGS = {
"auto_update_libraries": {
"description": "Automatically update libraries (Yes/No)",
"value": False,
"value": False
},
"auto_update_platforms": {
"description": "Automatically update platforms (Yes/No)",
"value": False,
"value": False
},
"check_libraries_interval": {
"description": "Check for the library updates interval (days)",
"value": 7,
"value": 7
},
"check_platformio_interval": {
"description": "Check for the new PlatformIO interval (days)",
"value": 3,
"value": 3
},
"check_platforms_interval": {
"description": "Check for the platform updates interval (days)",
"value": 7,
"value": 7
},
"enable_cache": {
"description": "Enable caching for API requests and Library Manager",
"value": True,
"value": True
},
"strict_ssl": {
"description": "Strict SSL for PlatformIO Services",
"value": False
},
"strict_ssl": {"description": "Strict SSL for PlatformIO Services", "value": False},
"enable_telemetry": {
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True,
"description":
("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True
},
"force_verbose": {
"description": "Force verbose output when processing environments",
"value": False,
"value": False
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"value": get_default_projects_dir(),
"validator": projects_dir_validate,
"validator": projects_dir_validate
},
}
SESSION_VARS = {
"command_ctx": None,
"force_option": False,
"caller_id": None,
"custom_project_conf": None,
}
SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
class State(object):
def __init__(self, path=None, lock=False):
self.path = path
self.lock = lock
@@ -105,12 +113,8 @@ class State(object):
if isfile(self.path):
self._storage = fs.load_json(self.path)
assert isinstance(self._storage, dict)
except (
AssertionError,
ValueError,
UnicodeDecodeError,
exception.InvalidJSONFile,
):
except (AssertionError, ValueError, UnicodeDecodeError,
exception.InvalidJSONFile):
self._storage = {}
return self
@@ -170,6 +174,7 @@ class State(object):
class ContentCache(object):
def __init__(self, cache_dir=None):
self.cache_dir = None
self._db_path = None
@@ -202,7 +207,6 @@ class ContentCache(object):
return True
def get_cache_path(self, key):
assert "/" not in key and "\\" not in key
key = str(key)
assert len(key) > 3
return join(self.cache_dir, key[-2:], key)
@@ -273,11 +277,8 @@ class ContentCache(object):
continue
expire, path = line.split("=")
try:
if (
time() < int(expire)
and isfile(path)
and path not in paths_for_delete
):
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
newlines.append(line)
continue
except ValueError:
@@ -316,11 +317,11 @@ def sanitize_setting(name, value):
defdata = DEFAULT_SETTINGS[name]
try:
if "validator" in defdata:
value = defdata["validator"](value)
elif isinstance(defdata["value"], bool):
value = defdata['validator'](value)
elif isinstance(defdata['value'], bool):
if not isinstance(value, bool):
value = str(value).lower() in ("true", "yes", "y", "1")
elif isinstance(defdata["value"], int):
elif isinstance(defdata['value'], int):
value = int(value)
except Exception:
raise exception.InvalidSettingValue(value, name)
@@ -350,24 +351,24 @@ def get_setting(name):
return sanitize_setting(name, getenv(_env_name))
with State() as state:
if "settings" in state and name in state["settings"]:
return state["settings"][name]
if "settings" in state and name in state['settings']:
return state['settings'][name]
return DEFAULT_SETTINGS[name]["value"]
return DEFAULT_SETTINGS[name]['value']
def set_setting(name, value):
with State(lock=True) as state:
if "settings" not in state:
state["settings"] = {}
state["settings"][name] = sanitize_setting(name, value)
state['settings'] = {}
state['settings'][name] = sanitize_setting(name, value)
state.modified = True
def reset_settings():
with State(lock=True) as state:
if "settings" in state:
del state["settings"]
del state['settings']
def get_session_var(name, default=None):
@@ -380,13 +381,11 @@ def set_session_var(name, value):
def is_disabled_progressbar():
return any(
[
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
]
)
return any([
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
])
def get_cid():
@@ -398,47 +397,15 @@ def get_cid():
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
uid = (
requests.get(
"{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"),
)
)
.json()
.get("id")
)
uid = requests.get("{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"))).json().get("id")
except: # pylint: disable=bare-except
pass
if not uid:
uid = uuid.getnode()
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
cid = str(cid)
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
if WINDOWS or os.getuid() > 0: # yapf: disable pylint: disable=no-member
set_state_item("cid", cid)
return cid
def get_user_agent():
data = ["PlatformIO/%s" % __version__, "CI/%d" % int(is_ci())]
if get_session_var("caller_id"):
data.append("Caller/%s" % get_session_var("caller_id"))
if os.getenv("PLATFORMIO_IDE"):
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
data.append("Python/%s" % platform.python_version())
data.append("Platform/%s" % platform.platform())
return " ".join(data)
def get_host_id():
h = hashlib.sha1(hashlib_encode_data(get_cid()))
try:
username = getpass.getuser()
h.update(hashlib_encode_data(username))
except: # pylint: disable=bare-except
pass
return h.hexdigest()
def get_host_name():
return str(socket.gethostname())[:255]


@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from os import environ, makedirs
from os.path import isdir, join
from time import time
@@ -29,10 +28,10 @@ from SCons.Script import Import # pylint: disable=import-error
from SCons.Script import Variables # pylint: disable=import-error
from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.compat import PY2, dump_json_to_unicode
from platformio.managers.platform import PlatformBase
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_dir
from platformio.project import helpers as project_helpers
AllowSubstExceptions(NameError)
@@ -44,45 +43,48 @@ clivars.AddVariables(
("PROJECT_CONFIG",),
("PIOENV",),
("PIOTEST_RUNNING_NAME",),
("UPLOAD_PORT",),
)
("UPLOAD_PORT",)
) # yapf: disable
DEFAULT_ENV_OPTIONS = dict(
tools=[
"ar",
"as",
"cc",
"c++",
"link",
"platformio",
"pioplatform",
"pioproject",
"piomaxlen",
"piolib",
"pioupload",
"piomisc",
"pioide",
"piosize",
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
"pioproject", "piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
],
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
variables=clivars,
# Propagating External Environment
ENV=environ,
UNIX_TIME=int(time()),
BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
PROJECT_DIR=project_helpers.get_project_dir(),
PROJECTCORE_DIR=project_helpers.get_project_core_dir(),
PROJECTPACKAGES_DIR=project_helpers.get_project_packages_dir(),
PROJECTWORKSPACE_DIR=project_helpers.get_project_workspace_dir(),
PROJECTLIBDEPS_DIR=project_helpers.get_project_libdeps_dir(),
PROJECTINCLUDE_DIR=project_helpers.get_project_include_dir(),
PROJECTSRC_DIR=project_helpers.get_project_src_dir(),
PROJECTTEST_DIR=project_helpers.get_project_test_dir(),
PROJECTDATA_DIR=project_helpers.get_project_data_dir(),
PROJECTBUILD_DIR=project_helpers.get_project_build_dir(),
BUILDCACHE_DIR=project_helpers.get_project_optional_dir("build_cache_dir"),
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
LIBPATH=["$BUILD_DIR"],
LIBSOURCE_DIRS=[
project_helpers.get_project_lib_dir(),
join("$PROJECTLIBDEPS_DIR", "$PIOENV"),
project_helpers.get_project_global_lib_dir()
],
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path(),
)
PYTHONEXE=get_pythonexe_path())
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
DEFAULT_ENV_OPTIONS['ARCOMSTR'] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS['LINKCOMSTR'] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS['RANLIBCOMSTR'] = "Indexing $TARGET"
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
@@ -92,63 +94,31 @@ env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
env.Replace(
**{
key: PlatformBase.decode_scons_arg(env[key])
for key in list(clivars.keys())
if key in env
}
)
for key in list(clivars.keys()) if key in env
})
# Setup project optional directories
config = env.GetProjectConfig()
env.Replace(
PROJECT_DIR=get_project_dir(),
PROJECT_CORE_DIR=config.get_optional_dir("core"),
PROJECT_PACKAGES_DIR=config.get_optional_dir("packages"),
PROJECT_WORKSPACE_DIR=config.get_optional_dir("workspace"),
PROJECT_LIBDEPS_DIR=config.get_optional_dir("libdeps"),
PROJECT_INCLUDE_DIR=config.get_optional_dir("include"),
PROJECT_SRC_DIR=config.get_optional_dir("src"),
PROJECTSRC_DIR=config.get_optional_dir("src"), # legacy for dev/platform
PROJECT_TEST_DIR=config.get_optional_dir("test"),
PROJECT_DATA_DIR=config.get_optional_dir("data"),
PROJECTDATA_DIR=config.get_optional_dir("data"), # legacy for dev/platform
PROJECT_BUILD_DIR=config.get_optional_dir("build"),
BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
LIBSOURCE_DIRS=[
config.get_optional_dir("lib"),
join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
config.get_optional_dir("globallib"),
],
)
if env.subst("$BUILD_CACHE_DIR"):
if not isdir(env.subst("$BUILD_CACHE_DIR")):
makedirs(env.subst("$BUILD_CACHE_DIR"))
env.CacheDir("$BUILD_CACHE_DIR")
if env.subst("$BUILDCACHE_DIR"):
if not isdir(env.subst("$BUILDCACHE_DIR")):
makedirs(env.subst("$BUILDCACHE_DIR"))
env.CacheDir("$BUILDCACHE_DIR")
if int(ARGUMENTS.get("ISATTY", 0)):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
if env.GetOption("clean"):
if env.GetOption('clean'):
env.PioClean(env.subst("$BUILD_DIR"))
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
# Dynamically load dependent tools
if "compiledb" in COMMAND_LINE_TARGETS:
env.Tool("compilation_db")
if not isdir(env.subst("$BUILD_DIR")):
makedirs(env.subst("$BUILD_DIR"))
print("Verbose mode can be enabled via `-v, --verbose` option")
env.LoadProjectOptions()
env.LoadPioPlatform()
env.SConscriptChdir(0)
env.SConsignFile(
join("$BUILD_DIR", ".sconsign%d%d.db" % (sys.version_info[0], sys.version_info[1]))
)
join("$PROJECTBUILD_DIR",
".sconsign.dblite" if PY2 else ".sconsign3.dblite"))
for item in env.GetExtraScripts("pre"):
env.SConscript(item, exports="env")
@@ -166,9 +136,7 @@ for item in env.GetExtraScripts("post"):
##############################################################################
# Checking program size
if env.get("SIZETOOL") and not (
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
):
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
env.Depends(["upload", "program"], "checkprogsize")
# Replace platform's "size" target with our
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
@@ -176,17 +144,11 @@ if env.get("SIZETOOL") and not (
Default(_new_targets)
Default("checkprogsize")
if "compiledb" in COMMAND_LINE_TARGETS:
env.Alias("compiledb", env.CompilationDatabase("$COMPILATIONDB_PATH"))
# Print configured protocols
env.AddPreAction(
["upload", "program"],
env.VerboseAction(
lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol...",
),
)
env.AddPreAction(["upload", "program"],
env.VerboseAction(
lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol..."))
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
@@ -194,29 +156,12 @@ AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
##############################################################################
if "envdump" in COMMAND_LINE_TARGETS:
click.echo(env.Dump())
print(env.Dump())
env.Exit(0)
if "idedata" in COMMAND_LINE_TARGETS:
try:
Import("projenv")
except: # pylint: disable=bare-except
projenv = env
click.echo(
"\n%s\n"
% dump_json_to_unicode(
projenv.DumpIDEData() # pylint: disable=undefined-variable
)
)
Import("projenv")
print("\n%s\n" % dump_json_to_unicode(
env.DumpIDEData(projenv) # pylint: disable=undefined-variable
))
env.Exit(0)
if "sizedata" in COMMAND_LINE_TARGETS:
AlwaysBuild(
env.Alias(
"sizedata",
DEFAULT_TARGETS,
env.VerboseAction(env.DumpSizeData, "Generating memory usage report..."),
)
)
Default("sizedata")


@@ -1,219 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
# Copyright 2015 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
from __future__ import absolute_import
import itertools
import json
import os
import SCons
from platformio.builder.tools.platformio import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
from platformio.proc import where_is_program
# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
# http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation
# database is, and why you might want one. The only user visible entry point here is
# 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
# which is the name that most clang tools search for by default.
# TODO: Is there a better way to do this than this global? Right now this exists so that the
# emitter we add can record all of the things it emits, so that the scanner for the top level
# compilation database can access the complete list, and also so that the writer has easy
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
# communicate more gracefully?
__COMPILATION_DB_ENTRIES = []
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
# integrate with the cache, but there doesn't seem to be much call for it.
class __CompilationDbNode(SCons.Node.Python.Value):
def __init__(self, value):
SCons.Node.Python.Value.__init__(self, value)
self.Decider(changed_since_last_build_node)
def changed_since_last_build_node(*args, **kwargs):
""" Dummy decider to force always building"""
return True
def makeEmitCompilationDbEntry(comstr):
"""
Effectively this creates a lambda function to capture:
* command line
* source
* target
:param comstr: unevaluated command line
:return: an emitter which has captured the above
"""
user_action = SCons.Action.Action(comstr)
def EmitCompilationDbEntry(target, source, env):
"""
This emitter will be added to each c/c++ object build to capture the info needed
for clang tools
:param target: target node(s)
:param source: source node(s)
:param env: Environment for use building this node
:return: target(s), source(s)
"""
# Resolve absolute path of toolchain
for cmd in ("CC", "CXX", "AS"):
if cmd not in env:
continue
if os.path.isabs(env[cmd]):
continue
env[cmd] = where_is_program(
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
)
dbtarget = __CompilationDbNode(source)
entry = env.__COMPILATIONDB_Entry(
target=dbtarget,
source=[],
__COMPILATIONDB_UTARGET=target,
__COMPILATIONDB_USOURCE=source,
__COMPILATIONDB_UACTION=user_action,
__COMPILATIONDB_ENV=env,
)
# TODO: Technically, these next two lines should not be required: it should be fine to
# cache the entries. However, they don't seem to update properly. Since they are quick
# to re-generate disable caching and sidestep this problem.
env.AlwaysBuild(entry)
env.NoCache(entry)
__COMPILATION_DB_ENTRIES.append(dbtarget)
return target, source
return EmitCompilationDbEntry
def CompilationDbEntryAction(target, source, env, **kw):
"""
Create a dictionary with evaluated command line, target, source
and store that info as an attribute on the target
(Which has been stored in __COMPILATION_DB_ENTRIES array
:param target: target node(s)
:param source: source node(s)
:param env: Environment for use building this node
:param kw:
:return: None
"""
command = env["__COMPILATIONDB_UACTION"].strfunction(
target=env["__COMPILATIONDB_UTARGET"],
source=env["__COMPILATIONDB_USOURCE"],
env=env["__COMPILATIONDB_ENV"],
)
entry = {
"directory": env.Dir("#").abspath,
"command": command,
"file": str(env["__COMPILATIONDB_USOURCE"][0]),
}
target[0].write(entry)
def WriteCompilationDb(target, source, env):
entries = []
for s in __COMPILATION_DB_ENTRIES:
item = s.read()
item["file"] = os.path.abspath(item["file"])
entries.append(item)
with open(str(target[0]), "w") as target_file:
json.dump(
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
)
def ScanCompilationDb(node, env, path):
return __COMPILATION_DB_ENTRIES
def generate(env, **kwargs):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
env["COMPILATIONDB_COMSTR"] = kwargs.get(
"COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
)
components_by_suffix = itertools.chain(
itertools.product(
[".%s" % ext for ext in SRC_C_EXT],
[
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"),
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"),
],
),
itertools.product(
[".%s" % ext for ext in SRC_CXX_EXT],
[
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"),
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"),
],
),
itertools.product(
[".%s" % ext for ext in SRC_ASM_EXT],
[(static_obj, SCons.Defaults.StaticObjectEmitter, "$ASCOM")],
),
)
for entry in components_by_suffix:
suffix = entry[0]
builder, base_emitter, command = entry[1]
# Assumes a dictionary emitter
emitter = builder.emitter[suffix]
builder.emitter[suffix] = SCons.Builder.ListEmitter(
[emitter, makeEmitCompilationDbEntry(command)]
)
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
action=SCons.Action.Action(CompilationDbEntryAction, None),
)
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
target_scanner=SCons.Scanner.Scanner(
function=ScanCompilationDb, node_class=None
),
)
def CompilationDatabase(env, target):
result = env.__COMPILATIONDB_Database(target=target, source=[])
env.AlwaysBuild(result)
env.NoCache(result)
return result
env.AddMethod(CompilationDatabase, "CompilationDatabase")
def exists(env):
return True


@@ -14,8 +14,9 @@
from __future__ import absolute_import
import os
from glob import glob
from os import environ
from os.path import abspath, isfile, join
from SCons.Defaults import processDefines # pylint: disable=import-error
@@ -24,61 +25,61 @@ from platformio.managers.core import get_core_package_dir
from platformio.proc import exec_command, where_is_program
def _dump_includes(env):
includes = {}
def _dump_includes(env, projenv):
includes = []
includes["build"] = [
env.subst("$PROJECT_INCLUDE_DIR"),
env.subst("$PROJECT_SRC_DIR"),
]
includes["build"].extend(
[os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
)
for item in projenv.get("CPPPATH", []):
includes.append(projenv.subst(item))
# installed libs
includes["compatlib"] = []
for lb in env.GetLibBuilders():
includes["compatlib"].extend(
[os.path.realpath(inc) for inc in lb.get_include_dirs()]
)
includes.extend(lb.get_include_dirs())
# includes from toolchains
p = env.PioPlatform()
includes["toolchain"] = []
for name in p.get_installed_packages():
if p.get_package_type(name) != "toolchain":
continue
toolchain_dir = glob_escape(p.get_package_dir(name))
toolchain_incglobs = [
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
os.path.join(toolchain_dir, "*", "include*"),
join(toolchain_dir, "*", "include*"),
join(toolchain_dir, "*", "include", "c++", "*"),
join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
]
for g in toolchain_incglobs:
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
includes.extend(glob(g))
includes["unity"] = []
unity_dir = get_core_package_dir("tool-unity")
if unity_dir:
includes["unity"].append(unity_dir)
includes.append(unity_dir)
return includes
includes.extend(
[env.subst("$PROJECTINCLUDE_DIR"),
env.subst("$PROJECTSRC_DIR")])
# remove duplicates
result = []
for item in includes:
if item not in result:
result.append(abspath(item))
return result
def _get_gcc_defines(env):
items = []
try:
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command(
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True
)
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
result = exec_command("echo | %s -dM -E -" % env.subst("$CC"),
env=sysenv,
shell=True)
except OSError:
return items
if result["returncode"] != 0:
if result['returncode'] != 0:
return items
for line in result["out"].split("\n"):
for line in result['out'].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
@@ -93,22 +94,17 @@ def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
defines.append(env.subst(item).replace("\\", ""))
defines.append(env.subst(item).replace('\\', ''))
# special symbol for Atmel AVR MCU
if env["PIOPLATFORM"] == "atmelavr":
if env['PIOPLATFORM'] == "atmelavr":
board_mcu = env.get("BOARD_MCU")
if not board_mcu and "BOARD" in env:
board_mcu = env.BoardConfig().get("build.mcu")
if board_mcu:
defines.append(
str(
"__AVR_%s__"
% board_mcu.upper()
.replace("ATMEGA", "ATmega")
.replace("ATTINY", "ATtiny")
)
)
str("__AVR_%s__" % board_mcu.upper().replace(
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
# built-in GCC macros
# if env.GetCompilerType() == "gcc":
@@ -120,7 +116,7 @@ def _dump_defines(env):
def _get_svd_path(env):
svd_path = env.GetProjectOption("debug_svd_path")
if svd_path:
return os.path.realpath(svd_path)
return abspath(svd_path)
if "BOARD" not in env:
return None
@@ -130,45 +126,47 @@ def _get_svd_path(env):
except (AssertionError, KeyError):
return None
# custom path to SVD file
if os.path.isfile(svd_path):
if isfile(svd_path):
return svd_path
# default file from ./platform/misc/svd folder
p = env.PioPlatform()
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
return os.path.realpath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
return None
def _escape_build_flag(flags):
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
def DumpIDEData(env):
env["__escape_build_flag"] = _escape_build_flag
LINTCCOM = (
"${__escape_build_flag(CFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
LINTCXXCOM = (
"${__escape_build_flag(CXXFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
def DumpIDEData(env, projenv):
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"
data = {
"env_name": env["PIOENV"],
"env_name":
env['PIOENV'],
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
"defines": _dump_defines(env),
"includes": _dump_includes(env),
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": env.subst("$PROG_PATH"),
"flash_extra_images": [
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
],
"svd_path": _get_svd_path(env),
"compiler_type": env.GetCompilerType(),
"defines":
_dump_defines(env),
"includes":
_dump_includes(env, projenv),
"cc_flags":
env.subst(LINTCCOM),
"cxx_flags":
env.subst(LINTCXXCOM),
"cc_path":
where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path":
where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path":
where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path":
env.subst("$PROG_PATH"),
"flash_extra_images": [{
"offset": item[0],
"path": env.subst(item[1])
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
"svd_path":
_get_svd_path(env),
"compiler_type":
env.GetCompilerType()
}
env_ = env.Clone()
@@ -182,7 +180,10 @@ def DumpIDEData(env):
_new_defines.append(item)
env_.Replace(CPPDEFINES=_new_defines)
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
data.update({
"cc_flags": env_.subst(LINTCCOM),
"cxx_flags": env_.subst(LINTCXXCOM)
})
return data
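The _get_gcc_defines helper above boils down to asking the compiler for its predefined macros. A rough standalone equivalent (Python 3), assuming a gcc executable is available on PATH:

import subprocess

def builtin_gcc_defines(cc="gcc"):
    # "echo | gcc -dM -E -" prints every predefined macro as "#define NAME VALUE"
    result = subprocess.run(
        "echo | %s -dM -E -" % cc, shell=True, capture_output=True, text=True
    )
    if result.returncode != 0:
        return []
    items = []
    for line in result.stdout.split("\n"):
        tokens = line.strip().split(" ", 2)
        if not tokens or tokens[0] != "#define":
            continue
        # "#define NAME VALUE" -> "NAME=VALUE", bare "#define NAME" -> "NAME"
        items.append("=".join(tokens[1:]) if len(tokens) > 2 else tokens[1])
    return items

print(builtin_gcc_defines()[:5])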

View File

@@ -14,16 +14,16 @@
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=assignment-from-no-return
from __future__ import absolute_import
import codecs
import hashlib
import io
import os
import re
import sys
from os.path import basename, commonprefix, isdir, isfile, join, realpath, sep
from os.path import (basename, commonprefix, expanduser, isdir, isfile, join,
realpath, sep)
import click
import SCons.Scanner # pylint: disable=import-error
@@ -33,16 +33,13 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
from platformio.compat import (WINDOWS, get_file_contents, hashlib_encode_data,
string_types)
from platformio.managers.lib import LibraryManager
from platformio.package.manifest.parser import (
ManifestParserError,
ManifestParserFactory,
)
from platformio.project.options import ProjectOptions
class LibBuilderFactory(object):
@staticmethod
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
clsname = "UnknownLibBuilder"
@@ -50,30 +47,31 @@ class LibBuilderFactory(object):
clsname = "PlatformIOLibBuilder"
else:
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
common_frameworks = set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks)
common_frameworks = (set(env.get("PIOFRAMEWORK", []))
& set(used_frameworks))
if common_frameworks:
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
elif used_frameworks:
clsname = "%sLibBuilder" % used_frameworks[0].title()
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
obj = getattr(sys.modules[__name__], clsname)(env,
path,
verbose=verbose)
assert isinstance(obj, LibBuilderBase)
return obj
@staticmethod
def get_used_frameworks(env, path):
if any(
isfile(join(path, fname))
for fname in ("library.properties", "keywords.txt")
):
isfile(join(path, fname))
for fname in ("library.properties", "keywords.txt")):
return ["arduino"]
if isfile(join(path, "module.json")):
return ["mbed"]
include_re = re.compile(
r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE
)
include_re = re.compile(r'^#include\s+(<|")(Arduino|mbed)\.h(<|")',
flags=re.MULTILINE)
# check source files
for root, _, files in os.walk(path, followlinks=True):
@@ -81,11 +79,9 @@ class LibBuilderFactory(object):
return ["mbed"]
for fname in files:
if not fs.path_endswith_ext(
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
continue
with io.open(join(root, fname), errors="ignore") as fp:
content = fp.read()
content = get_file_contents(join(root, fname))
if not content:
continue
if "Arduino.h" in content and include_re.search(content):
@@ -97,6 +93,12 @@ class LibBuilderFactory(object):
class LibBuilderBase(object):
LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
LDF_MODE_DEFAULT = "chain"
COMPAT_MODES = ["off", "soft", "strict"]
COMPAT_MODE_DEFAULT = "soft"
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
# Max depth of nested includes:
@@ -114,14 +116,7 @@ class LibBuilderBase(object):
self.path = realpath(env.subst(path))
self.verbose = verbose
try:
self._manifest = manifest if manifest else self.load_manifest()
except ManifestParserError:
click.secho(
"Warning! Ignoring broken library manifest in " + self.path, fg="yellow"
)
self._manifest = {}
self._manifest = manifest if manifest else self.load_manifest()
self._is_dependent = False
self._is_built = False
self._depbuilders = list()
@@ -129,7 +124,7 @@ class LibBuilderBase(object):
self._processed_files = list()
# reset source filter, could be overridden with extra script
self.env["SRC_FILTER"] = ""
self.env['SRC_FILTER'] = ""
# process extra options and append to build environment
self.process_extra_options()
@@ -157,7 +152,8 @@ class LibBuilderBase(object):
@property
def dependencies(self):
return self._manifest.get("dependencies")
return LibraryManager.normalize_dependencies(
self._manifest.get("dependencies", []))
@property
def src_filter(self):
@@ -165,7 +161,7 @@ class LibBuilderBase(object):
"-<example%s>" % os.sep,
"-<examples%s>" % os.sep,
"-<test%s>" % os.sep,
"-<tests%s>" % os.sep,
"-<tests%s>" % os.sep
]
@property
@@ -176,7 +172,8 @@ class LibBuilderBase(object):
@property
def src_dir(self):
return join(self.path, "src") if isdir(join(self.path, "src")) else self.path
return (join(self.path, "src")
if isdir(join(self.path, "src")) else self.path)
def get_include_dirs(self):
items = []
@@ -217,41 +214,40 @@ class LibBuilderBase(object):
@property
def lib_archive(self):
return self.env.GetProjectOption("lib_archive")
return self.env.GetProjectOption("lib_archive", True)
@property
def lib_ldf_mode(self):
return self.env.GetProjectOption("lib_ldf_mode")
return self.env.GetProjectOption("lib_ldf_mode", self.LDF_MODE_DEFAULT)
@staticmethod
def validate_ldf_mode(mode):
ldf_modes = ProjectOptions["env.lib_ldf_mode"].type.choices
if isinstance(mode, string_types):
mode = mode.strip().lower()
if mode in ldf_modes:
if mode in LibBuilderBase.LDF_MODES:
return mode
try:
return ldf_modes[int(mode)]
return LibBuilderBase.LDF_MODES[int(mode)]
except (IndexError, ValueError):
pass
return ProjectOptions["env.lib_ldf_mode"].default
return LibBuilderBase.LDF_MODE_DEFAULT
@property
def lib_compat_mode(self):
return self.env.GetProjectOption("lib_compat_mode")
return self.env.GetProjectOption("lib_compat_mode",
self.COMPAT_MODE_DEFAULT)
@staticmethod
def validate_compat_mode(mode):
compat_modes = ProjectOptions["env.lib_compat_mode"].type.choices
if isinstance(mode, string_types):
mode = mode.strip().lower()
if mode in compat_modes:
if mode in LibBuilderBase.COMPAT_MODES:
return mode
try:
return compat_modes[int(mode)]
return LibBuilderBase.COMPAT_MODES[int(mode)]
except (IndexError, ValueError):
pass
return ProjectOptions["env.lib_compat_mode"].default
return LibBuilderBase.COMPAT_MODE_DEFAULT
def is_platforms_compatible(self, platforms):
return True
@@ -267,10 +263,11 @@ class LibBuilderBase(object):
self.env.ProcessFlags(self.build_flags)
if self.extra_script:
self.env.SConscriptChdir(1)
self.env.SConscript(
realpath(self.extra_script),
exports={"env": self.env, "pio_lib_builder": self},
)
self.env.SConscript(realpath(self.extra_script),
exports={
"env": self.env,
"pio_lib_builder": self
})
self.env.ProcessUnFlags(self.build_unflags)
def process_dependencies(self):
@@ -279,7 +276,7 @@ class LibBuilderBase(object):
for item in self.dependencies:
found = False
for lb in self.env.GetLibBuilders():
if item["name"] != lb.name:
if item['name'] != lb.name:
continue
found = True
if lb not in self.depbuilders:
@@ -287,48 +284,37 @@ class LibBuilderBase(object):
break
if not found and self.verbose:
sys.stderr.write(
"Warning: Ignored `%s` dependency for `%s` "
"library\n" % (item["name"], self.name)
)
sys.stderr.write("Warning: Ignored `%s` dependency for `%s` "
"library\n" % (item['name'], self.name))
def get_search_files(self):
items = [
join(self.src_dir, item)
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
join(self.src_dir, item) for item in self.env.MatchSourceFiles(
self.src_dir, self.src_filter)
]
include_dir = self.include_dir
if include_dir:
items.extend(
[
join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
]
)
items.extend([
join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
])
return items
def _get_found_includes( # pylint: disable=too-many-branches
self, search_files=None
):
self, search_files=None):
# all include directories
if not LibBuilderBase._INCLUDE_DIRS_CACHE:
LibBuilderBase._INCLUDE_DIRS_CACHE = [
self.env.Dir(d)
for d in ProjectAsLibBuilder(
self.envorigin, "$PROJECT_DIR"
).get_include_dirs()
]
LibBuilderBase._INCLUDE_DIRS_CACHE = []
for lb in self.env.GetLibBuilders():
LibBuilderBase._INCLUDE_DIRS_CACHE.extend(
[self.env.Dir(d) for d in lb.get_include_dirs()]
)
[self.env.Dir(d) for d in lb.get_include_dirs()])
# append self include directories
include_dirs = [self.env.Dir(d) for d in self.get_include_dirs()]
include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)
result = []
for path in search_files or []:
for path in (search_files or []):
if path in self._processed_files:
continue
self._processed_files.append(path)
@@ -339,27 +325,21 @@ class LibBuilderBase(object):
self.env.File(path),
self.env,
tuple(include_dirs),
depth=self.CCONDITIONAL_SCANNER_DEPTH,
)
depth=self.CCONDITIONAL_SCANNER_DEPTH)
# mark candidates already processed via Conditional Scanner
self._processed_files.extend(
[
c.get_abspath()
for c in candidates
if c.get_abspath() not in self._processed_files
]
)
self._processed_files.extend([
c.get_abspath() for c in candidates
if c.get_abspath() not in self._processed_files
])
except Exception as e: # pylint: disable=broad-except
if self.verbose and "+" in self.lib_ldf_mode:
sys.stderr.write(
"Warning! Classic Pre Processor is used for `%s`, "
"advanced has failed with `%s`\n" % (path, e)
)
"advanced has failed with `%s`\n" % (path, e))
candidates = LibBuilderBase.CLASSIC_SCANNER(
self.env.File(path), self.env, tuple(include_dirs)
)
self.env.File(path), self.env, tuple(include_dirs))
# print(path, [c.get_abspath() for c in candidates])
# print(path, map(lambda n: n.get_abspath(), candidates))
for item in candidates:
if item not in result:
result.append(item)
@@ -368,8 +348,8 @@ class LibBuilderBase(object):
_h_path = item.get_abspath()
if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
continue
_f_part = _h_path[: _h_path.rindex(".")]
for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
_f_part = _h_path[:_h_path.rindex(".")]
for ext in piotool.SRC_C_EXT:
if not isfile("%s.%s" % (_f_part, ext)):
continue
_c_path = self.env.File("%s.%s" % (_f_part, ext))
@@ -379,6 +359,7 @@ class LibBuilderBase(object):
return result
def depend_recursive(self, lb, search_files=None):
def _already_depends(_lb):
if self in _lb.depbuilders:
return True
@@ -391,10 +372,9 @@ class LibBuilderBase(object):
if self != lb:
if _already_depends(lb):
if self.verbose:
sys.stderr.write(
"Warning! Circular dependencies detected "
"between `%s` and `%s`\n" % (self.path, lb.path)
)
sys.stderr.write("Warning! Circular dependencies detected "
"between `%s` and `%s`\n" %
(self.path, lb.path))
self._circular_deps.append(lb)
elif lb not in self._depbuilders:
self._depbuilders.append(lb)
@@ -451,10 +431,11 @@ class LibBuilderBase(object):
if self.lib_archive:
libs.append(
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
)
self.env.BuildLibrary(self.build_dir, self.src_dir,
self.src_filter))
else:
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
self.env.BuildSources(self.build_dir, self.src_dir,
self.src_filter)
return libs
@@ -463,11 +444,19 @@ class UnknownLibBuilder(LibBuilderBase):
class ArduinoLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest = {}
if not isfile(join(self.path, "library.properties")):
return manifest
manifest_path = join(self.path, "library.properties")
if not isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
with codecs.open(manifest_path, encoding="utf-8") as fp:
for line in fp.readlines():
if "=" not in line:
continue
key, value = line.split("=", 1)
manifest[key.strip()] = value.strip()
return manifest
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@@ -511,18 +500,35 @@ class ArduinoLibBuilder(LibBuilderBase):
return util.items_in_list(frameworks, ["arduino", "energia"])
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms", [])
platforms_map = {
"avr": ["atmelavr"],
"sam": ["atmelsam"],
"samd": ["atmelsam"],
"esp8266": ["espressif8266"],
"esp32": ["espressif32"],
"arc32": ["intel_arc32"],
"stm32": ["ststm32"],
"nrf5": ["nordicnrf51", "nordicnrf52"]
}
items = []
for arch in self._manifest.get("architectures", "").split(","):
arch = arch.strip().lower()
if arch == "*":
items = "*"
break
if arch in platforms_map:
items.extend(platforms_map[arch])
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
class MbedLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "module.json")
if not isfile(manifest_path):
if not isfile(join(self.path, "module.json")):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
return fs.load_json(join(self.path, "module.json"))
@property
def include_dir(self):
@@ -577,7 +583,8 @@ class MbedLibBuilder(LibBuilderBase):
mbed_config_path = join(self.env.subst(p), "mbed_config.h")
if isfile(mbed_config_path):
break
mbed_config_path = None
else:
mbed_config_path = None
if not mbed_config_path:
return None
@@ -604,15 +611,14 @@ class MbedLibBuilder(LibBuilderBase):
# default macros
for macro in manifest.get("macros", []):
macro = self._mbed_normalize_macro(macro)
macros[macro["name"]] = macro
macros[macro['name']] = macro
# configuration items
for key, options in manifest.get("config", {}).items():
if "value" not in options:
continue
macros[key] = dict(
name=options.get("macro_name"), value=options.get("value")
)
macros[key] = dict(name=options.get("macro_name"),
value=options.get("value"))
# overridden items per target
for target, options in manifest.get("target_overrides", {}).items():
@@ -620,23 +626,25 @@ class MbedLibBuilder(LibBuilderBase):
continue
for macro in options.get("target.macros_add", []):
macro = self._mbed_normalize_macro(macro)
macros[macro["name"]] = macro
macros[macro['name']] = macro
for key, value in options.items():
if not key.startswith("target.") and key in macros:
macros[key]["value"] = value
macros[key]['value'] = value
# normalize macro names
for key, macro in macros.items():
if not macro["name"]:
macro["name"] = key
if "." not in macro["name"]:
macro["name"] = "%s.%s" % (manifest.get("name"), macro["name"])
macro["name"] = re.sub(
r"[^a-z\d]+", "_", macro["name"], flags=re.I
).upper()
macro["name"] = "MBED_CONF_" + macro["name"]
if isinstance(macro["value"], bool):
macro["value"] = 1 if macro["value"] else 0
if not macro['name']:
macro['name'] = key
if "." not in macro['name']:
macro['name'] = "%s.%s" % (manifest.get("name"),
macro['name'])
macro['name'] = re.sub(r"[^a-z\d]+",
"_",
macro['name'],
flags=re.I).upper()
macro['name'] = "MBED_CONF_" + macro['name']
if isinstance(macro['value'], bool):
macro['value'] = 1 if macro['value'] else 0
return {macro["name"]: macro["value"] for macro in macros.values()}
@@ -646,13 +654,13 @@ class MbedLibBuilder(LibBuilderBase):
for line in fp.readlines():
line = line.strip()
if line == "#endif":
lines.append("// PlatformIO Library Dependency Finder (LDF)")
lines.extend(
[
"#define %s %s" % (name, value if value is not None else "")
for name, value in macros.items()
]
)
lines.append(
"// PlatformIO Library Dependency Finder (LDF)")
lines.extend([
"#define %s %s" %
(name, value if value is not None else "")
for name, value in macros.items()
])
lines.append("")
if not line.startswith("#define"):
lines.append(line)
@@ -666,13 +674,22 @@ class MbedLibBuilder(LibBuilderBase):
class PlatformIOLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "library.json")
if not isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
def _has_arduino_manifest(self):
def load_manifest(self):
assert isfile(join(self.path, "library.json"))
manifest = fs.load_json(join(self.path, "library.json"))
assert "name" in manifest
# replace "espressif" old name dev/platform with ESP8266
if "platforms" in manifest:
manifest['platforms'] = [
"espressif8266" if p == "espressif" else p
for p in util.items_to_list(manifest['platforms'])
]
return manifest
def _is_arduino_manifest(self):
return isfile(join(self.path, "library.properties"))
@property
@@ -693,9 +710,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
def src_filter(self):
if "srcFilter" in self._manifest.get("build", {}):
return self._manifest.get("build").get("srcFilter")
if self.env["SRC_FILTER"]:
return self.env["SRC_FILTER"]
if self._has_arduino_manifest():
if self.env['SRC_FILTER']:
return self.env['SRC_FILTER']
if self._is_arduino_manifest():
return ArduinoLibBuilder.src_filter.fget(self)
return LibBuilderBase.src_filter.fget(self)
@@ -719,33 +736,28 @@ class PlatformIOLibBuilder(LibBuilderBase):
@property
def lib_archive(self):
missing = object()
global_value = self.env.GetProjectConfig().getraw(
"env:" + self.env["PIOENV"], "lib_archive", missing
)
if global_value != missing:
return self.env.GetProjectConfig().get(
"env:" + self.env["PIOENV"], "lib_archive"
)
global_value = self.env.GetProjectOption("lib_archive")
if global_value is not None:
return global_value
return self._manifest.get("build", {}).get(
"libArchive", LibBuilderBase.lib_archive.fget(self)
)
"libArchive", LibBuilderBase.lib_archive.fget(self))
@property
def lib_ldf_mode(self):
return self.validate_ldf_mode(
self._manifest.get("build", {}).get(
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self)
)
)
self.env.GetProjectOption(
"lib_ldf_mode",
self._manifest.get("build", {}).get(
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self))))
@property
def lib_compat_mode(self):
return self.validate_compat_mode(
self._manifest.get("build", {}).get(
"libCompatMode", LibBuilderBase.lib_compat_mode.fget(self)
)
)
self.env.GetProjectOption(
"lib_compat_mode",
self._manifest.get("build", {}).get(
"libCompatMode",
LibBuilderBase.lib_compat_mode.fget(self))))
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms")
@@ -763,12 +775,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
include_dirs = LibBuilderBase.get_include_dirs(self)
# backwards compatibility with PlatformIO 2.0
if (
"build" not in self._manifest
and self._has_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))
):
if ("build" not in self._manifest and self._is_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))):
include_dirs.append(join(self.path, "utility"))
for path in self.env.get("CPPPATH", []):
@@ -779,24 +788,25 @@ class PlatformIOLibBuilder(LibBuilderBase):
class ProjectAsLibBuilder(LibBuilderBase):
def __init__(self, env, *args, **kwargs):
# backup original value, will be reset in base.__init__
project_src_filter = env.get("SRC_FILTER")
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
self.env["SRC_FILTER"] = project_src_filter
self.env['SRC_FILTER'] = project_src_filter
@property
def include_dir(self):
include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
return include_dir if isdir(include_dir) else None
@property
def src_dir(self):
return self.env.subst("$PROJECT_SRC_DIR")
return self.env.subst("$PROJECTSRC_DIR")
def get_include_dirs(self):
include_dirs = []
project_include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
if isdir(project_include_dir):
include_dirs.append(project_include_dir)
for include_dir in LibBuilderBase.get_include_dirs(self):
@@ -806,18 +816,21 @@ class ProjectAsLibBuilder(LibBuilderBase):
def get_search_files(self):
# project files
items = LibBuilderBase.get_search_files(self)
search_files = LibBuilderBase.get_search_files(self)
# test files
if "__test" in COMMAND_LINE_TARGETS:
items.extend(
[
join("$PROJECT_TEST_DIR", item)
for item in self.env.MatchSourceFiles(
"$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
)
]
)
return items
search_files.extend([
join("$PROJECTTEST_DIR",
item) for item in self.env.MatchSourceFiles(
"$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER")
])
if "arduino" in self.env.get("PIOFRAMEWORK", []):
search_files.extend([
join(self.src_dir, item)
for item in fs.match_src_files(self.src_dir, self.src_filter, (
"ino", "pde"))
])
return search_files
@property
def lib_ldf_mode(self):
@@ -829,7 +842,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def src_filter(self):
return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self)
return (self.env.get("SRC_FILTER")
or LibBuilderBase.src_filter.fget(self))
@property
def dependencies(self):
@@ -840,6 +854,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
pass
def install_dependencies(self):
def _is_builtin(uri):
for lb in self.env.GetLibBuilders():
if lb.name == uri:
@@ -862,7 +877,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
not_found_uri.append(uri)
did_install = False
lm = LibraryManager(self.env.subst(join("$PROJECT_LIBDEPS_DIR", "$PIOENV")))
lm = LibraryManager(
self.env.subst(join("$PROJECTLIBDEPS_DIR", "$PIOENV")))
for uri in not_found_uri:
try:
lm.install(uri)
@@ -885,7 +901,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
if not lib_dir:
continue
for lb in self.env.GetLibBuilders():
if lib_dir != lb.path:
if lib_dir not in lb:
continue
if lb not in self.depbuilders:
self.depend_recursive(lb)
@@ -913,26 +929,28 @@ class ProjectAsLibBuilder(LibBuilderBase):
def GetLibSourceDirs(env):
items = env.GetProjectOption("lib_extra_dirs", [])
items.extend(env["LIBSOURCE_DIRS"])
items.extend(env['LIBSOURCE_DIRS'])
return [
env.subst(fs.expanduser(item) if item.startswith("~") else item)
env.subst(expanduser(item) if item.startswith("~") else item)
for item in items
]
def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
def IsCompatibleLibBuilder(env,
lb,
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
compat_mode = lb.lib_compat_mode
if lb.name in env.GetProjectOption("lib_ignore", []):
if verbose:
sys.stderr.write("Ignored library %s\n" % lb.path)
return None
if compat_mode == "strict" and not lb.is_platforms_compatible(env["PIOPLATFORM"]):
if compat_mode == "strict" and not lb.is_platforms_compatible(
env['PIOPLATFORM']):
if verbose:
sys.stderr.write("Platform incompatible library %s\n" % lb.path)
return False
if compat_mode in ("soft", "strict") and not lb.is_frameworks_compatible(
env.get("PIOFRAMEWORK", [])
):
if (compat_mode in ("soft", "strict") and "PIOFRAMEWORK" in env
and not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", []))):
if verbose:
sys.stderr.write("Framework incompatible library %s\n" % lb.path)
return False
@@ -941,10 +959,8 @@ def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)))
def GetLibBuilders(env): # pylint: disable=too-many-branches
if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None:
return sorted(
DefaultEnvironment()["__PIO_LIB_BUILDERS"],
key=lambda lb: 0 if lb.dependent else 1,
)
return sorted(DefaultEnvironment()['__PIO_LIB_BUILDERS'],
key=lambda lb: 0 if lb.dependent else 1)
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[])
@@ -964,8 +980,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
except exception.InvalidJSONFile:
if verbose:
sys.stderr.write(
"Skip library with broken manifest: %s\n" % lib_dir
)
"Skip library with broken manifest: %s\n" % lib_dir)
continue
if env.IsCompatibleLibBuilder(lb):
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
@@ -980,15 +995,15 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
if verbose and found_incompat:
sys.stderr.write(
'More details about "Library Compatibility Mode": '
"More details about \"Library Compatibility Mode\": "
"https://docs.platformio.org/page/librarymanager/ldf.html#"
"ldf-compat-mode\n"
)
"ldf-compat-mode\n")
return DefaultEnvironment()["__PIO_LIB_BUILDERS"]
return DefaultEnvironment()['__PIO_LIB_BUILDERS']
def ConfigureProjectLibBuilder(env):
def _get_vcs_info(lb):
path = LibraryManager.get_src_manifest_path(lb.path)
return fs.load_json(path) if path else None
@@ -1013,42 +1028,40 @@ def ConfigureProjectLibBuilder(env):
title += " %s" % lb.version
if vcs_info and vcs_info.get("version"):
title += " #%s" % vcs_info.get("version")
click.echo("%s|-- %s" % (margin, title), nl=False)
sys.stdout.write("%s|-- %s" % (margin, title))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
if vcs_info:
click.echo(" [%s]" % vcs_info.get("url"), nl=False)
click.echo(" (", nl=False)
click.echo(lb.path, nl=False)
click.echo(")", nl=False)
click.echo("")
sys.stdout.write(" [%s]" % vcs_info.get("url"))
sys.stdout.write(" (")
sys.stdout.write(lb.path)
sys.stdout.write(")")
sys.stdout.write("\n")
if lb.depbuilders:
_print_deps_tree(lb, level + 1)
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
click.echo(
"LDF Modes: Finder ~ %s, Compatibility ~ %s"
% (ldf_mode, project.lib_compat_mode)
)
print("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
print("LDF Modes: Finder ~ %s, Compatibility ~ %s" %
(ldf_mode, project.lib_compat_mode))
project.install_dependencies()
lib_builders = env.GetLibBuilders()
click.echo("Found %d compatible libraries" % len(lib_builders))
print("Found %d compatible libraries" % len(lib_builders))
click.echo("Scanning dependencies...")
print("Scanning dependencies...")
project.search_deps_recursive()
if ldf_mode.startswith("chain") and project.depbuilders:
_correct_found_libs(lib_builders)
if project.depbuilders:
click.echo("Dependency Graph")
print("Dependency Graph")
_print_deps_tree(project)
else:
click.echo("No dependencies")
print("No dependencies")
return project
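The framework guess in LibBuilderFactory.get_used_frameworks relies on a simple include scan; a self-contained illustration of that pattern on an in-memory sample source (the sketch content is invented):

import re

INCLUDE_RE = re.compile(r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE)

sample = '#include <Arduino.h>\n\nvoid setup() {}\nvoid loop() {}\n'

match = INCLUDE_RE.search(sample)
print(match.group(2).lower() if match else "unknown")  # -> "arduino"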

View File

@@ -15,19 +15,17 @@
from __future__ import absolute_import
import atexit
import io
import re
import sys
from os import environ, remove, walk
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
from tempfile import mkstemp
import click
from SCons.Action import Action # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from platformio import fs, util
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
from platformio.compat import get_file_contents, glob_escape
from platformio.managers.core import get_core_package_dir
from platformio.proc import exec_command
@@ -41,48 +39,13 @@ class InoToCPPConverter(object):
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*(\{|;) # must end with `{` or `;`
""",
re.X | re.M | re.I,
)
""", re.X | re.M | re.I)
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
def __init__(self, env):
self.env = env
self._main_ino = None
self._safe_encoding = None
def read_safe_contents(self, path):
error_reported = False
for encoding in (
"utf-8",
None,
get_filesystem_encoding(),
get_locale_encoding(),
"latin-1",
):
try:
with io.open(path, encoding=encoding) as fp:
contents = fp.read()
self._safe_encoding = encoding
return contents
except UnicodeDecodeError:
if not error_reported:
error_reported = True
click.secho(
"Unicode decode error has occurred, please remove invalid "
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
% path,
fg="yellow",
err=True,
)
return ""
def write_safe_contents(self, path, contents):
with io.open(
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
) as fp:
return fp.write(contents)
def is_main_node(self, contents):
return self.DETECTMAIN_RE.search(contents)
@@ -97,8 +60,10 @@ class InoToCPPConverter(object):
assert nodes
lines = []
for node in nodes:
contents = self.read_safe_contents(node.get_path())
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
contents = get_file_contents(node.get_path())
_lines = [
'# 1 "%s"' % node.get_path().replace("\\", "/"), contents
]
if self.is_main_node(contents):
lines = _lines + lines
self._main_ino = node.get_path()
@@ -113,22 +78,23 @@ class InoToCPPConverter(object):
def process(self, contents):
out_file = self._main_ino + ".cpp"
assert self._gcc_preprocess(contents, out_file)
contents = self.read_safe_contents(out_file)
contents = get_file_contents(out_file)
contents = self._join_multiline_strings(contents)
self.write_safe_contents(out_file, self.append_prototypes(contents))
with open(out_file, "w") as fp:
fp.write(self.append_prototypes(contents))
return out_file
def _gcc_preprocess(self, contents, out_file):
tmp_path = mkstemp()[1]
self.write_safe_contents(tmp_path, contents)
self.env.Execute(
self.env.VerboseAction(
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file, tmp_path
),
"Converting " + basename(out_file[:-4]),
)
)
with open(tmp_path, "w") as fp:
fp.write(contents)
env = self.env.Clone()
env.Append(CCFLAGS=["-x", "c++", "-E"])
cmd = env["CXXCOM"]\
.replace("$TARGET", '"%s"' % out_file)\
.replace("$SOURCES", '"%s"' % tmp_path)
env.Execute(
env.VerboseAction(cmd, "Converting " + basename(out_file[:-4])))
atexit.register(_delete_file, tmp_path)
return isfile(out_file)
@@ -150,15 +116,14 @@ class InoToCPPConverter(object):
stropen = True
newlines.append(line[:-1])
continue
if stropen:
elif stropen:
newlines[len(newlines) - 1] += line[:-1]
continue
elif stropen and line.endswith(('",', '";')):
newlines[len(newlines) - 1] += line
stropen = False
newlines.append(
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
)
newlines.append('#line %d "%s"' %
(linenum, self._main_ino.replace("\\", "/")))
continue
newlines.append(line)
@@ -178,10 +143,8 @@ class InoToCPPConverter(object):
prototypes = []
reserved_keywords = set(["if", "else", "while"])
for match in self.PROTOTYPE_RE.finditer(contents):
if (
set([match.group(2).strip(), match.group(3).strip()])
& reserved_keywords
):
if (set([match.group(2).strip(),
match.group(3).strip()]) & reserved_keywords):
continue
prototypes.append(match)
return prototypes
@@ -201,8 +164,11 @@ class InoToCPPConverter(object):
prototypes = self._parse_prototypes(contents) or []
# skip already declared prototypes
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
declared = set(
m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [
m for m in prototypes if m.group(1).strip() not in declared
]
if not prototypes:
return contents
@@ -211,29 +177,23 @@ class InoToCPPConverter(object):
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos],
re.M,
)
contents[:split_pos], re.M)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append(
'#line %d "%s"'
% (
self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/"),
)
)
result.append('#line %d "%s"' % (self._get_total_lines(
contents[:split_pos]), self._main_ino.replace("\\", "/")))
result.append(contents[split_pos:].strip())
return "\n".join(result)
def ConvertInoToCpp(env):
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
ino_nodes = env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde"))
src_dir = glob_escape(env.subst("$PROJECTSRC_DIR"))
ino_nodes = (env.Glob(join(src_dir, "*.ino")) +
env.Glob(join(src_dir, "*.pde")))
if not ino_nodes:
return
c = InoToCPPConverter(env)
@@ -256,13 +216,13 @@ def _get_compiler_type(env):
return "gcc"
try:
sysenv = environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
sysenv['PATH'] = str(env['ENV']['PATH'])
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
except OSError:
return None
if result["returncode"] != 0:
if result['returncode'] != 0:
return None
output = "".join([result["out"], result["err"]]).lower()
output = "".join([result['out'], result['err']]).lower()
if "clang" in output and "LLVM" in output:
return "clang"
if "gcc" in output:
@@ -275,6 +235,7 @@ def GetCompilerType(env):
def GetActualLDScript(env):
def _lookup_in_ldpath(script):
for d in env.get("LIBPATH", []):
path = join(env.subst(d), script)
@@ -289,7 +250,7 @@ def GetActualLDScript(env):
if f == "-T":
script_in_next = True
continue
if script_in_next:
elif script_in_next:
script_in_next = False
raw_script = f
elif f.startswith("-Wl,-T"):
@@ -305,13 +266,12 @@ def GetActualLDScript(env):
if script:
sys.stderr.write(
"Error: Could not find '%s' LD script in LDPATH '%s'\n"
% (script, env.subst("$LIBPATH"))
)
"Error: Could not find '%s' LD script in LDPATH '%s'\n" %
(script, env.subst("$LIBPATH")))
env.Exit(1)
if not script and "LDSCRIPT_PATH" in env:
path = _lookup_in_ldpath(env["LDSCRIPT_PATH"])
path = _lookup_in_ldpath(env['LDSCRIPT_PATH'])
if path:
return path
@@ -334,52 +294,35 @@ def PioClean(env, clean_dir):
for f in files:
dst = join(root, f)
remove(dst)
print(
"Removed %s" % (dst if clean_rel_path.startswith(".") else relpath(dst))
)
print("Removed %s" %
(dst if clean_rel_path.startswith(".") else relpath(dst)))
print("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
def ConfigureDebugFlags(env):
def _cleanup_debug_flags(scope):
if scope not in env:
return
unflags = ["-Os", "-g"]
for level in [0, 1, 2, 3]:
for flag in ("O", "g", "ggdb"):
unflags.append("-%s%d" % (flag, level))
env[scope] = [f for f in env.get(scope, []) if f not in unflags]
env.Append(CPPDEFINES=["__PLATFORMIO_BUILD_DEBUG__"])
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
_cleanup_debug_flags(scope)
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
env.MergeFlags(debug_flags)
optimization_flags = [
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
]
if optimization_flags:
env.AppendUnique(ASFLAGS=optimization_flags, LINKFLAGS=optimization_flags)
def ProcessDebug(env):
if not env.subst("$PIODEBUGFLAGS"):
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
env.Append(BUILD_FLAGS=list(env['PIODEBUGFLAGS']) +
["-D__PLATFORMIO_BUILD_DEBUG__"])
unflags = ["-Os"]
for level in [0, 1, 2]:
for flag in ("O", "g", "ggdb"):
unflags.append("-%s%d" % (flag, level))
env.Append(BUILD_UNFLAGS=unflags)
def ConfigureTestTarget(env):
env.Append(
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")],
)
unitylib = env.BuildLibrary(
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
)
def ProcessTest(env):
env.Append(CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
unitylib = env.BuildLibrary(join("$BUILD_DIR", "UnityTestLib"),
get_core_package_dir("tool-unity"))
env.Prepend(LIBS=[unitylib])
src_filter = ["+<*.cpp>", "+<*.c>"]
if "PIOTEST_RUNNING_NAME" in env:
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], sep))
src_filter.append("+<%s%s>" % (env['PIOTEST_RUNNING_NAME'], sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter)
@@ -389,7 +332,7 @@ def GetExtraScripts(env, scope):
if scope == "post" and ":" not in item:
items.append(item)
elif item.startswith("%s:" % scope):
items.append(item[len(scope) + 1 :])
items.append(item[len(scope) + 1:])
if not items:
return items
with fs.cd(env.subst("$PROJECT_DIR")):
@@ -406,7 +349,7 @@ def generate(env):
env.AddMethod(GetActualLDScript)
env.AddMethod(VerboseAction)
env.AddMethod(PioClean)
env.AddMethod(ConfigureDebugFlags)
env.AddMethod(ConfigureTestTarget)
env.AddMethod(ProcessDebug)
env.AddMethod(ProcessTest)
env.AddMethod(GetExtraScripts)
return env
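The *.ino handling above hinges on spotting which sketch file defines setup() or loop(); a tiny standalone check using the same DETECTMAIN_RE pattern (both sample sources are made up):

import re

DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)

main_ino = "void setup() {\n}\n\nvoid loop() {\n}\n"
helper_ino = "int readSensor() {\n  return 42;\n}\n"

print(bool(DETECTMAIN_RE.search(main_ino)))    # True  -> treated as the main node
print(bool(DETECTMAIN_RE.search(helper_ino)))  # False -> appended after the main node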

View File

@@ -33,28 +33,28 @@ def PioPlatform(env):
variables = env.GetProjectOptions(as_dict=True)
if "framework" in variables:
# support PIO Core 3.0 dev/platforms
variables["pioframework"] = variables["framework"]
p = PlatformFactory.newPlatform(env["PLATFORM_MANIFEST"])
variables['pioframework'] = variables['framework']
p = PlatformFactory.newPlatform(env['PLATFORM_MANIFEST'])
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
return p
def BoardConfig(env, board=None):
with fs.cd(env.subst("$PROJECT_DIR")):
try:
p = env.PioPlatform()
board = board or env.get("BOARD")
assert board, "BoardConfig: Board is not defined"
return p.board_config(board)
except (AssertionError, exception.UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
p = env.PioPlatform()
try:
board = board or env.get("BOARD")
assert board, "BoardConfig: Board is not defined"
config = p.board_config(board)
except (AssertionError, exception.UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
return config
def GetFrameworkScript(env, framework):
p = env.PioPlatform()
assert p.frameworks and framework in p.frameworks
script_path = env.subst(p.frameworks[framework]["script"])
script_path = env.subst(p.frameworks[framework]['script'])
if not isfile(script_path):
script_path = join(p.get_dir(), script_path)
return script_path
@@ -65,7 +65,7 @@ def LoadPioPlatform(env):
installed_packages = p.get_installed_packages()
# Ensure real platform name
env["PIOPLATFORM"] = p.name
env['PIOPLATFORM'] = p.name
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
@@ -75,13 +75,14 @@ def LoadPioPlatform(env):
continue
pkg_dir = p.get_package_dir(name)
env.PrependENVPath(
"PATH", join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir
)
if not WINDOWS and isdir(join(pkg_dir, "lib")) and type_ != "toolchain":
"PATH",
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
if (not WINDOWS and isdir(join(pkg_dir, "lib"))
and type_ != "toolchain"):
env.PrependENVPath(
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"),
)
"DYLD_LIBRARY_PATH"
if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"))
# Platform specific LD Scripts
if isdir(join(p.get_dir(), "ldscripts")):
@@ -93,27 +94,16 @@ def LoadPioPlatform(env):
# update board manifest with overridden data from INI config
board_config = env.BoardConfig()
for option, value in env.GetProjectOptions():
if not option.startswith("board_"):
continue
option = option.lower()[6:]
try:
if isinstance(board_config.get(option), bool):
value = str(value).lower() in ("1", "yes", "true")
elif isinstance(board_config.get(option), int):
value = int(value)
except KeyError:
pass
board_config.update(option, value)
if option.startswith("board_"):
board_config.update(option.lower()[6:], value)
# load default variables from board config
for option_meta in ProjectOptions.values():
if not option_meta.buildenvvar or option_meta.buildenvvar in env:
continue
data_path = (
option_meta.name[6:]
if option_meta.name.startswith("board_")
else option_meta.name.replace("_", ".")
)
data_path = (option_meta.name[6:]
if option_meta.name.startswith("board_") else
option_meta.name.replace("_", "."))
try:
env[option_meta.buildenvvar] = board_config.get(data_path)
except KeyError:
@@ -128,22 +118,22 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
board_config = env.BoardConfig() if "BOARD" in env else None
def _get_configuration_data():
return (
None
if not board_config
else [
"CONFIGURATION:",
"https://docs.platformio.org/page/boards/%s/%s.html"
% (platform.name, board_config.id),
]
)
return None if not board_config else [
"CONFIGURATION:",
"https://docs.platformio.org/page/boards/%s/%s.html" %
(platform.name, board_config.id)
]
def _get_plaform_data():
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
if platform.src_version:
data.append("#" + platform.src_version)
if int(ARGUMENTS.get("PIOVERBOSE", 0)) and platform.src_url:
data.append("(%s)" % platform.src_url)
src_manifest_path = platform.pm.get_src_manifest_path(
platform.get_dir())
if src_manifest_path:
src_manifest = fs.load_json(src_manifest_path)
if "version" in src_manifest:
data.append("#" + src_manifest['version'])
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
data.append("(%s)" % src_manifest['url'])
if board_config:
data.extend([">", board_config.get("name")])
return data
@@ -161,22 +151,19 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
return data
ram = board_config.get("upload", {}).get("maximum_ram_size")
flash = board_config.get("upload", {}).get("maximum_size")
data.append(
"%s RAM, %s Flash" % (fs.format_filesize(ram), fs.format_filesize(flash))
)
data.append("%s RAM, %s Flash" %
(fs.format_filesize(ram), fs.format_filesize(flash)))
return data
def _get_debug_data():
debug_tools = (
board_config.get("debug", {}).get("tools") if board_config else None
)
debug_tools = board_config.get(
"debug", {}).get("tools") if board_config else None
if not debug_tools:
return None
data = [
"DEBUG:",
"Current",
"(%s)"
% board_config.get_debug_tool_name(env.GetProjectOption("debug_tool")),
"DEBUG:", "Current",
"(%s)" % board_config.get_debug_tool_name(
env.GetProjectOption("debug_tool"))
]
onboard = []
external = []
@@ -193,28 +180,28 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
def _get_packages_data():
data = []
for item in platform.dump_used_packages():
original_version = util.get_original_version(item["version"])
info = "%s %s" % (item["name"], item["version"])
for name, options in platform.packages.items():
if options.get("optional"):
continue
pkg_dir = platform.get_package_dir(name)
if not pkg_dir:
continue
manifest = platform.pm.load_manifest(pkg_dir)
original_version = util.get_original_version(manifest['version'])
info = "%s %s" % (manifest['name'], manifest['version'])
extra = []
if original_version:
extra.append(original_version)
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(item["src_url"])
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(manifest['__src_url'])
if extra:
info += " (%s)" % ", ".join(extra)
data.append(info)
if not data:
return None
return ["PACKAGES:"] + ["\n - %s" % d for d in sorted(data)]
return ["PACKAGES:", ", ".join(data)]
for data in (
_get_configuration_data(),
_get_plaform_data(),
_get_hardware_data(),
_get_debug_data(),
_get_packages_data(),
):
for data in (_get_configuration_data(), _get_plaform_data(),
_get_hardware_data(), _get_debug_data(),
_get_packages_data()):
if data and len(data) > 1:
print(" ".join(data))

View File

@@ -14,29 +14,26 @@
from __future__ import absolute_import
from platformio.project.config import MISSING, ProjectConfig, ProjectOptions
from platformio.project.config import ProjectConfig, ProjectOptions
def GetProjectConfig(env):
return ProjectConfig.get_instance(env["PROJECT_CONFIG"])
return ProjectConfig.get_instance(env['PROJECT_CONFIG'])
def GetProjectOptions(env, as_dict=False):
return env.GetProjectConfig().items(env=env["PIOENV"], as_dict=as_dict)
return env.GetProjectConfig().items(env=env['PIOENV'], as_dict=as_dict)
def GetProjectOption(env, option, default=MISSING):
return env.GetProjectConfig().get("env:" + env["PIOENV"], option, default)
def GetProjectOption(env, option, default=None):
return env.GetProjectConfig().get("env:" + env['PIOENV'], option, default)
def LoadProjectOptions(env):
for option, value in env.GetProjectOptions():
option_meta = ProjectOptions.get("env." + option)
if (
not option_meta
or not option_meta.buildenvvar
or option_meta.buildenvvar in env
):
if (not option_meta or not option_meta.buildenvvar
or option_meta.buildenvvar in env):
continue
env[option_meta.buildenvvar] = value
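GetProjectOption's default=MISSING (versus the older default=None) is the sentinel-object pattern; a minimal illustration with a made-up option table:

MISSING = object()  # unique sentinel: distinguishes "not set" from "set to None"

_options = {"monitor_speed": 115200, "debug_tool": None}  # made-up project options

def get_option(name, default=MISSING):
    value = _options.get(name, default)
    if value is MISSING:
        raise KeyError("option %r is not defined" % name)
    return value

print(get_option("monitor_speed"))        # 115200
print(get_option("debug_tool"))           # None, but explicitly configured
print(get_option("upload_port", "auto"))  # falls back to the caller's default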

View File

@@ -1,254 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals
from __future__ import absolute_import
import sys
from os import environ, makedirs, remove
from os.path import isdir, join, splitdrive
from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile
from platformio.compat import dump_json_to_unicode
from platformio.proc import exec_command
from platformio.util import get_systype
def _run_tool(cmd, env, tool_args):
sysenv = environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(build_dir, "size-data-longcmd.txt")
with open(tmp_file, "w") as fp:
fp.write("\n".join(tool_args))
cmd.append("@" + tmp_file)
result = exec_command(cmd, env=sysenv)
remove(tmp_file)
return result
def _get_symbol_locations(env, elf_path, addrs):
if not addrs:
return {}
cmd = [env.subst("$CC").replace("-gcc", "-addr2line"), "-e", elf_path]
result = _run_tool(cmd, env, addrs)
locations = [line for line in result["out"].split("\n") if line]
assert len(addrs) == len(locations)
return dict(zip(addrs, [l.strip() for l in locations]))
def _get_demangled_names(env, mangled_names):
if not mangled_names:
return {}
result = _run_tool(
[env.subst("$CC").replace("-gcc", "-c++filt")], env, mangled_names
)
demangled_names = [line for line in result["out"].split("\n") if line]
assert len(mangled_names) == len(demangled_names)
return dict(
zip(
mangled_names,
[dn.strip().replace("::__FUNCTION__", "") for dn in demangled_names],
)
)
def _determine_section(sections, symbol_addr):
for section, info in sections.items():
if not _is_flash_section(info) and not _is_ram_section(info):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"
def _is_ram_section(section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)
def _is_flash_section(section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
def _collect_sections_info(elffile):
sections = {}
for section in elffile.iter_sections():
if section.is_null() or section.name.startswith(".debug"):
continue
section_type = section["sh_type"]
section_flags = describe_sh_flags(section["sh_flags"])
section_size = section.data_size
sections[section.name] = {
"size": section_size,
"start_addr": section["sh_addr"],
"type": section_type,
"flags": section_flags,
}
return sections
def _collect_symbols_info(env, elffile, elf_path, sections):
symbols = []
symbol_section = elffile.get_section_by_name(".symtab")
if symbol_section.is_null():
sys.stderr.write("Couldn't find symbol table. Is ELF file stripped?")
env.Exit(1)
sysenv = environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
symbol_addrs = []
mangled_names = []
for s in symbol_section.iter_symbols():
symbol_info = s.entry["st_info"]
symbol_addr = s["st_value"]
symbol_size = s["st_size"]
symbol_type = symbol_info["type"]
if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
continue
symbol = {
"addr": symbol_addr,
"bind": symbol_info["bind"],
"name": s.name,
"type": symbol_type,
"size": symbol_size,
"section": _determine_section(sections, symbol_addr),
}
if s.name.startswith("_Z"):
mangled_names.append(s.name)
symbol_addrs.append(hex(symbol_addr))
symbols.append(symbol)
symbol_locations = _get_symbol_locations(env, elf_path, symbol_addrs)
demangled_names = _get_demangled_names(env, mangled_names)
for symbol in symbols:
if symbol["name"].startswith("_Z"):
symbol["demangled_name"] = demangled_names.get(symbol["name"])
location = symbol_locations.get(hex(symbol["addr"]))
if not location or "?" in location:
continue
if "windows" in get_systype():
drive, tail = splitdrive(location)
location = join(drive.upper(), tail)
symbol["file"] = location
symbol["line"] = 0
if ":" in location:
file_, line = location.rsplit(":", 1)
if line.isdigit():
symbol["file"] = file_
symbol["line"] = int(line)
return symbols
def _calculate_firmware_size(sections):
flash_size = ram_size = 0
for section_info in sections.values():
if _is_flash_section(section_info):
flash_size += section_info.get("size", 0)
if _is_ram_section(section_info):
ram_size += section_info.get("size", 0)
return ram_size, flash_size
def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
data = {"device": {}, "memory": {}, "version": 1}
board = env.BoardConfig()
if board:
data["device"] = {
"mcu": board.get("build.mcu", ""),
"cpu": board.get("build.cpu", ""),
"frequency": board.get("build.f_cpu"),
"flash": int(board.get("upload.maximum_size", 0)),
"ram": int(board.get("upload.maximum_ram_size", 0)),
}
if data["device"]["frequency"] and data["device"]["frequency"].endswith("L"):
data["device"]["frequency"] = int(data["device"]["frequency"][0:-1])
elf_path = env.subst("$PIOMAINPROG")
with open(elf_path, "rb") as fp:
elffile = ELFFile(fp)
if not elffile.has_dwarf_info():
sys.stderr.write("Elf file doesn't contain DWARF information")
env.Exit(1)
sections = _collect_sections_info(elffile)
firmware_ram, firmware_flash = _calculate_firmware_size(sections)
data["memory"]["total"] = {
"ram_size": firmware_ram,
"flash_size": firmware_flash,
"sections": sections,
}
files = dict()
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
file_path = symbol.get("file") or "unknown"
if not files.get(file_path, {}):
files[file_path] = {"symbols": [], "ram_size": 0, "flash_size": 0}
symbol_size = symbol.get("size", 0)
section = sections.get(symbol.get("section", ""), {})
if _is_ram_section(section):
files[file_path]["ram_size"] += symbol_size
if _is_flash_section(section):
files[file_path]["flash_size"] += symbol_size
files[file_path]["symbols"].append(symbol)
data["memory"]["files"] = list()
for k, v in files.items():
file_data = {"path": k}
file_data.update(v)
data["memory"]["files"].append(file_data)
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
fp.write(dump_json_to_unicode(data))
def exists(_):
return True
def generate(env):
env.AddMethod(DumpSizeData)
return env
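The removed piosize.py above classifies ELF sections to split firmware size into flash and RAM. A condensed sketch of that classification using pyelftools; "firmware.elf" is a placeholder path, not a file produced here:

from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile

flash_size = ram_size = 0
with open("firmware.elf", "rb") as fp:  # placeholder path to a built ELF
    for section in ELFFile(fp).iter_sections():
        if section.is_null() or section.name.startswith(".debug"):
            continue
        flags = describe_sh_flags(section["sh_flags"])
        # flash: allocated PROGBITS; RAM: writable+allocated NOBITS/PROGBITS
        if section["sh_type"] == "SHT_PROGBITS" and "A" in flags:
            flash_size += section.data_size
        if section["sh_type"] in ("SHT_NOBITS", "SHT_PROGBITS") and flags == "WA":
            ram_size += section.data_size

print("flash: %d bytes, ram: %d bytes" % (flash_size, ram_size))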

View File

@@ -60,9 +60,9 @@ def WaitForNewSerialPort(env, before):
prev_port = env.subst("$UPLOAD_PORT")
new_port = None
elapsed = 0
before = [p["port"] for p in before]
before = [p['port'] for p in before]
while elapsed < 5 and new_port is None:
now = [p["port"] for p in util.get_serial_ports()]
now = [p['port'] for p in util.get_serial_ports()]
for p in now:
if p not in before:
new_port = p
@@ -84,12 +84,10 @@ def WaitForNewSerialPort(env, before):
sleep(1)
if not new_port:
sys.stderr.write(
"Error: Couldn't find a board on the selected port. "
"Check that you have the correct port selected. "
"If it is correct, try pressing the board's reset "
"button after initiating the upload.\n"
)
sys.stderr.write("Error: Couldn't find a board on the selected port. "
"Check that you have the correct port selected. "
"If it is correct, try pressing the board's reset "
"button after initiating the upload.\n")
env.Exit(1)
return new_port
@@ -101,8 +99,8 @@ def AutodetectUploadPort(*args, **kwargs):
def _get_pattern():
if "UPLOAD_PORT" not in env:
return None
if set(["*", "?", "[", "]"]) & set(env["UPLOAD_PORT"]):
return env["UPLOAD_PORT"]
if set(["*", "?", "[", "]"]) & set(env['UPLOAD_PORT']):
return env['UPLOAD_PORT']
return None
def _is_match_pattern(port):
@@ -114,13 +112,17 @@ def AutodetectUploadPort(*args, **kwargs):
def _look_for_mbed_disk():
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
for item in util.get_logical_devices():
if item["path"].startswith("/net") or not _is_match_pattern(item["path"]):
if item['path'].startswith("/net") or not _is_match_pattern(
item['path']):
continue
mbed_pages = [join(item["path"], n) for n in ("mbed.htm", "mbed.html")]
mbed_pages = [
join(item['path'], n) for n in ("mbed.htm", "mbed.html")
]
if any(isfile(p) for p in mbed_pages):
return item["path"]
if item["name"] and any(l in item["name"].lower() for l in msdlabels):
return item["path"]
return item['path']
if item['name'] \
and any(l in item['name'].lower() for l in msdlabels):
return item['path']
return None
def _look_for_serial_port():
@@ -130,17 +132,17 @@ def AutodetectUploadPort(*args, **kwargs):
if "BOARD" in env and "build.hwids" in env.BoardConfig():
board_hwids = env.BoardConfig().get("build.hwids")
for item in util.get_serial_ports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
if not _is_match_pattern(item['port']):
continue
port = item["port"]
port = item['port']
if upload_protocol.startswith("blackmagic"):
if WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
if "GDB" in item['description']:
return port
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
if hwid_str in item['hwid']:
return port
return port
@@ -148,9 +150,9 @@ def AutodetectUploadPort(*args, **kwargs):
print(env.subst("Use manually specified: $UPLOAD_PORT"))
return
if env.subst("$UPLOAD_PROTOCOL") == "mbed" or (
"mbed" in env.subst("$PIOFRAMEWORK") and not env.subst("$UPLOAD_PROTOCOL")
):
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
or ("mbed" in env.subst("$PIOFRAMEWORK")
and not env.subst("$UPLOAD_PROTOCOL"))):
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
else:
try:
@@ -166,8 +168,7 @@ def AutodetectUploadPort(*args, **kwargs):
"Error: Please specify `upload_port` for environment or use "
"global `--upload-port` option.\n"
"For some development platforms it can be a USB flash "
"drive (i.e. /media/<user>/<device name>)\n"
)
"drive (i.e. /media/<user>/<device name>)\n")
env.Exit(1)
@@ -178,17 +179,16 @@ def UploadToDisk(_, target, source, env):
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
if not isfile(fpath):
continue
copyfile(fpath, join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
print(
"Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)"
)
copyfile(fpath,
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
print("Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)")
def CheckUploadSize(_, target, source, env):
check_conditions = [
env.get("BOARD"),
env.get("SIZETOOL") or env.get("SIZECHECKCMD"),
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
]
if not all(check_conditions):
return
@@ -198,11 +198,9 @@ def CheckUploadSize(_, target, source, env):
return
def _configure_defaults():
env.Replace(
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+",
)
env.Replace(SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
def _get_size_output():
cmd = env.get("SIZECHECKCMD")
@@ -212,11 +210,11 @@ def CheckUploadSize(_, target, source, env):
cmd = cmd.split()
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
sysenv = environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
sysenv['PATH'] = str(env['ENV']['PATH'])
result = exec_command(env.subst(cmd), env=sysenv)
if result["returncode"] != 0:
if result['returncode'] != 0:
return None
return result["out"].strip()
return result['out'].strip()
def _calculate_size(output, pattern):
if not output or not pattern:
@@ -240,8 +238,7 @@ def CheckUploadSize(_, target, source, env):
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
)
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
_configure_defaults()
@@ -249,11 +246,12 @@ def CheckUploadSize(_, target, source, env):
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
print('Advanced Memory Usage is available via "PlatformIO Home > Project Inspect"')
print("Memory Usage -> http://bit.ly/pio-memory-usage")
if data_max_size and data_size > -1:
print("RAM: %s" % _format_availale_bytes(data_size, data_max_size))
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
if program_size > -1:
print("Flash: %s" % _format_availale_bytes(program_size, program_max_size))
print("PROGRAM: %s" %
_format_availale_bytes(program_size, program_max_size))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)
@@ -264,10 +262,9 @@ def CheckUploadSize(_, target, source, env):
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
# env.Exit(1)
if program_size > program_max_size:
sys.stderr.write(
"Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (program_size, program_max_size)
)
sys.stderr.write("Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" %
(program_size, program_max_size))
env.Exit(1)
@@ -275,7 +272,8 @@ def PrintUploadInfo(env):
configured = env.subst("$UPLOAD_PROTOCOL")
available = [configured] if configured else []
if "BOARD" in env:
available.extend(env.BoardConfig().get("upload", {}).get("protocols", []))
available.extend(env.BoardConfig().get("upload",
{}).get("protocols", []))
if available:
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
if configured:
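For reference, `_configure_defaults` and `_calculate_size` in the hunks above parse Berkeley-format output from the GNU `size` tool (`$SIZETOOL -B -d $SOURCES`) with two regular expressions: text plus data counts toward flash, data plus bss toward RAM. Here is a standalone sketch of that parsing; the sample `size` output and byte counts are fabricated for illustration.

import re

SIZE_OUTPUT = """\
   text    data     bss     dec     hex filename
  12345     672    1504   14521    38b9 firmware.elf
"""

PROG_REGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"        # text + data -> flash
DATA_REGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"    # data + bss  -> RAM

def calculate_size(output, pattern):
    size = 0
    for line in output.splitlines():
        match = re.search(pattern, line.strip())
        if not match:
            continue
        size += sum(int(value) for value in match.groups())
    return size

print("Flash:", calculate_size(SIZE_OUTPUT, PROG_REGEXP))  # 12345 + 672
print("RAM:  ", calculate_size(SIZE_OUTPUT, DATA_REGEXP))  # 672 + 1504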

View File

@@ -22,12 +22,12 @@ from platformio.compat import WINDOWS, hashlib_encode_data
# Windows CLI has a command-length limit of 8192 characters
# Leave 2000 chars for flags and other options
MAX_LINE_LENGTH = 6000 if WINDOWS else 128072
MAX_SOURCES_LENGTH = 6000
def long_sources_hook(env, sources):
_sources = str(sources).replace("\\", "/")
if len(str(_sources)) < MAX_LINE_LENGTH:
if len(str(_sources)) < MAX_SOURCES_LENGTH:
return sources
# fix space in paths
@@ -43,7 +43,7 @@ def long_sources_hook(env, sources):
def long_incflags_hook(env, incflags):
_incflags = env.subst(incflags).replace("\\", "/")
if len(_incflags) < MAX_LINE_LENGTH:
if len(_incflags) < MAX_SOURCES_LENGTH:
return incflags
# fix space in paths
@@ -61,9 +61,8 @@ def _file_long_data(env, data):
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
)
tmp_file = join(build_dir,
"longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest())
if isfile(tmp_file):
return tmp_file
with open(tmp_file, "w") as fp:
@@ -76,17 +75,18 @@ def exists(_):
def generate(env):
if not WINDOWS:
return None
env.Replace(_long_sources_hook=long_sources_hook)
env.Replace(_long_incflags_hook=long_incflags_hook)
coms = {}
for key in ("ARCOM", "LINKCOM"):
coms[key] = env.get(key, "").replace(
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
)
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}")
for key in ("_CCCOMCOM", "ASPPCOM"):
coms[key] = env.get(key, "").replace(
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
)
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}")
env.Replace(**coms)
return env
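The hooks above work around the Windows command-length limit by writing long source and include lists to a temporary `longcmd-<md5>` file and substituting an `@file` argument, which GCC-style toolchains expand back into arguments. Below is a minimal sketch of the same response-file idea outside of SCons; the 8192-character threshold, the object-file names, and the `gcc` invocation are illustrative assumptions.

import hashlib
import os
import tempfile

MAX_CMD_LENGTH = 8192  # assumed Windows command-length limit

def maybe_response_file(args, work_dir):
    joined = " ".join(args)
    if len(joined) < MAX_CMD_LENGTH:
        return args
    digest = hashlib.md5(joined.encode("utf-8")).hexdigest()
    tmp_path = os.path.join(work_dir, "longcmd-%s" % digest)
    if not os.path.isfile(tmp_path):
        with open(tmp_path, "w") as fp:
            fp.write(joined)
    return ["@%s" % tmp_path]  # GCC/binutils expand @file into its contents

# Usage sketch with a hypothetical, deliberately long object list
objects = ["obj/file%d.o" % i for i in range(2000)]
cmd = ["gcc", "-o", "firmware.elf"] + maybe_response_file(objects, tempfile.gettempdir())
print("final argv length:", len(" ".join(cmd)))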

View File

@@ -14,12 +14,11 @@
from __future__ import absolute_import
import fnmatch
import os
import sys
from os.path import basename, dirname, isdir, join, realpath
from SCons import Builder, Util # pylint: disable=import-error
from SCons.Node import FS # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from SCons.Script import AlwaysBuild # pylint: disable=import-error
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
@@ -31,10 +30,8 @@ from platformio.compat import string_types
from platformio.util import pioversion_to_intstr
SRC_HEADER_EXT = ["h", "hpp"]
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
SRC_C_EXT = ["c"]
SRC_CXX_EXT = ["cc", "cpp", "cxx", "c++"]
SRC_BUILD_EXT = SRC_C_EXT + SRC_CXX_EXT + SRC_ASM_EXT
SRC_C_EXT = ["c", "cc", "cpp"]
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % os.sep, "-<.svn%s>" % os.sep]
@@ -46,58 +43,53 @@ def scons_patched_match_splitext(path, suffixes=None):
return tokens
def GetBuildType(env):
return (
"debug"
if (
set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
or env.GetProjectOption("build_type") == "debug"
)
else "release"
)
def _build_project_deps(env):
project_lib_builder = env.ConfigureProjectLibBuilder()
# prepend project libs to the beginning of list
env.Prepend(LIBS=project_lib_builder.build())
# prepend extra linker related options from libs
env.PrependUnique(
**{
key: project_lib_builder.env.get(key)
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
if project_lib_builder.env.get(key)
})
projenv = env.Clone()
# CPPPATH from dependencies
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
# extra build flags from `platformio.ini`
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
if ("nobuild" not in COMMAND_LINE_TARGETS
and "arduino" in projenv.get("PIOFRAMEWORK", [])):
projenv.ConvertInoToCpp()
is_test = "__test" in COMMAND_LINE_TARGETS
if is_test:
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER")
if not is_test or env.GetProjectOption("test_build_project_src", False):
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
env.get("SRC_FILTER"))
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
sys.stderr.write(
"Error: Nothing to build. Please put your source code files "
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
env.Exit(1)
Export("projenv")
def BuildProgram(env):
env.ProcessProgramDeps()
env.ProcessProjectDeps()
# prepend the main LD script
if env.get("LDSCRIPT_PATH") and not any("-Wl,-T" in f for f in env["LINKFLAGS"]):
env.Prepend(LINKFLAGS=["-T", env.subst("$LDSCRIPT_PATH")])
# enable "cyclic reference" for linker
if env.get("LIBS") and env.GetCompilerType() == "gcc":
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
program = env.Program(
os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
)
env.Replace(PIOMAINPROG=program)
AlwaysBuild(
env.Alias(
"checkprogsize",
program,
env.VerboseAction(env.CheckUploadSize, "Checking size $PIOMAINPROG"),
)
)
print("Building in %s mode" % env.GetBuildType())
return program
def ProcessProgramDeps(env):
def _append_pio_macros():
env.AppendUnique(
CPPDEFINES=[
(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())),
)
]
)
env.AppendUnique(CPPDEFINES=[(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())))])
_append_pio_macros()
@@ -107,6 +99,10 @@ def ProcessProgramDeps(env):
if not Util.case_sensitive_suffixes(".s", ".S"):
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
if ("debug" in COMMAND_LINE_TARGETS
or env.GetProjectOption("build_type") == "debug"):
env.ProcessDebug()
# process extra flags from board
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
@@ -117,55 +113,39 @@ def ProcessProgramDeps(env):
# process framework scripts
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
if env.GetBuildType() == "debug":
env.ConfigureDebugFlags()
# restore PIO macros if they were deleted by a framework
_append_pio_macros()
# remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
if "__test" in COMMAND_LINE_TARGETS:
env.ConfigureTestTarget()
env.ProcessTest()
# build project with dependencies
_build_project_deps(env)
def ProcessProjectDeps(env):
project_lib_builder = env.ConfigureProjectLibBuilder()
# prepend the main LD script
if (env.get("LDSCRIPT_PATH")
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
# prepend project libs to the beginning of list
env.Prepend(LIBS=project_lib_builder.build())
# prepend extra linker related options from libs
env.PrependUnique(
**{
key: project_lib_builder.env.get(key)
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
if project_lib_builder.env.get(key)
}
)
# enable "cyclic reference" for linker
if env.get("LIBS") and env.GetCompilerType() == "gcc":
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
projenv = env.Clone()
program = env.Program(join("$BUILD_DIR", env.subst("$PROGNAME")),
env['PIOBUILDFILES'])
env.Replace(PIOMAINPROG=program)
# CPPPATH from dependencies
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
# extra build flags from `platformio.ini`
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
AlwaysBuild(
env.Alias(
"checkprogsize", program,
env.VerboseAction(env.CheckUploadSize,
"Checking size $PIOMAINPROG")))
is_test = "__test" in COMMAND_LINE_TARGETS
if is_test:
projenv.BuildSources(
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
)
if not is_test or env.GetProjectOption("test_build_project_src"):
projenv.BuildSources(
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
)
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
sys.stderr.write(
"Error: Nothing to build. Please put your source code files "
"to '%s' folder\n" % env.subst("$PROJECT_SRC_DIR")
)
env.Exit(1)
Export("projenv")
return program
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
@@ -179,30 +159,30 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
result[key].extend(value)
cppdefines = []
for item in result["CPPDEFINES"]:
for item in result['CPPDEFINES']:
if not Util.is_Sequence(item):
cppdefines.append(item)
continue
name, value = item[:2]
if '"' in value:
value = value.replace('"', '\\"')
if '\"' in value:
value = value.replace('\"', '\\\"')
elif value.isdigit():
value = int(value)
elif value.replace(".", "", 1).isdigit():
value = float(value)
cppdefines.append((name, value))
result["CPPDEFINES"] = cppdefines
result['CPPDEFINES'] = cppdefines
# fix relative CPPPATH & LIBPATH
for k in ("CPPPATH", "LIBPATH"):
for i, p in enumerate(result.get(k, [])):
if os.path.isdir(p):
result[k][i] = os.path.realpath(p)
if isdir(p):
result[k][i] = realpath(p)
# fix relative path for "-include"
for i, f in enumerate(result.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include":
result["CCFLAGS"][i] = (f[0], env.File(os.path.realpath(f[1].get_path())))
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
return result
@@ -215,15 +195,14 @@ def ProcessFlags(env, flags): # pylint: disable=too-many-branches
# Cancel any previous definition of name, either built in or
# provided with a -U option // Issue #191
undefines = [
u
for u in env.get("CCFLAGS", [])
u for u in env.get("CCFLAGS", [])
if isinstance(u, string_types) and u.startswith("-U")
]
if undefines:
for undef in undefines:
env["CCFLAGS"].remove(undef)
if undef[2:] in env["CPPDEFINES"]:
env["CPPDEFINES"].remove(undef[2:])
env['CCFLAGS'].remove(undef)
if undef[2:] in env['CPPDEFINES']:
env['CPPDEFINES'].remove(undef[2:])
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
@@ -246,7 +225,8 @@ def ProcessUnFlags(env, flags):
for current in env.get(key, []):
conditions = [
unflag == current,
isinstance(current, (tuple, list)) and unflag[0] == current[0],
isinstance(current, (tuple, list))
and unflag[0] == current[0]
]
if any(conditions):
env[key].remove(current)
@@ -255,14 +235,15 @@ def ProcessUnFlags(env, flags):
def MatchSourceFiles(env, src_dir, src_filter=None):
src_filter = env.subst(src_filter) if src_filter else None
src_filter = src_filter or SRC_FILTER_DEFAULT
return fs.match_src_files(
env.subst(src_dir), src_filter, SRC_BUILD_EXT + SRC_HEADER_EXT
)
return fs.match_src_files(env.subst(src_dir), src_filter,
SRC_BUILD_EXT + SRC_HEADER_EXT)
def CollectBuildFiles(
env, variant_dir, src_dir, src_filter=None, duplicate=False
): # pylint: disable=too-many-locals
def CollectBuildFiles(env,
variant_dir,
src_dir,
src_filter=None,
duplicate=False):
sources = []
variants = []
@@ -271,44 +252,27 @@ def CollectBuildFiles(
src_dir = src_dir[:-1]
for item in env.MatchSourceFiles(src_dir, src_filter):
_reldir = os.path.dirname(item)
_src_dir = os.path.join(src_dir, _reldir) if _reldir else src_dir
_var_dir = os.path.join(variant_dir, _reldir) if _reldir else variant_dir
_reldir = dirname(item)
_src_dir = join(src_dir, _reldir) if _reldir else src_dir
_var_dir = join(variant_dir, _reldir) if _reldir else variant_dir
if _var_dir not in variants:
variants.append(_var_dir)
env.VariantDir(_var_dir, _src_dir, duplicate)
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
for callback, pattern in env.get("__PIO_BUILD_MIDDLEWARES", []):
tmp = []
for node in sources:
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
tmp.append(node)
continue
n = callback(node)
if n:
tmp.append(n)
sources = tmp
sources.append(env.File(join(_var_dir, basename(item))))
return sources
def AddBuildMiddleware(env, callback, pattern=None):
env.Append(__PIO_BUILD_MIDDLEWARES=[(callback, pattern)])
def BuildFrameworks(env, frameworks):
if not frameworks:
return
if "BOARD" not in env:
sys.stderr.write(
"Please specify `board` in `platformio.ini` to use "
"with '%s' framework\n" % ", ".join(frameworks)
)
sys.stderr.write("Please specify `board` in `platformio.ini` to use "
"with '%s' framework\n" % ", ".join(frameworks))
env.Exit(1)
board_frameworks = env.BoardConfig().get("frameworks", [])
@@ -316,37 +280,34 @@ def BuildFrameworks(env, frameworks):
if board_frameworks:
frameworks.insert(0, board_frameworks[0])
else:
sys.stderr.write("Error: Please specify `board` in `platformio.ini`\n")
sys.stderr.write(
"Error: Please specify `board` in `platformio.ini`\n")
env.Exit(1)
for f in frameworks:
if f == "arduino":
# Arduino IDE appends .o to the end of the filename
Builder.match_splitext = scons_patched_match_splitext
if "nobuild" not in COMMAND_LINE_TARGETS:
env.ConvertInoToCpp()
if f in board_frameworks:
SConscript(env.GetFrameworkScript(f), exports="env")
else:
sys.stderr.write("Error: This board doesn't support %s framework!\n" % f)
sys.stderr.write(
"Error: This board doesn't support %s framework!\n" % f)
env.Exit(1)
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
return env.StaticLibrary(
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
)
env.subst(variant_dir),
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
def BuildSources(env, variant_dir, src_dir, src_filter=None):
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
DefaultEnvironment().Append(
PIOBUILDFILES=[
env.Object(node) if isinstance(node, FS.File) else node for node in nodes
]
)
PIOBUILDFILES=[env.Object(node) for node in nodes])
def exists(_):
@@ -354,16 +315,12 @@ def exists(_):
def generate(env):
env.AddMethod(GetBuildType)
env.AddMethod(BuildProgram)
env.AddMethod(ProcessProgramDeps)
env.AddMethod(ProcessProjectDeps)
env.AddMethod(ParseFlagsExtended)
env.AddMethod(ProcessFlags)
env.AddMethod(ProcessUnFlags)
env.AddMethod(MatchSourceFiles)
env.AddMethod(CollectBuildFiles)
env.AddMethod(AddBuildMiddleware)
env.AddMethod(BuildFrameworks)
env.AddMethod(BuildLibrary)
env.AddMethod(BuildSources)
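`CollectBuildFiles` above runs every collected source node through the registered `(callback, pattern)` pairs: a falsy return drops the node, any other return value replaces it. The following is a hedged sketch of how such middlewares might be registered from a PlatformIO `extra_scripts` file; `Import("env")` is supplied by the SCons build environment, while the glob patterns, file names, and the `-Os` flag tweak are illustrative assumptions, not part of the diff.

Import("env")

def exclude_vendored_asm(node):
    # A falsy return drops the node, mirroring "if n: tmp.append(n)" above
    return None

def build_with_os(node):
    # Returning a replacement node changes how this file is compiled
    return env.Object(node, CCFLAGS=env["CCFLAGS"] + ["-Os"])

env.AddBuildMiddleware(exclude_vendored_asm, "*/vendor/*.S")
env.AddBuildMiddleware(build_with_os, "*/src/big_table.c")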

View File

@@ -13,6 +13,7 @@
# limitations under the License.
import os
from os.path import dirname, isfile, join
import click
@@ -21,21 +22,13 @@ class PlatformioCLI(click.MultiCommand):
leftover_args = []
def __init__(self, *args, **kwargs):
super(PlatformioCLI, self).__init__(*args, **kwargs)
self._pio_cmds_dir = os.path.dirname(__file__)
@staticmethod
def in_silence():
args = PlatformioCLI.leftover_args
return args and any(
[
args[0] == "debug" and "--interpreter" in " ".join(args),
args[0] == "upgrade",
"--json-output" in args,
"--version" in args,
]
)
return args and any([
args[0] == "debug" and "--interpreter" in " ".join(args),
args[0] == "upgrade", "--json-output" in args, "--version" in args
])
def invoke(self, ctx):
PlatformioCLI.leftover_args = ctx.args
@@ -45,36 +38,35 @@ class PlatformioCLI(click.MultiCommand):
def list_commands(self, ctx):
cmds = []
for cmd_name in os.listdir(self._pio_cmds_dir):
if cmd_name.startswith("__init__"):
cmds_dir = dirname(__file__)
for name in os.listdir(cmds_dir):
if name.startswith("__init__"):
continue
if os.path.isfile(os.path.join(self._pio_cmds_dir, cmd_name, "command.py")):
cmds.append(cmd_name)
elif cmd_name.endswith(".py"):
cmds.append(cmd_name[:-3])
if isfile(join(cmds_dir, name, "command.py")):
cmds.append(name)
elif name.endswith(".py"):
cmds.append(name[:-3])
cmds.sort()
return cmds
def get_command(self, ctx, cmd_name):
mod = None
try:
mod_path = "platformio.commands." + cmd_name
if os.path.isfile(os.path.join(self._pio_cmds_dir, cmd_name, "command.py")):
mod_path = "platformio.commands.%s.command" % cmd_name
mod = __import__(mod_path, None, None, ["cli"])
mod = __import__("platformio.commands." + cmd_name, None, None,
["cli"])
except ImportError:
try:
return self._handle_obsolate_command(cmd_name)
except AttributeError:
pass
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
return mod.cli
@staticmethod
def _handle_obsolate_command(name):
# pylint: disable=import-outside-toplevel
if name == "init":
from platformio.commands.project import project_init
return project_init
if name == "platforms":
from platformio.commands import platform
return platform.cli
if name == "serialports":
from platformio.commands import device
return device.cli
raise AttributeError()
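`PlatformioCLI` above is a `click.MultiCommand` that discovers subcommands on disk and imports them only when invoked. Below is a reduced sketch of that lazy-loading pattern; the `mypkg.commands` package layout (one module per command, each exposing a `cli` object) is an assumption for illustration only.

import os

import click

class LazyCLI(click.MultiCommand):
    commands_dir = os.path.join(os.path.dirname(__file__), "commands")

    def list_commands(self, ctx):
        names = []
        for name in os.listdir(self.commands_dir):
            if name.endswith(".py") and not name.startswith("__init__"):
                names.append(name[:-3])
        return sorted(names)

    def get_command(self, ctx, cmd_name):
        try:
            mod = __import__("mypkg.commands." + cmd_name, None, None, ["cli"])
        except ImportError:
            raise click.UsageError('No such command "%s"' % cmd_name, ctx)
        return mod.cli

@click.command(cls=LazyCLI)
def cli():
    """Entry point that resolves subcommands lazily."""

if __name__ == "__main__":
    cli()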

View File

@@ -0,0 +1,72 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import sys
import click
from platformio.managers.core import pioplus_call
@click.group("account", short_help="Manage PIO Account")
def cli():
pass
@cli.command("register", short_help="Create new PIO Account")
@click.option("-u", "--username")
def account_register(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("login", short_help="Log in to PIO Account")
@click.option("-u", "--username")
@click.option("-p", "--password")
def account_login(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("logout", short_help="Log out of PIO Account")
def account_logout():
pioplus_call(sys.argv[1:])
@cli.command("password", short_help="Change password")
@click.option("--old-password")
@click.option("--new-password")
def account_password(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("token", short_help="Get or regenerate Authentication Token")
@click.option("-p", "--password")
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("forgot", short_help="Forgot password")
@click.option("-u", "--username")
def account_forgot(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("show", short_help="PIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show(**kwargs):
pioplus_call(sys.argv[1:])

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,235 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import os
import time
import requests.adapters
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
from platformio import __pioaccount_api__, app
from platformio.commands.account import exception
class AccountClient(object):
def __init__(
self, api_base_url=__pioaccount_api__, retries=3,
):
if api_base_url.endswith("/"):
api_base_url = api_base_url[:-1]
self.api_base_url = api_base_url
self._session = requests.Session()
self._session.headers.update({"User-Agent": app.get_user_agent()})
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=2,
method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
)
adapter = requests.adapters.HTTPAdapter(max_retries=retry)
self._session.mount(api_base_url, adapter)
def login(self, username, password):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise exception.AccountAlreadyAuthenticated(
app.get_state_item("account", {}).get("email", "")
)
response = self._session.post(
self.api_base_url + "/v1/login",
data={"username": username, "password": password},
)
result = self.raise_error_from_response(response)
app.set_state_item("account", result)
return result
def login_with_code(self, client_id, code, redirect_uri):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise exception.AccountAlreadyAuthenticated(
app.get_state_item("account", {}).get("email", "")
)
response = self._session.post(
self.api_base_url + "/v1/login/code",
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
)
result = self.raise_error_from_response(response)
app.set_state_item("account", result)
return result
def logout(self):
try:
refresh_token = self.get_refresh_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
response = requests.post(
self.api_base_url + "/v1/logout", data={"refresh_token": refresh_token},
)
try:
self.raise_error_from_response(response)
except exception.AccountError:
pass
app.delete_state_item("account")
return True
def change_password(self, old_password, new_password):
try:
token = self.fetch_authentication_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
response = self._session.post(
self.api_base_url + "/v1/password",
headers={"Authorization": "Bearer %s" % token},
data={"old_password": old_password, "new_password": new_password},
)
self.raise_error_from_response(response)
return True
def registration(
self, username, email, password, firstname, lastname
): # pylint:disable=too-many-arguments
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise exception.AccountAlreadyAuthenticated(
app.get_state_item("account", {}).get("email", "")
)
response = self._session.post(
self.api_base_url + "/v1/registration",
data={
"username": username,
"email": email,
"password": password,
"firstname": firstname,
"lastname": lastname,
},
)
return self.raise_error_from_response(response)
def auth_token(self, password, regenerate):
try:
token = self.fetch_authentication_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
response = self._session.post(
self.api_base_url + "/v1/token",
headers={"Authorization": "Bearer %s" % token},
data={"password": password, "regenerate": 1 if regenerate else 0},
)
return self.raise_error_from_response(response).get("auth_token")
def forgot_password(self, username):
response = self._session.post(
self.api_base_url + "/v1/forgot", data={"username": username},
)
return self.raise_error_from_response(response).get("auth_token")
def get_profile(self):
try:
token = self.fetch_authentication_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
response = self._session.get(
self.api_base_url + "/v1/profile",
headers={"Authorization": "Bearer %s" % token},
)
return self.raise_error_from_response(response)
def update_profile(self, profile, current_password):
try:
token = self.fetch_authentication_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
profile["current_password"] = current_password
response = self._session.put(
self.api_base_url + "/v1/profile",
headers={"Authorization": "Bearer %s" % token},
data=profile,
)
return self.raise_error_from_response(response)
def get_account_info(self, offline):
if offline:
account = app.get_state_item("account")
if not account:
raise exception.AccountNotAuthenticated()
return {
"profile": {
"email": account.get("email"),
"username": account.get("username"),
}
}
try:
token = self.fetch_authentication_token()
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
response = self._session.get(
self.api_base_url + "/v1/summary",
headers={"Authorization": "Bearer %s" % token},
)
return self.raise_error_from_response(response)
def fetch_authentication_token(self):
if "PLATFORMIO_AUTH_TOKEN" in os.environ:
return os.environ["PLATFORMIO_AUTH_TOKEN"]
auth = app.get_state_item("account", {}).get("auth", {})
if auth.get("access_token") and auth.get("access_token_expire"):
if auth.get("access_token_expire") > time.time():
return auth.get("access_token")
if auth.get("refresh_token"):
response = self._session.post(
self.api_base_url + "/v1/login",
headers={"Authorization": "Bearer %s" % auth.get("refresh_token")},
)
result = self.raise_error_from_response(response)
app.set_state_item("account", result)
return result.get("auth").get("access_token")
raise exception.AccountNotAuthenticated()
@staticmethod
def get_refresh_token():
try:
auth = app.get_state_item("account").get("auth").get("refresh_token")
return auth
except: # pylint:disable=bare-except
raise exception.AccountNotAuthenticated()
@staticmethod
def raise_error_from_response(response, expected_codes=(200, 201, 202)):
if response.status_code in expected_codes:
try:
return response.json()
except ValueError:
pass
try:
message = response.json()["message"]
except (KeyError, ValueError):
message = response.text
if "Authorization session has been expired" in message:
app.delete_state_item("account")
raise exception.AccountError(message)
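`AccountClient` above mounts a `urllib3` retry policy on a `requests` session so transient API failures are retried with exponential backoff. Here is a standalone sketch of that setup; the base URL is a placeholder, and the `method_whitelist` argument used in the hunk (renamed to `allowed_methods` in newer urllib3 releases) is omitted to keep the sketch version-neutral.

import requests
import requests.adapters
from urllib3.util.retry import Retry

def build_session(base_url, retries=3):
    session = requests.Session()
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=2,  # roughly 2s, 4s, 8s ... between attempts
    )
    adapter = requests.adapters.HTTPAdapter(max_retries=retry)
    # Only requests against base_url go through the retrying adapter
    session.mount(base_url, adapter)
    return session

session = build_session("https://api.example.com")
# response = session.post("https://api.example.com/v1/login", data={...})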

View File

@@ -1,278 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import datetime
import json
import re
import click
from tabulate import tabulate
from platformio.commands.account import exception
from platformio.commands.account.client import AccountClient
@click.group("account", short_help="Manage PIO Account")
def cli():
pass
def validate_username(value):
value = str(value).strip()
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){3,38}$", value, flags=re.I):
raise click.BadParameter(
"Invalid username format. "
"Username must contain at least 4 characters including single hyphens,"
" and cannot begin or end with a hyphen"
)
return value
def validate_email(value):
value = str(value).strip()
if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
raise click.BadParameter("Invalid email address")
return value
def validate_password(value):
value = str(value).strip()
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
raise click.BadParameter(
"Invalid password format. "
"Password must contain at least 8 characters"
" including a number and a lowercase letter"
)
return value
@cli.command("register", short_help="Create new PIO Account")
@click.option(
"-u",
"--username",
prompt=True,
callback=lambda _, __, value: validate_username(value),
)
@click.option(
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
)
@click.option(
"-p",
"--password",
prompt=True,
hide_input=True,
confirmation_prompt=True,
callback=lambda _, __, value: validate_password(value),
)
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register(username, email, password, firstname, lastname):
client = AccountClient()
client.registration(username, email, password, firstname, lastname)
return click.secho(
"An account has been successfully created. "
"Please check your mail to activate your account and verify your email address.",
fg="green",
)
@cli.command("login", short_help="Log in to PIO Account")
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login(username, password):
client = AccountClient()
client.login(username, password)
return click.secho("Successfully logged in!", fg="green")
@cli.command("logout", short_help="Log out of PIO Account")
def account_logout():
client = AccountClient()
client.logout()
return click.secho("Successfully logged out!", fg="green")
@cli.command("password", short_help="Change password")
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password(old_password, new_password):
client = AccountClient()
client.change_password(old_password, new_password)
return click.secho("Password successfully changed!", fg="green")
@cli.command("token", short_help="Get or regenerate Authentication Token")
@click.option("-p", "--password", prompt=True, hide_input=True)
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token(password, regenerate, json_output):
client = AccountClient()
auth_token = client.auth_token(password, regenerate)
if json_output:
return click.echo(json.dumps({"status": "success", "result": auth_token}))
return click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
@cli.command("forgot", short_help="Forgot password")
@click.option("--username", prompt="Username or email")
def account_forgot(username):
client = AccountClient()
client.forgot_password(username)
return click.secho(
"If this account is registered, we will send the "
"further instructions to your email.",
fg="green",
)
@cli.command("update", short_help="Update profile information")
@click.option("--current-password", prompt=True, hide_input=True)
@click.option("--username")
@click.option("--email")
@click.option("--firstname")
@click.option("--lastname")
def account_update(current_password, **kwargs):
client = AccountClient()
profile = client.get_profile()
new_profile = profile.copy()
if not any(kwargs.values()):
for field in profile:
new_profile[field] = click.prompt(
field.replace("_", " ").capitalize(), default=profile[field]
)
if field == "email":
validate_email(new_profile[field])
if field == "username":
validate_username(new_profile[field])
else:
new_profile.update({key: value for key, value in kwargs.items() if value})
client.update_profile(new_profile, current_password)
click.secho("Profile successfully updated!", fg="green")
username_changed = new_profile["username"] != profile["username"]
email_changed = new_profile["email"] != profile["email"]
if not username_changed and not email_changed:
return None
try:
client.logout()
except exception.AccountNotAuthenticated:
pass
if email_changed:
return click.secho(
"Please check your mail to verify your new email address and re-login. ",
fg="yellow",
)
return click.secho("Please re-login.", fg="yellow")
@cli.command("show", short_help="PIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show(offline, json_output):
client = AccountClient()
info = client.get_account_info(offline)
if json_output:
return click.echo(json.dumps(info))
click.echo()
if info.get("profile"):
print_profile(info["profile"])
if info.get("packages"):
print_packages(info["packages"])
if info.get("subscriptions"):
print_subscriptions(info["subscriptions"])
return click.echo()
def print_profile(profile):
click.secho("Profile", fg="cyan", bold=True)
click.echo("=" * len("Profile"))
data = []
if profile.get("username"):
data.append(("Username:", profile["username"]))
if profile.get("email"):
data.append(("Email:", profile["email"]))
if profile.get("firstname"):
data.append(("First name:", profile["firstname"]))
if profile.get("lastname"):
data.append(("Last name:", profile["lastname"]))
click.echo(tabulate(data, tablefmt="plain"))
def print_packages(packages):
click.echo()
click.secho("Packages", fg="cyan")
click.echo("=" * len("Packages"))
for package in packages:
click.echo()
click.secho(package.get("name"), bold=True)
click.echo("-" * len(package.get("name")))
if package.get("description"):
click.echo(package.get("description"))
data = []
expire = "-"
if "subscription" in package:
expire = datetime.datetime.strptime(
(
package["subscription"].get("end_at")
or package["subscription"].get("next_bill_at")
),
"%Y-%m-%dT%H:%M:%SZ",
).strftime("%Y-%m-%d")
data.append(("Expire:", expire))
services = []
for key in package:
if not key.startswith("service."):
continue
if isinstance(package[key], dict):
services.append(package[key].get("title"))
else:
services.append(package[key])
if services:
data.append(("Services:", ", ".join(services)))
click.echo(tabulate(data, tablefmt="plain"))
def print_subscriptions(subscriptions):
click.echo()
click.secho("Subscriptions", fg="cyan")
click.echo("=" * len("Subscriptions"))
for subscription in subscriptions:
click.echo()
click.secho(subscription.get("product_name"), bold=True)
click.echo("-" * len(subscription.get("product_name")))
data = [("State:", subscription.get("status"))]
begin_at = datetime.datetime.strptime(
subscription.get("begin_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("Start date:", begin_at or "-"))
end_at = subscription.get("end_at")
if end_at:
end_at = datetime.datetime.strptime(
subscription.get("end_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("End date:", end_at or "-"))
next_bill_at = subscription.get("next_bill_at")
if next_bill_at:
next_bill_at = datetime.datetime.strptime(
subscription.get("next_bill_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("Next payment:", next_bill_at or "-"))
data.append(
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
)
data.append(
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
)
click.echo(tabulate(data, tablefmt="plain"))
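The `account register` command above validates prompted input through click option callbacks that raise `BadParameter` on invalid values. Below is a reduced sketch of that pattern using the username rule from the hunk; the command name is made up.

import re

import click

def validate_username(ctx, param, value):
    value = str(value).strip()
    if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){3,38}$", value, flags=re.I):
        raise click.BadParameter(
            "Username must contain at least 4 characters including single "
            "hyphens, and cannot begin or end with a hyphen"
        )
    return value

@click.command("register-demo")
@click.option("-u", "--username", prompt=True, callback=validate_username)
def register_demo(username):
    click.secho("Username %s accepted" % username, fg="green")

if __name__ == "__main__":
    register_demo()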

View File

@@ -1,30 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.exception import PlatformioException
class AccountError(PlatformioException):
MESSAGE = "{0}"
class AccountNotAuthenticated(AccountError):
MESSAGE = "You are not authenticated! Please login to PIO Account."
class AccountAlreadyAuthenticated(AccountError):
MESSAGE = "You are already authenticated with {0} account."

View File

@@ -32,14 +32,11 @@ def cli(query, installed, json_output): # pylint: disable=R0912
grpboards = {}
for board in _get_boards(installed):
if query and not any(
query.lower() in str(board.get(k, "")).lower()
for k in ("id", "name", "mcu", "vendor", "platform", "frameworks")
):
if query and query.lower() not in json.dumps(board).lower():
continue
if board["platform"] not in grpboards:
grpboards[board["platform"]] = []
grpboards[board["platform"]].append(board)
if board['platform'] not in grpboards:
grpboards[board['platform']] = []
grpboards[board['platform']].append(board)
terminal_width, _ = click.get_terminal_size()
for (platform, boards) in sorted(grpboards.items()):
@@ -53,21 +50,11 @@ def cli(query, installed, json_output): # pylint: disable=R0912
def print_boards(boards):
click.echo(
tabulate(
[
(
click.style(b["id"], fg="cyan"),
b["mcu"],
"%dMHz" % (b["fcpu"] / 1000000),
fs.format_filesize(b["rom"]),
fs.format_filesize(b["ram"]),
b["name"],
)
for b in boards
],
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"],
)
)
tabulate([(click.style(b['id'], fg="cyan"), b['mcu'], "%dMHz" %
(b['fcpu'] / 1000000), fs.format_filesize(
b['rom']), fs.format_filesize(b['ram']), b['name'])
for b in boards],
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
def _get_boards(installed=False):
@@ -79,7 +66,7 @@ def _print_boards_json(query, installed=False):
result = []
for board in _get_boards(installed):
if query:
search_data = "%s %s" % (board["id"], json.dumps(board).lower())
search_data = "%s %s" % (board['id'], json.dumps(board).lower())
if query.lower() not in search_data.lower():
continue
result.append(board)
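For context, the board listing above renders each platform group as a `tabulate` table with a cyan ID column, the CPU frequency in MHz, and human-readable flash/RAM sizes. The sketch below uses two fabricated board dicts standing in for `_get_boards()` output, and its simplified `format_filesize` helper only approximates `platformio.fs.format_filesize`.

import click
from tabulate import tabulate

boards = [
    {"id": "uno", "mcu": "ATMEGA328P", "fcpu": 16000000,
     "rom": 32256, "ram": 2048, "name": "Arduino Uno", "platform": "atmelavr"},
    {"id": "nucleo_f401re", "mcu": "STM32F401RET6", "fcpu": 84000000,
     "rom": 524288, "ram": 98304, "name": "ST Nucleo F401RE", "platform": "ststm32"},
]

def format_filesize(nbytes):
    # simplified stand-in for platformio.fs.format_filesize
    return "%.2fKB" % (nbytes / 1024.0)

rows = [
    (click.style(b["id"], fg="cyan"), b["mcu"], "%dMHz" % (b["fcpu"] / 1000000),
     format_filesize(b["rom"]), format_filesize(b["ram"]), b["name"])
    for b in boards
]
print(tabulate(rows, headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))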

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,319 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
# pylint: disable=redefined-builtin,too-many-statements
import os
from collections import Counter
from os.path import dirname, isfile
from time import time
import click
from tabulate import tabulate
from platformio import app, exception, fs, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools import CheckToolFactory
from platformio.compat import dump_json_to_unicode
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above, get_project_dir
@click.command("check", short_help="Run a static analysis tool on code")
@click.option("-e", "--environment", multiple=True)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--pattern", multiple=True)
@click.option("--flags", multiple=True)
@click.option(
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--json-output", is_flag=True)
@click.option(
"--fail-on-defect",
multiple=True,
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
)
@click.option("--skip-packages", is_flag=True)
def cli(
environment,
project_dir,
project_conf,
pattern,
flags,
severity,
silent,
verbose,
json_output,
fail_on_defect,
skip_packages,
):
app.set_session_var("custom_project_conf", project_conf)
# find project directory on upper level
if isfile(project_dir):
project_dir = find_project_dir_above(project_dir)
results = []
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config.validate(environment)
default_envs = config.default_envs()
for envname in config.envs():
skipenv = any(
[
environment and envname not in environment,
not environment and default_envs and envname not in default_envs,
]
)
env_options = config.items(env=envname, as_dict=True)
env_dump = []
for k, v in env_options.items():
if k not in ("platform", "framework", "board"):
continue
env_dump.append(
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
)
default_patterns = [
config.get_optional_dir("src"),
config.get_optional_dir("include"),
]
tool_options = dict(
verbose=verbose,
silent=silent,
patterns=pattern or env_options.get("check_patterns", default_patterns),
flags=flags or env_options.get("check_flags"),
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
)
for tool in config.get("env:" + envname, "check_tool"):
if skipenv:
results.append({"env": envname, "tool": tool})
continue
if not silent and not json_output:
print_processing_header(tool, envname, env_dump)
ct = CheckToolFactory.new(
tool, project_dir, config, envname, tool_options
)
result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check(
on_defect_callback=None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
result["defects"] = ct.get_defects()
result["duration"] = time() - result["duration"]
result["succeeded"] = rc == 0
if fail_on_defect:
result["succeeded"] = rc == 0 and not any(
DefectItem.SEVERITY_LABELS[d.severity] in fail_on_defect
for d in result["defects"]
)
result["stats"] = collect_component_stats(result)
results.append(result)
if verbose:
click.echo("\n".join(repr(d) for d in result["defects"]))
if not json_output and not silent:
if rc != 0:
click.echo(
"Error: %s failed to perform check! Please "
"examine tool output in verbose mode." % tool
)
elif not result["defects"]:
click.echo("No defects found")
print_processing_footer(result)
if json_output:
click.echo(dump_json_to_unicode(results_to_json(results)))
elif not silent:
print_check_summary(results)
command_failed = any(r.get("succeeded") is False for r in results)
if command_failed:
raise exception.ReturnErrorCode(1)
def results_to_json(raw):
results = []
for item in raw:
if item.get("succeeded") is None:
continue
item.update(
{
"succeeded": bool(item.get("succeeded")),
"defects": [d.as_dict() for d in item.get("defects", [])],
}
)
results.append(item)
return results
def print_processing_header(tool, envname, envdump):
click.echo(
"Checking %s > %s (%s)"
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
)
terminal_width, _ = click.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
def print_processing_footer(result):
is_failed = not result.get("succeeded")
util.print_labeled_bar(
"[%s] Took %.2f seconds"
% (
(
click.style("FAILED", fg="red", bold=True)
if is_failed
else click.style("PASSED", fg="green", bold=True)
),
result["duration"],
),
is_error=is_failed,
)
def collect_component_stats(result):
components = dict()
def _append_defect(component, defect):
if not components.get(component):
components[component] = Counter()
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
for defect in result.get("defects", []):
component = dirname(defect.file) or defect.file
_append_defect(component, defect)
if component.lower().startswith(get_project_dir().lower()):
while os.sep in component:
component = dirname(component)
_append_defect(component, defect)
return components
def print_defects_stats(results):
if not results:
return
component_stats = {}
for r in results:
for k, v in r.get("stats", {}).items():
if not component_stats.get(k):
component_stats[k] = Counter()
component_stats[k].update(r["stats"][k])
if not component_stats:
return
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
severity_labels.reverse()
tabular_data = list()
for k, v in component_stats.items():
tool_defect = [v.get(s, 0) for s in severity_labels]
tabular_data.append([k] + tool_defect)
total = ["Total"] + [sum(d) for d in list(zip(*tabular_data))[1:]]
tabular_data.sort()
tabular_data.append([]) # Empty line as delimiter
tabular_data.append(total)
headers = ["Component"]
headers.extend([l.upper() for l in severity_labels])
headers = [click.style(h, bold=True) for h in headers]
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
click.echo()
def print_check_summary(results):
click.echo()
tabular_data = []
succeeded_nums = 0
failed_nums = 0
duration = 0
print_defects_stats(results)
for result in results:
duration += result.get("duration", 0)
if result.get("succeeded") is False:
failed_nums += 1
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
status_str = "IGNORED"
else:
succeeded_nums += 1
status_str = click.style("PASSED", fg="green")
tabular_data.append(
(
click.style(result["env"], fg="cyan"),
result["tool"],
status_str,
util.humanize_duration_time(result.get("duration")),
)
)
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Tool", "Status", "Duration")
],
),
err=failed_nums,
)
util.print_labeled_bar(
"%s%d succeeded in %s"
% (
"%d failed, " % failed_nums if failed_nums else "",
succeeded_nums,
util.humanize_duration_time(duration),
),
is_error=failed_nums,
fg="red" if failed_nums else "green",
)
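`collect_component_stats` and `print_defects_stats` above aggregate defects per component (the directory of the defective file) into `Counter` objects keyed by severity label. The following self-contained sketch reproduces that aggregation with fabricated defect data.

import os
from collections import Counter

SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}

defects = [  # (file, severity) pairs, invented for the example
    ("src/main.cpp", 1),
    ("src/drivers/uart.cpp", 2),
    ("src/drivers/uart.cpp", 4),
    ("lib/foo/foo.c", 4),
]

components = {}
for path, severity in defects:
    component = os.path.dirname(path) or path
    counter = components.setdefault(component, Counter())
    counter.update({SEVERITY_LABELS[severity]: 1})

for component, counter in sorted(components.items()):
    print(component, dict(counter))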

View File

@@ -1,95 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import click
from platformio.project.helpers import get_project_dir
# pylint: disable=too-many-instance-attributes, redefined-builtin
# pylint: disable=too-many-arguments
class DefectItem(object):
SEVERITY_HIGH = 1
SEVERITY_MEDIUM = 2
SEVERITY_LOW = 4
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
def __init__(
self,
severity,
category,
message,
file="unknown",
line=0,
column=0,
id=None,
callstack=None,
cwe=None,
):
assert severity in (self.SEVERITY_HIGH, self.SEVERITY_MEDIUM, self.SEVERITY_LOW)
self.severity = severity
self.category = category
self.message = message
self.line = int(line)
self.column = int(column)
self.callstack = callstack
self.cwe = cwe
self.id = id
self.file = file
if file.lower().startswith(get_project_dir().lower()):
self.file = os.path.relpath(file, get_project_dir())
def __repr__(self):
defect_color = None
if self.severity == self.SEVERITY_HIGH:
defect_color = "red"
elif self.severity == self.SEVERITY_MEDIUM:
defect_color = "yellow"
format_str = "{file}:{line}: [{severity}:{category}] {message} {id}"
return format_str.format(
severity=click.style(self.SEVERITY_LABELS[self.severity], fg=defect_color),
category=click.style(self.category.lower(), fg=defect_color),
file=click.style(self.file, bold=True),
message=self.message,
line=self.line,
id="%s" % "[%s]" % self.id if self.id else "",
)
def __or__(self, defect):
return self.severity | defect.severity
@staticmethod
def severity_to_int(label):
for key, value in DefectItem.SEVERITY_LABELS.items():
if label == value:
return key
raise Exception("Unknown severity label -> %s" % label)
def as_dict(self):
return {
"severity": self.SEVERITY_LABELS[self.severity],
"category": self.category,
"message": self.message,
"file": os.path.realpath(self.file),
"line": self.line,
"column": self.column,
"callstack": self.callstack,
"id": self.id,
"cwe": self.cwe,
}
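The `DefectItem` severity levels above are distinct bits (high=1, medium=2, low=4), so a set of accepted severities can be combined and tested with bitwise operators, which is what the `__or__` overload and `severity_to_int` helper support. A small sketch of that encoding:

SEVERITY_HIGH, SEVERITY_MEDIUM, SEVERITY_LOW = 1, 2, 4
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}

def severity_to_int(label):
    for key, value in SEVERITY_LABELS.items():
        if label == value:
            return key
    raise ValueError("Unknown severity label -> %s" % label)

accepted = severity_to_int("high") | severity_to_int("medium")  # == 3
for severity in (SEVERITY_HIGH, SEVERITY_MEDIUM, SEVERITY_LOW):
    status = "report" if severity & accepted else "ignore"
    print(SEVERITY_LABELS[severity], "->", status)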

View File

@@ -1,33 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio import exception
from platformio.commands.check.tools.clangtidy import ClangtidyCheckTool
from platformio.commands.check.tools.cppcheck import CppcheckCheckTool
from platformio.commands.check.tools.pvsstudio import PvsStudioCheckTool
class CheckToolFactory(object):
@staticmethod
def new(tool, project_dir, config, envname, options):
cls = None
if tool == "cppcheck":
cls = CppcheckCheckTool
elif tool == "clangtidy":
cls = ClangtidyCheckTool
elif tool == "pvs-studio":
cls = PvsStudioCheckTool
else:
raise exception.PlatformioException("Unknown check tool `%s`" % tool)
return cls(project_dir, config, envname, options)

View File

@@ -1,215 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
from tempfile import NamedTemporaryFile
import click
from platformio import fs, proc
from platformio.commands.check.defect import DefectItem
from platformio.project.helpers import load_project_ide_data
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
def __init__(self, project_dir, config, envname, options):
self.config = config
self.envname = envname
self.options = options
self.cc_flags = []
self.cxx_flags = []
self.cpp_includes = []
self.cpp_defines = []
self.toolchain_defines = []
self._tmp_files = []
self.cc_path = None
self.cxx_path = None
self._defects = []
self._on_defect_callback = None
self._bad_input = False
self._load_cpp_data(project_dir)
# detect all defects by default
if not self.options.get("severity"):
self.options["severity"] = [
DefectItem.SEVERITY_LOW,
DefectItem.SEVERITY_MEDIUM,
DefectItem.SEVERITY_HIGH,
]
# cast to severity by ids
self.options["severity"] = [
s if isinstance(s, int) else DefectItem.severity_to_int(s)
for s in self.options["severity"]
]
def _load_cpp_data(self, project_dir):
data = load_project_ide_data(project_dir, self.envname)
if not data:
return
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
self.cpp_includes = self._dump_includes(data.get("includes", {}))
self.cpp_defines = data.get("defines", [])
self.cc_path = data.get("cc_path")
self.cxx_path = data.get("cxx_path")
self.toolchain_defines = self._get_toolchain_defines()
def get_flags(self, tool):
result = []
flags = self.options.get("flags") or []
for flag in flags:
if ":" not in flag or flag.startswith("-"):
result.extend([f for f in flag.split(" ") if f])
elif flag.startswith("%s:" % tool):
result.extend([f for f in flag.split(":", 1)[1].split(" ") if f])
return result
def _get_toolchain_defines(self):
def _extract_defines(language, includes_file):
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
defines = []
cmd = "echo | %s -x %s %s %s -dM -E -" % (
self.cc_path,
language,
" ".join([f for f in build_flags if f.startswith(("-m", "-f"))]),
includes_file,
)
result = proc.exec_command(cmd, shell=True)
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
if len(tokens) > 2:
defines.append("%s=%s" % (tokens[1], tokens[2]))
else:
defines.append(tokens[1])
return defines
incflags_file = self._long_includes_hook(self.cpp_includes)
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
def _create_tmp_file(self, data):
with NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
def _long_includes_hook(self, includes):
data = []
for inc in includes:
data.append('-I"%s"' % fs.to_unix_path(inc))
return '@"%s"' % self._create_tmp_file(" ".join(data))
@staticmethod
def _dump_includes(includes_map):
result = []
for includes in includes_map.values():
for include in includes:
if include not in result:
result.append(include)
return result
@staticmethod
def is_flag_set(flag, flags):
return any(flag in f for f in flags)
def get_defects(self):
return self._defects
def configure_command(self):
raise NotImplementedError
def on_tool_output(self, line):
line = self.tool_output_filter(line)
if not line:
return
defect = self.parse_defect(line)
if not isinstance(defect, DefectItem):
if self.options.get("verbose"):
click.echo(line)
return
if defect.severity not in self.options["severity"]:
return
self._defects.append(defect)
if self._on_defect_callback:
self._on_defect_callback(defect)
@staticmethod
def tool_output_filter(line):
return line
@staticmethod
def parse_defect(raw_line):
return raw_line
def clean_up(self):
for f in self._tmp_files:
if os.path.isfile(f):
os.remove(f)
@staticmethod
def get_project_target_files(patterns):
c_extension = (".c",)
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
header_extensions = (".h", ".hh", ".hpp", ".hxx")
result = {"c": [], "c++": [], "headers": []}
def _add_file(path):
if path.endswith(header_extensions):
result["headers"].append(os.path.realpath(path))
elif path.endswith(c_extension):
result["c"].append(os.path.realpath(path))
elif path.endswith(cpp_extensions):
result["c++"].append(os.path.realpath(path))
for pattern in patterns:
for item in glob.glob(pattern):
if not os.path.isdir(item):
_add_file(item)
for root, _, files in os.walk(item, followlinks=True):
for f in files:
_add_file(os.path.join(root, f))
return result
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
cmd = self.configure_command()
if cmd:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
else:
if self.options.get("verbose"):
click.echo("Error: Couldn't configure command")
self._bad_input = True
self.clean_up()
return self._bad_input
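The tool-scoped flag syntax handled by `get_flags` above can be exercised in isolation; the sample flag values below are invented for illustration.

# Standalone re-implementation of the get_flags() scoping logic above:
# flags without a "tool:" prefix apply to every tool, prefixed ones only
# to the named tool. The sample values are made up.
def get_flags(tool, flags):
    result = []
    for flag in flags:
        if ":" not in flag or flag.startswith("-"):
            result.extend([f for f in flag.split(" ") if f])
        elif flag.startswith("%s:" % tool):
            result.extend([f for f in flag.split(":", 1)[1].split(" ") if f])
    return result

sample = ["--inline-suppr", "cppcheck: --std=c99", "clangtidy: --fix"]
print(get_flags("cppcheck", sample))   # ['--inline-suppr', '--std=c99']
print(get_flags("clangtidy", sample))  # ['--inline-suppr', '--fix']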

View File

@@ -1,84 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from os.path import join
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
class ClangtidyCheckTool(CheckToolBase):
def tool_output_filter(self, line):
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
return ""
if "[CommonOptionsParser]" in line:
self._bad_input = True
return line
if any(d in line for d in ("note: ", "error: ", "warning: ")):
return line
return ""
def parse_defect(self, raw_line):
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
if not match:
return raw_line
file_, line, column, category, message, defect_id = match.groups()
severity = DefectItem.SEVERITY_LOW
if category == "error":
severity = DefectItem.SEVERITY_HIGH
elif category == "warning":
severity = DefectItem.SEVERITY_MEDIUM
return DefectItem(severity, category, message, file_, line, column, defect_id)
def configure_command(self):
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")
cmd = [tool_path, "--quiet"]
flags = self.get_flags("clangtidy")
if not self.is_flag_set("--checks", flags):
cmd.append("--checks=*")
project_files = self.get_project_target_files(self.options["patterns"])
src_files = []
for scope in project_files:
src_files.extend(project_files[scope])
cmd.extend(flags)
cmd.extend(src_files)
cmd.append("--")
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
)
includes = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
):
continue
includes.append(inc)
cmd.append("--extra-arg=" + self._long_includes_hook(includes))
return cmd
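As a rough illustration of `parse_defect` above, the same regular expression can be run against a typical clang-tidy diagnostic line; the path and message here are invented.

import re

# The pattern from parse_defect() above, run against an invented
# clang-tidy diagnostic line to show which groups it captures.
line = "src/main.cpp:42:13: warning: variable 'x' is unused [clang-diagnostic-unused-variable]"
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", line)
if match:
    file_, line_no, column, category, message, defect_id = match.groups()
    print(category, defect_id, "%s:%s" % (file_, line_no))
    # warning clang-diagnostic-unused-variable src/main.cpp:42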

View File

@@ -1,241 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import click
from platformio import proc
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
class CppcheckCheckTool(CheckToolBase):
def __init__(self, *args, **kwargs):
self.defect_fields = [
"severity",
"message",
"file",
"line",
"column",
"callstack",
"cwe",
"id",
]
super(CppcheckCheckTool, self).__init__(*args, **kwargs)
def tool_output_filter(self, line):
if (
not self.options.get("verbose")
and "--suppress=unmatchedSuppression:" in line
):
return ""
if any(
msg in line
for msg in (
"No C or C++ source files found",
"unrecognized command line option",
)
):
self._bad_input = True
return line
def parse_defect(self, raw_line):
if "<&PIO&>" not in raw_line or any(
f not in raw_line for f in self.defect_fields
):
return None
args = dict()
for field in raw_line.split("<&PIO&>"):
field = field.strip().replace('"', "")
name, value = field.split("=", 1)
args[name] = value
args["category"] = args["severity"]
if args["severity"] == "error":
args["severity"] = DefectItem.SEVERITY_HIGH
elif args["severity"] == "warning":
args["severity"] = DefectItem.SEVERITY_MEDIUM
else:
args["severity"] = DefectItem.SEVERITY_LOW
# Skip defects found in third-party software, but keep in mind that such defects
# might break the checking process, so defects from project files would not be reported
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
if (
args.get("file", "")
.lower()
.startswith(self.config.get_optional_dir("packages").lower())
):
if args["id"] in breaking_defect_ids:
if self.options.get("verbose"):
click.echo(
"Error: Found a breaking defect '%s' in %s:%s\n"
"Please note: check results might not be valid!\n"
"Try adding --skip-packages"
% (args.get("message"), args.get("file"), args.get("line"))
)
click.echo()
self._bad_input = True
return None
return DefectItem(**args)
def configure_command(
self, language, src_files
): # pylint: disable=arguments-differ
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
cmd = [
tool_path,
"--error-exitcode=1",
"--verbose" if self.options.get("verbose") else "--quiet",
]
cmd.append(
'--template="%s"'
% "<&PIO&>".join(["{0}={{{0}}}".format(f) for f in self.defect_fields])
)
flags = self.get_flags("cppcheck")
if not flags:
# by default the user can suppress reporting individual defects
# directly in code via: // cppcheck-suppress warningID
cmd.append("--inline-suppr")
if not self.is_flag_set("--platform", flags):
cmd.append("--platform=unspecified")
if not self.is_flag_set("--enable", flags):
enabled_checks = [
"warning",
"style",
"performance",
"portability",
"unusedFunction",
]
cmd.append("--enable=%s" % ",".join(enabled_checks))
if not self.is_flag_set("--language", flags):
cmd.append("--language=" + language)
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
for flag in build_flags:
if "-std" in flag:
# Standards with GNU extensions are not allowed
cmd.append("-" + flag.replace("gnu", "c"))
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
)
cmd.extend(flags)
cmd.extend(
"--include=" + inc
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
)
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
cmd.append("--includes-file=%s" % self._generate_inc_file())
return cmd
@staticmethod
def get_forced_includes(build_flags, includes):
def _extract_filepath(flag, include_options, build_flags):
path = ""
for option in include_options:
if not flag.startswith(option):
continue
if flag.split(option)[1].strip():
path = flag.split(option)[1].strip()
elif build_flags.index(flag) + 1 < len(build_flags):
path = build_flags[build_flags.index(flag) + 1]
return path
def _search_include_dir(filepath, include_paths):
for inc_path in include_paths:
path = os.path.join(inc_path, filepath)
if os.path.isfile(path):
return path
return ""
result = []
include_options = ("-include", "-imacros")
for f in build_flags:
if f.startswith(include_options):
filepath = _extract_filepath(f, include_options, build_flags)
if not os.path.isabs(filepath):
filepath = _search_include_dir(filepath, includes)
if os.path.isfile(filepath):
result.append(filepath)
return result
def _generate_src_file(self, src_files):
return self._create_tmp_file("\n".join(src_files))
def _generate_inc_file(self):
result = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
):
continue
result.append(inc)
return self._create_tmp_file("\n".join(result))
def clean_up(self):
super(CppcheckCheckTool, self).clean_up()
# delete temporary dump files generated by addons
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
return
for files in self.get_project_target_files(self.options["patterns"]).values():
for f in files:
dump_file = f + ".dump"
if os.path.isfile(dump_file):
os.remove(dump_file)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
project_files = self.get_project_target_files(self.options["patterns"])
languages = ("c", "c++")
if not any([project_files[t] for t in languages]):
click.echo("Error: Nothing to check.")
return True
for language in languages:
if not project_files[language]:
continue
cmd = self.configure_command(language, project_files[language])
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.clean_up()
return self._bad_input
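To make the `<&PIO&>` template round trip above concrete, this standalone sketch builds the template string the same way and parses one invented report line back into a field dictionary.

# Standalone sketch of the "<&PIO&>" template used above: cppcheck fills
# the template per defect, and parse_defect() splits it back into fields.
# The sample report line is invented.
defect_fields = ["severity", "message", "file", "line", "column", "callstack", "cwe", "id"]
template = "<&PIO&>".join(["{0}={{{0}}}".format(f) for f in defect_fields])
print(template)  # severity={severity}<&PIO&>message={message}<&PIO&>...

raw_line = ('severity=warning<&PIO&>message=Uninitialized variable: x<&PIO&>'
            'file=src/main.c<&PIO&>line=10<&PIO&>column=5<&PIO&>'
            'callstack=<&PIO&>cwe=457<&PIO&>id=uninitvar')
args = {}
for field in raw_line.split("<&PIO&>"):
    name, value = field.strip().replace('"', "").split("=", 1)
    args[name] = value
print(args["id"], args["severity"], args["file"])  # uninitvar warning src/main.c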

View File

@@ -1,233 +0,0 @@
# Copyright (c) 2020-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
from xml.etree.ElementTree import fromstring
import click
from platformio import proc, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
def __init__(self, *args, **kwargs):
self._tmp_dir = tempfile.mkdtemp(prefix="piocheck")
self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i"
self._tmp_output_file = self._generate_tmp_file_path() + ".pvs"
self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg"
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
self.tool_path = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"x64" if "windows" in util.get_systype() else "bin",
"pvs-studio",
)
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
with open(self._tmp_cfg_file, "w") as fp:
fp.write(
"exclude-path = "
+ self.config.get_optional_dir("packages").replace("\\", "/")
)
with open(self._tmp_cmd_file, "w") as fp:
fp.write(
" ".join(
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
)
)
def _process_defects(self, defects):
for defect in defects:
if not isinstance(defect, DefectItem):
return
if defect.severity not in self.options["severity"]:
return
self._defects.append(defect)
if self._on_defect_callback:
self._on_defect_callback(defect)
def _demangle_report(self, output_file):
converter_tool = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"HtmlGenerator"
if "windows" in util.get_systype()
else os.path.join("bin", "plog-converter"),
)
cmd = (
converter_tool,
"-t",
"xml",
output_file,
"-m",
"cwe",
"-m",
"misra",
"-a",
# Enable all possible analyzers and defect levels
"GA:1,2,3;64:1,2,3;OP:1,2,3;CS:1,2,3;MISRA:1,2,3",
"--cerr",
)
result = proc.exec_command(cmd)
if result["returncode"] != 0:
click.echo(result["err"])
self._bad_input = True
return result["err"]
def parse_defects(self, output_file):
defects = []
report = self._demangle_report(output_file)
if not report:
self._bad_input = True
return []
try:
defects_data = fromstring(report)
except: # pylint: disable=bare-except
click.echo("Error: Couldn't decode generated report!")
self._bad_input = True
return []
for table in defects_data.iter("PVS-Studio_Analysis_Log"):
message = table.find("Message").text
category = table.find("ErrorType").text
line = table.find("Line").text
file_ = table.find("File").text
defect_id = table.find("ErrorCode").text
cwe = table.find("CWECode")
cwe_id = None
if cwe is not None:
cwe_id = cwe.text.lower().replace("cwe-", "")
misra = table.find("MISRA")
if misra is not None:
message += " [%s]" % misra.text
severity = DefectItem.SEVERITY_LOW
if category == "error":
severity = DefectItem.SEVERITY_HIGH
elif category == "warning":
severity = DefectItem.SEVERITY_MEDIUM
defects.append(
DefectItem(
severity, category, message, file_, line, id=defect_id, cwe=cwe_id
)
)
return defects
def configure_command(self, src_file): # pylint: disable=arguments-differ
if os.path.isfile(self._tmp_output_file):
os.remove(self._tmp_output_file)
if not os.path.isfile(self._tmp_preprocessed_file):
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
return ""
cmd = [
self.tool_path,
"--skip-cl-exe",
"yes",
"--language",
"C" if src_file.endswith(".c") else "C++",
"--preprocessor",
"gcc",
"--cfg",
self._tmp_cfg_file,
"--source-file",
src_file,
"--i-file",
self._tmp_preprocessed_file,
"--output-file",
self._tmp_output_file,
]
flags = self.get_flags("pvs-studio")
if not self.is_flag_set("--platform", flags):
cmd.append("--platform=arm")
cmd.extend(flags)
return cmd
def _generate_tmp_file_path(self):
# pylint: disable=protected-access
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
def _prepare_preprocessed_file(self, src_file):
if os.path.isfile(self._tmp_preprocessed_file):
os.remove(self._tmp_preprocessed_file)
flags = self.cxx_flags
compiler = self.cxx_path
if src_file.endswith(".c"):
flags = self.cc_flags
compiler = self.cc_path
cmd = [compiler, src_file, "-E", "-o", self._tmp_preprocessed_file]
cmd.extend([f for f in flags if f])
cmd.extend(["-D%s" % d for d in self.cpp_defines])
cmd.append('@"%s"' % self._tmp_cmd_file)
# Explicitly specify C++ as the language used in .ino files
if src_file.endswith(".ino"):
cmd.insert(1, "-xc++")
result = proc.exec_command(" ".join(cmd), shell=True)
if result["returncode"] != 0 or result["err"]:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
click.echo(result["err"])
self._bad_input = True
def clean_up(self):
super(PvsStudioCheckTool, self).clean_up()
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
for scope, files in self.get_project_target_files(
self.options["patterns"]
).items():
if scope not in ("c", "c++"):
continue
for src_file in files:
self._prepare_preprocessed_file(src_file)
cmd = self.configure_command(src_file)
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if not cmd:
self._bad_input = True
continue
result = proc.exec_command(cmd)
# pylint: disable=unsupported-membership-test
if result["returncode"] != 0 or "license" in result["err"].lower():
self._bad_input = True
click.echo(result["err"])
continue
self._process_defects(self.parse_defects(self._tmp_output_file))
self.clean_up()
return self._bad_input
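A compact illustration of the report-parsing step above, using an invented XML fragment shaped like the converted PVS-Studio log that `parse_defects` walks (one `PVS-Studio_Analysis_Log` element per defect); only the fields the code reads are included.

from xml.etree.ElementTree import fromstring

# Invented XML fragment in the shape read by parse_defects() above.
report = """
<NewDataSet>
  <PVS-Studio_Analysis_Log>
    <Message>A variable is assigned but not used.</Message>
    <ErrorType>warning</ErrorType>
    <Line>12</Line>
    <File>src/main.cpp</File>
    <ErrorCode>V1001</ErrorCode>
    <CWECode>CWE-563</CWECode>
  </PVS-Studio_Analysis_Log>
</NewDataSet>
"""
root = fromstring(report)
for table in root.iter("PVS-Studio_Analysis_Log"):
    print(table.find("ErrorCode").text,
          table.find("ErrorType").text,
          table.find("File").text + ":" + table.find("Line").text)
# V1001 warning src/main.cpp:12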

View File

@@ -14,16 +14,16 @@
from glob import glob
from os import getenv, makedirs, remove
from os.path import basename, isdir, isfile, join, realpath
from os.path import abspath, basename, expanduser, isdir, isfile, join
from shutil import copyfile, copytree
from tempfile import mkdtemp
import click
from platformio import app, fs
from platformio.commands.project import project_init as cmd_project_init
from platformio.commands.project import validate_boards
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.init import cli as cmd_init
from platformio.commands.init import validate_boards
from platformio.commands.run import cli as cmd_run
from platformio.compat import glob_escape
from platformio.exception import CIBuildEnvsEmpty
from platformio.project.config import ProjectConfig
@@ -34,8 +34,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
value = list(value)
for i, p in enumerate(value):
if p.startswith("~"):
value[i] = fs.expanduser(p)
value[i] = realpath(value[i])
value[i] = expanduser(p)
value[i] = abspath(value[i])
if not glob(value[i]):
invalid_path = p
break
@@ -48,37 +48,37 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
@click.command("ci", short_help="Continuous Integration")
@click.argument("src", nargs=-1, callback=validate_path)
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
@click.option("-l",
"--lib",
multiple=True,
callback=validate_path,
metavar="DIRECTORY")
@click.option("--exclude", multiple=True)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option(
"--build-dir",
default=mkdtemp,
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
@click.option("-b",
"--board",
multiple=True,
metavar="ID",
callback=validate_boards)
@click.option("--build-dir",
default=mkdtemp,
type=click.Path(file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("--keep-build-dir", is_flag=True)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option("-O", "--project-option", multiple=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_context
def cli( # pylint: disable=too-many-arguments, too-many-branches
ctx,
src,
lib,
exclude,
board,
build_dir,
keep_build_dir,
project_conf,
project_option,
verbose,
):
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
project_option, verbose):
if not src and getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
@@ -110,12 +110,10 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
_exclude_contents(build_dir, exclude)
# initialise project
ctx.invoke(
cmd_project_init,
project_dir=build_dir,
board=board,
project_option=project_option,
)
ctx.invoke(cmd_init,
project_dir=build_dir,
board=board,
project_option=project_option)
# process project
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
@@ -129,27 +127,27 @@ def _copy_contents(dst_dir, contents):
for path in contents:
if isdir(path):
items["dirs"].add(path)
items['dirs'].add(path)
elif isfile(path):
items["files"].add(path)
items['files'].add(path)
dst_dir_name = basename(dst_dir)
if dst_dir_name == "src" and len(items["dirs"]) == 1:
copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
if dst_dir_name == "src" and len(items['dirs']) == 1:
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
else:
if not isdir(dst_dir):
makedirs(dst_dir)
for d in items["dirs"]:
for d in items['dirs']:
copytree(d, join(dst_dir, basename(d)), symlinks=True)
if not items["files"]:
if not items['files']:
return
if dst_dir_name == "lib":
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
for f in items["files"]:
for f in items['files']:
dst_file = join(dst_dir, basename(f))
if f == dst_file:
continue
@@ -161,7 +159,7 @@ def _exclude_contents(dst_dir, patterns):
for p in patterns:
contents += glob(join(glob_escape(dst_dir), p))
for path in contents:
path = realpath(path)
path = abspath(path)
if isdir(path):
fs.rmtree(path)
elif isfile(path):

View File

@@ -11,3 +11,5 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.debug.command import cli

View File

@@ -12,28 +12,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import re
import signal
import time
from hashlib import sha1
from os.path import basename, dirname, isdir, join, realpath, splitext
from os.path import abspath, basename, dirname, isdir, join, splitext
from tempfile import mkdtemp
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import protocol # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import stdio # pylint: disable=import-error
from twisted.internet import task # pylint: disable=import-error
from platformio import app, fs, proc, telemetry, util
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.initcfgs import get_gdb_init_config
from platformio.commands.debug.process.base import BaseProcess
from platformio.commands.debug.process.server import DebugServer
from platformio.compat import hashlib_encode_data, is_bytes
from platformio import app, exception, fs, proc, util
from platformio.commands.debug import helpers, initcfgs
from platformio.commands.debug.process import BaseProcess
from platformio.commands.debug.server import DebugServer
from platformio.compat import hashlib_encode_data
from platformio.project.helpers import get_project_cache_dir
from platformio.telemetry import MeasurementProtocol
LOG_FILE = None
class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
@@ -42,7 +43,6 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
def __init__(self, project_dir, args, debug_options, env_options):
super(GDBClient, self).__init__()
self.project_dir = project_dir
self.args = list(args)
self.debug_options = debug_options
@@ -53,13 +53,13 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
if not isdir(get_project_cache_dir()):
os.makedirs(get_project_cache_dir())
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(),
prefix=".piodebug-")
self._target_is_run = False
self._last_server_activity = 0
self._auto_continue_timer = None
self._errors_buffer = b""
@defer.inlineCallbacks
def spawn(self, gdb_path, prog_path):
session_hash = gdb_path + prog_path
self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
@@ -70,84 +70,95 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
"PROG_PATH": prog_path,
"PROG_DIR": dirname(prog_path),
"PROG_NAME": basename(splitext(prog_path)[0]),
"DEBUG_PORT": self.debug_options["port"],
"UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
"INIT_BREAK": self.debug_options["init_break"] or "",
"LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
"DEBUG_PORT": self.debug_options['port'],
"UPLOAD_PROTOCOL": self.debug_options['upload_protocol'],
"INIT_BREAK": self.debug_options['init_break'] or "",
"LOAD_CMDS": "\n".join(self.debug_options['load_cmds'] or []),
}
yield self._debug_server.spawn(patterns)
if not patterns["DEBUG_PORT"]:
patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()
self._debug_server.spawn(patterns)
if not patterns['DEBUG_PORT']:
patterns['DEBUG_PORT'] = self._debug_server.get_debug_port()
self.generate_pioinit(self._gdbsrc_dir, patterns)
# start GDB client
args = [
"piogdb",
"-q",
"--directory",
self._gdbsrc_dir,
"--directory",
self.project_dir,
"-l",
"10",
]
"--directory", self._gdbsrc_dir,
"--directory", self.project_dir,
"-l", "10"
] # yapf: disable
args.extend(self.args)
if not gdb_path:
raise DebugInvalidOptionsError("GDB client is not configured")
raise exception.DebugInvalidOptions("GDB client is not configured")
gdb_data_dir = self._get_data_dir(gdb_path)
if gdb_data_dir:
args.extend(["--data-directory", gdb_data_dir])
args.append(patterns["PROG_PATH"])
args.append(patterns['PROG_PATH'])
transport = reactor.spawnProcess(
self, gdb_path, args, path=self.project_dir, env=os.environ
)
defer.returnValue(transport)
return reactor.spawnProcess(self,
gdb_path,
args,
path=self.project_dir,
env=os.environ)
@staticmethod
def _get_data_dir(gdb_path):
if "msp430" in gdb_path:
return None
gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
gdb_data_dir = abspath(join(dirname(gdb_path), "..", "share", "gdb"))
return gdb_data_dir if isdir(gdb_data_dir) else None
def generate_pioinit(self, dst_dir, patterns):
# default GDB init commands depending on debug tool
commands = get_gdb_init_config(self.debug_options).split("\n")
server_exe = (self.debug_options.get("server")
or {}).get("executable", "").lower()
if "jlink" in server_exe:
cfg = initcfgs.GDB_JLINK_INIT_CONFIG
elif "st-util" in server_exe:
cfg = initcfgs.GDB_STUTIL_INIT_CONFIG
elif "mspdebug" in server_exe:
cfg = initcfgs.GDB_MSPDEBUG_INIT_CONFIG
elif "qemu" in server_exe:
cfg = initcfgs.GDB_QEMU_INIT_CONFIG
elif self.debug_options['require_debug_port']:
cfg = initcfgs.GDB_BLACKMAGIC_INIT_CONFIG
else:
cfg = initcfgs.GDB_DEFAULT_INIT_CONFIG
commands = cfg.split("\n")
if self.debug_options["init_cmds"]:
commands = self.debug_options["init_cmds"]
commands.extend(self.debug_options["extra_cmds"])
if self.debug_options['init_cmds']:
commands = self.debug_options['init_cmds']
commands.extend(self.debug_options['extra_cmds'])
if not any("define pio_reset_run_target" in cmd for cmd in commands):
if not any("define pio_reset_target" in cmd for cmd in commands):
commands = [
"define pio_reset_run_target",
" echo Warning! Undefined pio_reset_run_target command\\n",
" monitor reset",
"end",
] + commands
"define pio_reset_target",
" echo Warning! Undefined pio_reset_target command\\n",
" mon reset",
"end"
] + commands # yapf: disable
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
commands = [
"define pio_reset_halt_target",
" echo Warning! Undefined pio_reset_halt_target command\\n",
" monitor reset halt",
"end",
] + commands
" mon reset halt",
"end"
] + commands # yapf: disable
if not any("define pio_restart_target" in cmd for cmd in commands):
commands += [
"define pio_restart_target",
" pio_reset_halt_target",
" $INIT_BREAK",
" %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
"end",
]
" %s" % ("continue" if patterns['INIT_BREAK'] else "next"),
"end"
] # yapf: disable
banner = [
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
"echo PlatformIO: Initializing remote target...\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options['tool'],
"echo PlatformIO: Initializing remote target...\\n"
]
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
@@ -163,7 +174,12 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
stdio.StandardIO(p)
def onStdInData(self, data):
super(GDBClient, self).onStdInData(data)
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
self._last_server_activity = time.time()
if b"-exec-run" in data:
if self._target_is_run:
token, _ = data.split(b"-", 1)
@@ -176,7 +192,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
self.transport.write(b"pio_reset_run_target\n")
self.transport.write(b"pio_reset_target\n")
self.transport.write(data)
def processEnded(self, reason): # pylint: disable=unused-argument
@@ -189,14 +205,17 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
reactor.stop()
def outReceived(self, data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
self._last_server_activity = time.time()
super(GDBClient, self).outReceived(data)
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug", "Started", telemetry.dump_run_environment(self.env_options)
)
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
self._auto_continue_timer = task.LoopingCall(
self._auto_exec_continue)
self._auto_continue_timer.start(0.1)
def errReceived(self, data):
@@ -204,49 +223,43 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
self._handle_error(data)
def console_log(self, msg):
if helpers.is_gdbmi_mode():
msg = helpers.escape_gdbmi_stream("~", msg)
self.outReceived(msg if is_bytes(msg) else msg.encode())
if helpers.is_mi_mode(self.args):
self.outReceived(('~"%s\\n"\n' % msg).encode())
else:
self.outReceived(("%s\n" % msg).encode())
def _auto_exec_continue(self):
auto_exec_delay = 0.5 # in seconds
if self._last_activity > (time.time() - auto_exec_delay):
if self._last_server_activity > (time.time() - auto_exec_delay):
return
if self._auto_continue_timer:
self._auto_continue_timer.stop()
self._auto_continue_timer = None
if not self.debug_options["init_break"] or self._target_is_run:
if not self.debug_options['init_break'] or self._target_is_run:
return
self.console_log(
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
% self.debug_options["init_break"]
)
self.console_log(
"PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
)
self.transport.write(
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
)
"PlatformIO: Resume the execution to `debug_init_break = %s`" %
self.debug_options['init_break'])
self.console_log("PlatformIO: More configuration options -> "
"http://bit.ly/pio-debug")
self.transport.write(b"0-exec-continue\n" if helpers.
is_mi_mode(self.args) else b"continue\n")
self._target_is_run = True
def _handle_error(self, data):
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
if not (
self.PIO_SRC_NAME.encode() in self._errors_buffer
and b"Error in sourced" in self._errors_buffer
):
if (self.PIO_SRC_NAME.encode() not in data
or b"Error in sourced" not in data):
return
last_erros = self._errors_buffer.decode()
last_erros = " ".join(reversed(last_erros.split("\n")))
last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)
err = "%s -> %s" % (
telemetry.dump_run_environment(self.env_options),
last_erros,
)
telemetry.send_exception("DebugInitError: %s" % err)
configuration = {"debug": self.debug_options, "env": self.env_options}
exd = re.sub(r'\\(?!")', "/", json.dumps(configuration))
exd = re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"',
lambda m: '"%s"' % join(*m.group(1).split("/")[-2:]), exd,
re.I | re.M)
mp = MeasurementProtocol()
mp['exd'] = "DebugGDBPioInitError: %s" % exd
mp['exf'] = 1
mp.send("exception")
self.transport.loseConnection()
def _kill_previous_session(self):

View File

@@ -17,47 +17,43 @@
import os
import signal
from os.path import isfile
from os.path import isfile, join
import click
from platformio import app, exception, fs, proc, util
from platformio import exception, fs, proc, util
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.managers.core import inject_contrib_pysite
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project, load_project_ide_data
from platformio.project.helpers import (is_platformio_project,
load_project_ide_data)
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.command("debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger")
@click.option("-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option("--environment", "-e", metavar="<environment>")
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
app.set_session_var("custom_project_conf", project_conf)
def cli(ctx, project_dir, project_conf, environment, verbose, interface,
__unprocessed):
# use env variables from Eclipse or CLion
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
@@ -66,92 +62,88 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
project_dir = os.getenv(sysenv)
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config = ProjectConfig.get_instance(
project_conf or join(project_dir, "platformio.ini"))
config.validate(envs=[environment] if environment else None)
env_name = environment or helpers.get_default_debug_env(config)
env_options = config.items(env=env_name, as_dict=True)
if not set(env_options.keys()) >= set(["platform", "board"]):
raise ProjectEnvsNotAvailableError()
raise exception.ProjectEnvsNotAvailable()
debug_options = helpers.validate_debug_options(ctx, env_options)
assert debug_options
if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
return helpers.predebug_project(ctx, project_dir, env_name, False,
verbose)
configuration = load_project_ide_data(project_dir, env_name)
if not configuration:
raise DebugInvalidOptionsError("Could not load debug configuration")
raise exception.DebugInvalidOptions(
"Could not load debug configuration")
if "--version" in __unprocessed:
result = proc.exec_command([configuration["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
result = proc.exec_command([configuration['gdb_path'], "--version"])
if result['returncode'] == 0:
return click.echo(result['out'])
raise exception.PlatformioException("\n".join(
[result['out'], result['err']]))
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
click.echo(
helpers.escape_gdbmi_stream("~", str(e) + "\n")
if helpers.is_gdbmi_mode()
else str(e) + "\n",
nl=False,
)
for line in str(e).split("\n") + [""]:
click.echo(
('~"%s\\n"' if helpers.is_mi_mode(__unprocessed) else "%s") %
line)
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, configuration
)
debug_options['load_cmds'] = helpers.configure_esp32_load_cmds(
debug_options, configuration)
rebuild_prog = False
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
preload = debug_options['load_cmds'] == ["preload"]
load_mode = debug_options['load_mode']
if load_mode == "always":
rebuild_prog = preload or not helpers.has_debug_symbols(
configuration["prog_path"]
)
rebuild_prog = (
preload
or not helpers.has_debug_symbols(configuration['prog_path']))
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
configuration["prog_path"]
) or not helpers.has_debug_symbols(configuration["prog_path"])
rebuild_prog = (
helpers.is_prog_obsolete(configuration['prog_path'])
or not helpers.has_debug_symbols(configuration['prog_path']))
else:
rebuild_prog = not isfile(configuration["prog_path"])
rebuild_prog = not isfile(configuration['prog_path'])
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
debug_options["load_cmds"] = []
debug_options['load_cmds'] = []
if rebuild_prog:
if helpers.is_gdbmi_mode():
click.echo(
helpers.escape_gdbmi_stream(
"~", "Preparing firmware for debugging...\n"
),
nl=False,
)
stream = helpers.GDBMIConsoleStream()
with util.capture_std_streams(stream):
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
stream.close()
if helpers.is_mi_mode(__unprocessed):
click.echo('~"Preparing firmware for debugging...\\n"')
output = helpers.GDBBytesIO()
with util.capture_std_streams(output):
helpers.predebug_project(ctx, project_dir, env_name, preload,
verbose)
output.close()
else:
click.echo("Preparing firmware for debugging...")
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
helpers.predebug_project(ctx, project_dir, env_name, preload,
verbose)
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(configuration["prog_path"])
helpers.is_prog_obsolete(configuration['prog_path'])
if not isfile(configuration["prog_path"]):
raise DebugInvalidOptionsError("Program/firmware is missed")
if not isfile(configuration['prog_path']):
raise exception.DebugInvalidOptions("Program/firmware is missed")
# run debugging client
inject_contrib_pysite()
# pylint: disable=import-outside-toplevel
from platformio.commands.debug.process.client import GDBClient, reactor
from platformio.commands.debug.client import GDBClient, reactor
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
client.spawn(configuration["gdb_path"], configuration["prog_path"])
client.spawn(configuration['gdb_path'], configuration['prog_path'])
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()
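A small standalone restatement of the rebuild decision in the command above: `debug_load_mode` plus the state of the firmware decides whether the project is rebuilt before the session starts; the helper callables here are stand-ins for the real `has_debug_symbols`/`is_prog_obsolete`.

import os

# Standalone restatement of the rebuild decision above; the callables
# are hypothetical stand-ins for the real helpers.
def should_rebuild(load_mode, prog_path, preload, has_debug_symbols, is_prog_obsolete):
    if load_mode == "always":
        return preload or not has_debug_symbols(prog_path)
    if load_mode == "modified":
        return is_prog_obsolete(prog_path) or not has_debug_symbols(prog_path)
    return not os.path.isfile(prog_path)

# e.g. with load_mode="modified", an unchanged firmware that still has
# debug symbols is reused as-is and nothing is rebuilt:
print(should_rebuild("modified", "firmware.elf", False,
                     has_debug_symbols=lambda p: True,
                     is_prog_obsolete=lambda p: False))   # False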

View File

@@ -1,33 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.exception import PlatformioException, UserSideException
class DebugError(PlatformioException):
pass
class DebugSupportError(DebugError, UserSideException):
MESSAGE = (
"Currently, PlatformIO does not support debugging for `{0}`.\n"
"Please request support at https://github.com/platformio/"
"platformio-core/issues \nor visit -> https://docs.platformio.org"
"/page/plus/debugging.html"
)
class DebugInvalidOptionsError(DebugError, UserSideException):
pass

View File

@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
@@ -21,47 +20,28 @@ from io import BytesIO
from os.path import isfile
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.commands.platform import \
platform_install as cmd_platform_install
from platformio.commands.run import cli as cmd_run
from platformio.managers.platform import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
class GDBBytesIO(BytesIO): # pylint: disable=too-few-public-methods
STDOUT = sys.stdout
def write(self, text):
self.STDOUT.write(escape_gdbmi_stream("~", text))
if "\n" in text:
for line in text.strip().split("\n"):
self.STDOUT.write('~"%s\\n"\n' % line)
else:
self.STDOUT.write('~"%s"' % text)
self.STDOUT.flush()
def is_gdbmi_mode():
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
bytes_stream = False
if is_bytes(stream):
bytes_stream = True
stream = stream.decode()
if not stream:
return b"" if bytes_stream else ""
ends_nl = stream.endswith("\n")
stream = re.sub(r"\\+", "\\\\\\\\", stream)
stream = stream.replace('"', '\\"')
stream = stream.replace("\n", "\\n")
stream = '%s"%s"' % (prefix, stream)
if ends_nl:
stream += "\n"
return stream.encode() if bytes_stream else stream
def is_mi_mode(args):
return "--interpreter" in " ".join(args)
def get_default_debug_env(config):
@@ -77,41 +57,41 @@ def get_default_debug_env(config):
def predebug_project(ctx, project_dir, env_name, preload, verbose):
ctx.invoke(
cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose,
)
ctx.invoke(cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose)
if preload:
time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
return [
"$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items
]
try:
platform = PlatformFactory.newPlatform(env_options["platform"])
platform = PlatformFactory.newPlatform(env_options['platform'])
except exception.UnknownPlatform:
cmd_ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.newPlatform(env_options["platform"])
cmd_ctx.invoke(cmd_platform_install,
platforms=[env_options['platform']],
skip_default_package=True)
platform = PlatformFactory.newPlatform(env_options['platform'])
board_config = platform.board_config(env_options["board"])
board_config = platform.board_config(env_options['board'])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
tool_settings = board_config.get("debug", {}).get("tools",
{}).get(tool_name, {})
server_options = None
# specific server per system
if isinstance(tool_settings.get("server", {}), list):
for item in tool_settings["server"][:]:
tool_settings["server"] = item
for item in tool_settings['server'][:]:
tool_settings['server'] = item
if util.get_systype() in item.get("system", []):
break
@@ -120,101 +100,76 @@ def validate_debug_options(cmd_ctx, env_options):
server_options = {
"cwd": None,
"executable": None,
"arguments": env_options.get("debug_server"),
"arguments": env_options.get("debug_server")
}
server_options["executable"] = server_options["arguments"][0]
server_options["arguments"] = server_options["arguments"][1:]
server_options['executable'] = server_options['arguments'][0]
server_options['arguments'] = server_options['arguments'][1:]
elif "server" in tool_settings:
server_options = tool_settings["server"]
server_package = server_options.get("package")
server_package_dir = (
platform.get_package_dir(server_package) if server_package else None
)
server_package = tool_settings['server'].get("package")
server_package_dir = platform.get_package_dir(
server_package) if server_package else None
if server_package and not server_package_dir:
platform.install_packages(
with_packages=[server_package], skip_default_package=True, silent=True
)
platform.install_packages(with_packages=[server_package],
skip_default_package=True,
silent=True)
server_package_dir = platform.get_package_dir(server_package)
server_options.update(
dict(
cwd=server_package_dir if server_package else None,
executable=server_options.get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
for a in server_options.get("arguments", [])
],
)
)
server_options = dict(
cwd=server_package_dir if server_package else None,
executable=tool_settings['server'].get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir else a
for a in tool_settings['server'].get("arguments", [])
])
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
result = dict(
tool=tool_name,
upload_protocol=env_options.get(
"upload_protocol", board_config.get("upload", {}).get("protocol")
),
"upload_protocol",
board_config.get("upload", {}).get("protocol")),
load_cmds=_cleanup_cmds(
env_options.get(
"debug_load_cmds",
tool_settings.get(
"load_cmds",
tool_settings.get(
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
),
),
)
),
load_mode=env_options.get(
"debug_load_mode",
tool_settings.get(
"load_mode", ProjectOptions["env.debug_load_mode"].default
),
),
tool_settings.get("load_cmds",
tool_settings.get("load_cmd", "load")))),
load_mode=env_options.get("debug_load_mode",
tool_settings.get("load_mode", "always")),
init_break=env_options.get(
"debug_init_break",
tool_settings.get(
"init_break", ProjectOptions["env.debug_init_break"].default
),
),
"debug_init_break", tool_settings.get("init_break",
"tbreak main")),
init_cmds=_cleanup_cmds(
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
),
env_options.get("debug_init_cmds",
tool_settings.get("init_cmds"))),
extra_cmds=extra_cmds,
require_debug_port=tool_settings.get("require_debug_port", False),
port=reveal_debug_port(
env_options.get("debug_port", tool_settings.get("port")),
tool_name,
tool_settings,
),
server=server_options,
)
tool_name, tool_settings),
server=server_options)
return result
def configure_esp32_load_cmds(debug_options, configuration):
ignore_conds = [
debug_options["load_cmds"] != ["load"],
debug_options['load_cmds'] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
not configuration.get("flash_extra_images"),
not all(
[isfile(item["path"]) for item in configuration.get("flash_extra_images")]
),
not configuration.get("flash_extra_images"), not all([
isfile(item['path'])
for item in configuration.get("flash_extra_images")
])
]
if any(ignore_conds):
return debug_options["load_cmds"]
return debug_options['load_cmds']
mon_cmds = [
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
path=fs.to_unix_path(item['path']), offset=item['offset'])
for item in configuration.get("flash_extra_images")
]
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
% fs.to_unix_path(configuration["prog_path"][:-4])
)
mon_cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify' %
fs.to_unix_path(configuration['prog_path'][:-4]))
return mon_cmds
@@ -226,7 +181,7 @@ def has_debug_symbols(prog_path):
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
b"__PLATFORMIO_BUILD_DEBUG__": False,
b"__PLATFORMIO_BUILD_DEBUG__": False
}
with open(prog_path, "rb") as fp:
last_data = b""
@@ -257,7 +212,7 @@ def is_prog_obsolete(prog_path):
new_digest = shasum.hexdigest()
old_digest = None
if isfile(prog_hash_path):
with open(prog_hash_path) as fp:
with open(prog_hash_path, "r") as fp:
old_digest = fp.read()
if new_digest == old_digest:
return False
@@ -267,6 +222,7 @@ def is_prog_obsolete(prog_path):
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
@@ -282,21 +238,18 @@ def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
if not _is_match_pattern(item['port']):
continue
port = item["port"]
port = item['port']
if tool_name.startswith("blackmagic"):
if (
"windows" in util.get_systype()
and port.startswith("COM")
and len(port) > 4
):
if "windows" in util.get_systype() and \
port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
if "GDB" in item['description']:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
if hwid_str in item['hwid']:
return port
return None
@@ -307,5 +260,6 @@ def reveal_debug_port(env_debug_port, tool_name, tool_settings):
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
raise exception.DebugInvalidOptions(
"Please specify `debug_port` for environment")
return debug_port
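A short illustration of the HWID matching inside `_look_for_serial_port` above: the (VID, PID) pairs are normalized and compared against the serial port's HWID string; the values below are invented.

# Invented values illustrating the HWID normalization used above.
hwids = [["0x1D50", "0x6018"]]            # e.g. from a board manifest "hwids" entry
port_hwid = "USB VID:PID=1D50:6018 SER=12345"
for hwid in hwids:
    hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
    print(hwid_str, hwid_str in port_hwid)   # 1D50:6018 True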

View File

@@ -17,51 +17,49 @@ define pio_reset_halt_target
monitor reset halt
end
define pio_reset_run_target
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMDS
monitor init
pio_reset_halt_target
"""
GDB_STUTIL_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
monitor reset
end
define pio_reset_run_target
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_JLINK_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
monitor reset
end
define pio_reset_run_target
monitor clrbp
define pio_reset_target
monitor reset
monitor go
end
target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_BLACKMAGIC_INIT_CONFIG = """
@@ -75,7 +73,7 @@ define pio_reset_halt_target
set language auto
end
define pio_reset_run_target
define pio_reset_target
pio_reset_halt_target
end
@@ -83,8 +81,8 @@ target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
$LOAD_CMDS
set language c
set *0xE000ED0C = 0x05FA0004
@@ -99,14 +97,14 @@ GDB_MSPDEBUG_INIT_CONFIG = """
define pio_reset_halt_target
end
define pio_reset_run_target
define pio_reset_target
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
monitor erase
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_QEMU_INIT_CONFIG = """
@@ -114,48 +112,12 @@ define pio_reset_halt_target
monitor system_reset
end
define pio_reset_run_target
monitor system_reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_RENODE_INIT_CONFIG = """
define pio_reset_halt_target
monitor machine Reset
$LOAD_CMDS
monitor start
end
define pio_reset_run_target
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
$INIT_BREAK
monitor start
$LOAD_CMDS
pio_reset_halt_target
"""
TOOL_TO_CONFIG = {
"jlink": GDB_JLINK_INIT_CONFIG,
"mspdebug": GDB_MSPDEBUG_INIT_CONFIG,
"qemu": GDB_QEMU_INIT_CONFIG,
"blackmagic": GDB_BLACKMAGIC_INIT_CONFIG,
"renode": GDB_RENODE_INIT_CONFIG,
}
def get_gdb_init_config(debug_options):
tool = debug_options.get("tool")
if tool and tool in TOOL_TO_CONFIG:
return TOOL_TO_CONFIG[tool]
server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
if "st-util" in server_exe:
return GDB_STUTIL_INIT_CONFIG
return GDB_DEFAULT_INIT_CONFIG
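A minimal sketch of the selection logic in `get_gdb_init_config` above: an explicit `debug_tool` wins, otherwise the server executable name is inspected; the placeholder strings stand in for the multi-line GDB scripts defined in this file.

# Standalone restatement of get_gdb_init_config() above; the short
# strings are placeholders for the GDB init scripts shown in this file.
TOOL_TO_CONFIG = {"jlink": "JLINK", "mspdebug": "MSPDEBUG", "qemu": "QEMU",
                  "blackmagic": "BLACKMAGIC", "renode": "RENODE"}

def pick_config(debug_options):
    tool = debug_options.get("tool")
    if tool and tool in TOOL_TO_CONFIG:
        return TOOL_TO_CONFIG[tool]
    server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
    if "st-util" in server_exe:
        return "STUTIL"
    return "DEFAULT"

print(pick_config({"tool": "jlink"}))                                        # JLINK
print(pick_config({"tool": "stlink", "server": {"executable": "st-util"}}))  # STUTIL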

View File

@@ -13,7 +13,6 @@
# limitations under the License.
import signal
import time
import click
from twisted.internet import protocol # pylint: disable=import-error
@@ -23,21 +22,19 @@ from platformio.compat import string_types
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_core_dir
LOG_FILE = None
class BaseProcess(protocol.ProcessProtocol, object):
STDOUT_CHUNK_SIZE = 2048
LOG_FILE = None
COMMON_PATTERNS = {
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
"PYTHONEXE": get_pythonexe_path(),
"PYTHONEXE": get_pythonexe_path()
}
def __init__(self):
self._last_activity = 0
def apply_patterns(self, source, patterns=None):
_patterns = self.COMMON_PATTERNS.copy()
_patterns.update(patterns or {})
@@ -55,7 +52,8 @@ class BaseProcess(protocol.ProcessProtocol, object):
if isinstance(source, string_types):
source = _replace(source)
elif isinstance(source, (list, dict)):
items = enumerate(source) if isinstance(source, list) else source.items()
items = enumerate(source) if isinstance(source,
list) else source.items()
for key, value in items:
if isinstance(value, string_types):
source[key] = _replace(value)
@@ -64,30 +62,23 @@ class BaseProcess(protocol.ProcessProtocol, object):
return source
def onStdInData(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
def outReceived(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[: self.STDOUT_CHUNK_SIZE]
chunk = data[:self.STDOUT_CHUNK_SIZE]
click.echo(chunk, nl=False)
data = data[self.STDOUT_CHUNK_SIZE :]
data = data[self.STDOUT_CHUNK_SIZE:]
def errReceived(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
@staticmethod
def errReceived(data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
click.echo(data, nl=False, err=True)
def processEnded(self, _):
self._last_activity = time.time()
@staticmethod
def processEnded(_):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,166 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from os.path import isdir, isfile, join
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process.base import BaseProcess
from platformio.proc import where_is_program
class DebugServer(BaseProcess):
def __init__(self, debug_options, env_options):
super(DebugServer, self).__init__()
self.debug_options = debug_options
self.env_options = env_options
self._debug_port = ":3333"
self._transport = None
self._process_ended = False
self._ready = False
@defer.inlineCallbacks
def spawn(self, patterns): # pylint: disable=too-many-branches
systype = util.get_systype()
server = self.debug_options.get("server")
if not server:
defer.returnValue(None)
server = self.apply_patterns(server, patterns)
server_executable = server["executable"]
if not server_executable:
defer.returnValue(None)
if server["cwd"]:
server_executable = join(server["cwd"], server_executable)
if (
"windows" in systype
and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")
):
server_executable = server_executable + ".exe"
if not isfile(server_executable):
server_executable = where_is_program(server_executable)
if not isfile(server_executable):
raise DebugInvalidOptionsError(
"\nCould not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n\n"
"See documentation or contact contact@platformio.org:\n"
"https://docs.platformio.org/page/plus/debugging.html\n"
% server_executable
)
openocd_pipe_allowed = all(
[not self.debug_options["port"], "openocd" in server_executable]
)
if openocd_pipe_allowed:
args = []
if server["cwd"]:
args.extend(["-s", server["cwd"]])
args.extend(
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
)
args.extend(server["arguments"])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
)
self._debug_port = '| "%s" %s' % (server_executable, str_args)
self._debug_port = fs.to_unix_path(self._debug_port)
defer.returnValue(self._debug_port)
env = os.environ.copy()
# prepend server "lib" folder to LD path
if (
"windows" not in systype
and server["cwd"]
and isdir(join(server["cwd"], "lib"))
):
ld_key = "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
env[ld_key] = join(server["cwd"], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
# prepend BIN to PATH
if server["cwd"] and isdir(join(server["cwd"], "bin")):
env["PATH"] = "%s%s%s" % (
join(server["cwd"], "bin"),
os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")),
)
self._transport = reactor.spawnProcess(
self,
server_executable,
[server_executable] + server["arguments"],
path=server["cwd"],
env=env,
)
if "mspdebug" in server_executable.lower():
self._debug_port = ":2000"
elif "jlink" in server_executable.lower():
self._debug_port = ":2331"
elif "qemu" in server_executable.lower():
self._debug_port = ":1234"
yield self._wait_until_ready()
defer.returnValue(self._debug_port)
@defer.inlineCallbacks
def _wait_until_ready(self):
timeout = 10
elapsed = 0
delay = 0.5
auto_ready_delay = 0.5
while not self._ready and not self._process_ended and elapsed < timeout:
yield self.async_sleep(delay)
if not self.debug_options.get("server", {}).get("ready_pattern"):
self._ready = self._last_activity < (time.time() - auto_ready_delay)
elapsed += delay
@staticmethod
def async_sleep(secs):
d = defer.Deferred()
reactor.callLater(secs, d.callback, None)
return d
def get_debug_port(self):
return self._debug_port
def outReceived(self, data):
super(DebugServer, self).outReceived(
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
)
if self._ready:
return
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
if ready_pattern:
self._ready = ready_pattern.encode() in data
def processEnded(self, reason):
self._process_ended = True
super(DebugServer, self).processEnded(reason)
def terminate(self):
if self._process_ended or not self._transport:
return
try:
self._transport.signalProcess("KILL")
except: # pylint: disable=bare-except
pass
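
The "pipe" branch above hands GDB a launch command instead of a TCP port. A standalone illustration of how that string is assembled; the OpenOCD path, scripts directory and board file are invented examples.

# Re-creates the debug-port string built in the openocd_pipe_allowed branch.
server_executable = "/opt/openocd/bin/openocd"
args = [
    "-s", "/opt/openocd/scripts",
    "-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled",
    "-f", "board/st_nucleo_f4.cfg",
]
str_args = " ".join(arg if arg.startswith("-") else '"%s"' % arg for arg in args)
debug_port = '| "%s" %s' % (server_executable, str_args)
print(debug_port)
# | "/opt/openocd/bin/openocd" -s "/opt/openocd/scripts" -c "gdb_port pipe; tcl_port disabled; telnet_port disabled" -f "board/st_nucleo_f4.cfg"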

View File

@@ -0,0 +1,122 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from os.path import isdir, isfile, join
from twisted.internet import error # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.commands.debug.process import BaseProcess
from platformio.proc import where_is_program
class DebugServer(BaseProcess):
def __init__(self, debug_options, env_options):
self.debug_options = debug_options
self.env_options = env_options
self._debug_port = None
self._transport = None
self._process_ended = False
def spawn(self, patterns): # pylint: disable=too-many-branches
systype = util.get_systype()
server = self.debug_options.get("server")
if not server:
return None
server = self.apply_patterns(server, patterns)
server_executable = server['executable']
if not server_executable:
return None
if server['cwd']:
server_executable = join(server['cwd'], server_executable)
if ("windows" in systype and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")):
server_executable = server_executable + ".exe"
if not isfile(server_executable):
server_executable = where_is_program(server_executable)
if not isfile(server_executable):
raise exception.DebugInvalidOptions(
"\nCould not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n\n"
"See documentation or contact contact@platformio.org:\n"
"http://docs.platformio.org/page/plus/debugging.html\n" %
server_executable)
self._debug_port = ":3333"
openocd_pipe_allowed = all([
not self.debug_options['port'],
"openocd" in server_executable
]) # yapf: disable
if openocd_pipe_allowed:
args = []
if server['cwd']:
args.extend(["-s", server['cwd']])
args.extend([
"-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"
])
args.extend(server['arguments'])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args])
self._debug_port = '| "%s" %s' % (server_executable, str_args)
self._debug_port = fs.to_unix_path(self._debug_port)
else:
env = os.environ.copy()
# prepend server "lib" folder to LD path
if ("windows" not in systype and server['cwd']
and isdir(join(server['cwd'], "lib"))):
ld_key = ("DYLD_LIBRARY_PATH"
if "darwin" in systype else "LD_LIBRARY_PATH")
env[ld_key] = join(server['cwd'], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key],
os.environ.get(ld_key))
# prepend BIN to PATH
if server['cwd'] and isdir(join(server['cwd'], "bin")):
env['PATH'] = "%s%s%s" % (
join(server['cwd'], "bin"), os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")))
self._transport = reactor.spawnProcess(
self,
server_executable, [server_executable] + server['arguments'],
path=server['cwd'],
env=env)
if "mspdebug" in server_executable.lower():
self._debug_port = ":2000"
elif "jlink" in server_executable.lower():
self._debug_port = ":2331"
elif "qemu" in server_executable.lower():
self._debug_port = ":1234"
return self._transport
def get_debug_port(self):
return self._debug_port
def processEnded(self, reason):
self._process_ended = True
super(DebugServer, self).processEnded(reason)
def terminate(self):
if self._process_ended or not self._transport:
return
try:
self._transport.signalProcess("KILL")
except (OSError, error.ProcessExitedAlready):
pass

View File

@@ -0,0 +1,221 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from fnmatch import fnmatch
from os import getcwd
from os.path import join
import click
from serial.tools import miniterm
from platformio import exception, util
from platformio.compat import dump_json_to_unicode
from platformio.project.config import ProjectConfig
@click.group(short_help="Monitor device or list existing")
def cli():
pass
@cli.command("list", short_help="List devices")
@click.option("--serial", is_flag=True, help="List serial ports, default")
@click.option("--logical", is_flag=True, help="List logical devices")
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
@click.option("--json-output", is_flag=True)
def device_list( # pylint: disable=too-many-branches
serial, logical, mdns, json_output):
if not logical and not mdns:
serial = True
data = {}
if serial:
data['serial'] = util.get_serial_ports()
if logical:
data['logical'] = util.get_logical_devices()
if mdns:
data['mdns'] = util.get_mdns_services()
single_key = list(data)[0] if len(list(data)) == 1 else None
if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data))
titles = {
"serial": "Serial Ports",
"logical": "Logical Devices",
"mdns": "Multicast DNS Services"
}
for key, value in data.items():
if not single_key:
click.secho(titles[key], bold=True)
click.echo("=" * len(titles[key]))
if key == "serial":
for item in value:
click.secho(item['port'], fg="cyan")
click.echo("-" * len(item['port']))
click.echo("Hardware ID: %s" % item['hwid'])
click.echo("Description: %s" % item['description'])
click.echo("")
if key == "logical":
for item in value:
click.secho(item['path'], fg="cyan")
click.echo("-" * len(item['path']))
click.echo("Name: %s" % item['name'])
click.echo("")
if key == "mdns":
for item in value:
click.secho(item['name'], fg="cyan")
click.echo("-" * len(item['name']))
click.echo("Type: %s" % item['type'])
click.echo("IP: %s" % item['ip'])
click.echo("Port: %s" % item['port'])
if item['properties']:
click.echo("Properties: %s" % ("; ".join([
"%s=%s" % (k, v)
for k, v in item['properties'].items()
])))
click.echo("")
if single_key:
click.echo("")
return True
@cli.command("monitor", short_help="Monitor device (Serial)")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option("--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N")
@click.option("--rtscts",
is_flag=True,
help="Enable RTS/CTS flow control, default=Off")
@click.option("--xonxoff",
is_flag=True,
help="Enable software flow control, default=Off")
@click.option("--rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state")
@click.option("--dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state")
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option("--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8")
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option("--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF")
@click.option("--raw",
is_flag=True,
help="Do not apply any encodings/transformations")
@click.option("--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)")
@click.option("--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)")
@click.option("--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
resolve_path=True))
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment")
def device_monitor(**kwargs): # pylint: disable=too-many-branches
env_options = {}
try:
env_options = get_project_options(kwargs['project_dir'],
kwargs['environment'])
for k in ("port", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k
if k == "speed":
k = "baud"
if kwargs[k] is None and k2 in env_options:
kwargs[k] = env_options[k2]
if k != "port":
kwargs[k] = int(kwargs[k])
except exception.NotPlatformIOProject:
pass
if not kwargs['port']:
ports = util.get_serial_ports(filter_hwid=True)
if len(ports) == 1:
kwargs['port'] = ports[0]['port']
sys.argv = ["monitor"] + env_options.get("monitor_flags", [])
for k, v in kwargs.items():
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
continue
k = "--" + k.replace("_", "-")
if k in env_options.get("monitor_flags", []):
continue
if isinstance(v, bool):
if v:
sys.argv.append(k)
elif isinstance(v, tuple):
for i in v:
sys.argv.extend([k, i])
else:
sys.argv.extend([k, str(v)])
if kwargs['port'] and (set(["*", "?", "[", "]"]) & set(kwargs['port'])):
for item in util.get_serial_ports():
if fnmatch(item['port'], kwargs['port']):
kwargs['port'] = item['port']
break
try:
miniterm.main(default_port=kwargs['port'],
default_baudrate=kwargs['baud'] or 9600,
default_rts=kwargs['rts'],
default_dtr=kwargs['dtr'])
except Exception as e:
raise exception.MinitermException(e)
def get_project_options(project_dir, environment=None):
config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
config.validate(envs=[environment] if environment else None)
if not environment:
default_envs = config.default_envs()
if default_envs:
environment = default_envs[0]
else:
environment = config.envs()[0]
return config.items(env=environment, as_dict=True)
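
A note on the option-merging loop in device_monitor above: the diff's flattened indentation hides its nesting, and the int() conversion presumably applies only when a monitor_* value was actually taken from the project config (otherwise int(None) would fail for an unset rts/dtr). A toy walk-through with invented values:

# Invented project options; mirrors the merging loop in device_monitor above.
env_options = {"monitor_speed": "115200", "monitor_port": "/dev/ttyUSB0"}
kwargs = {"port": None, "baud": None, "rts": None, "dtr": None}

for k in ("port", "speed", "rts", "dtr"):
    k2 = "monitor_%s" % k
    if k == "speed":
        k = "baud"  # the CLI flag is --baud, the project option is monitor_speed
    if kwargs[k] is None and k2 in env_options:
        kwargs[k] = env_options[k2]
        if k != "port":
            kwargs[k] = int(kwargs[k])

print(kwargs)
# {'port': '/dev/ttyUSB0', 'baud': 115200, 'rts': None, 'dtr': None}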

View File

@@ -1,15 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.device.filters.base import DeviceMonitorFilter

View File

@@ -1,243 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from fnmatch import fnmatch
import click
from serial.tools import miniterm
from platformio import exception, fs, util
from platformio.commands.device import helpers as device_helpers
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory
from platformio.project.exception import NotPlatformIOProjectError
@click.group(short_help="Monitor device or list existing")
def cli():
pass
@cli.command("list", short_help="List devices")
@click.option("--serial", is_flag=True, help="List serial ports, default")
@click.option("--logical", is_flag=True, help="List logical devices")
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
@click.option("--json-output", is_flag=True)
def device_list( # pylint: disable=too-many-branches
serial, logical, mdns, json_output
):
if not logical and not mdns:
serial = True
data = {}
if serial:
data["serial"] = util.get_serial_ports()
if logical:
data["logical"] = util.get_logical_devices()
if mdns:
data["mdns"] = util.get_mdns_services()
single_key = list(data)[0] if len(list(data)) == 1 else None
if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data)
)
titles = {
"serial": "Serial Ports",
"logical": "Logical Devices",
"mdns": "Multicast DNS Services",
}
for key, value in data.items():
if not single_key:
click.secho(titles[key], bold=True)
click.echo("=" * len(titles[key]))
if key == "serial":
for item in value:
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
if key == "logical":
for item in value:
click.secho(item["path"], fg="cyan")
click.echo("-" * len(item["path"]))
click.echo("Name: %s" % item["name"])
click.echo("")
if key == "mdns":
for item in value:
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("Type: %s" % item["type"])
click.echo("IP: %s" % item["ip"])
click.echo("Port: %s" % item["port"])
if item["properties"]:
click.echo(
"Properties: %s"
% (
"; ".join(
[
"%s=%s" % (k, v)
for k, v in item["properties"].items()
]
)
)
)
click.echo("")
if single_key:
click.echo("")
return True
@cli.command("monitor", short_help="Monitor device (Serial)")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add filters/text transformations")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
def device_monitor(**kwargs): # pylint: disable=too-many-branches
# load default monitor filters
filters_dir = os.path.join(fs.get_source_dir(), "commands", "device", "filters")
for name in os.listdir(filters_dir):
if not name.endswith(".py"):
continue
device_helpers.load_monitor_filter(os.path.join(filters_dir, name))
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
project_options = device_helpers.get_project_options(kwargs["environment"])
kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options)
except NotPlatformIOProjectError:
pass
platform = None
if "platform" in project_options:
with fs.cd(kwargs["project_dir"]):
platform = PlatformFactory.newPlatform(project_options["platform"])
device_helpers.register_platform_filters(
platform, kwargs["project_dir"], kwargs["environment"]
)
if not kwargs["port"]:
ports = util.get_serial_ports(filter_hwid=True)
if len(ports) == 1:
kwargs["port"] = ports[0]["port"]
elif "platform" in project_options and "board" in project_options:
board_hwids = device_helpers.get_board_hwids(
kwargs["project_dir"], platform, project_options["board"],
)
for item in ports:
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
kwargs["port"] = item["port"]
break
if kwargs["port"]:
break
elif kwargs["port"] and (set(["*", "?", "[", "]"]) & set(kwargs["port"])):
for item in util.get_serial_ports():
if fnmatch(item["port"], kwargs["port"]):
kwargs["port"] = item["port"]
break
# override system argv with patched options
sys.argv = ["monitor"] + device_helpers.options_to_argv(
kwargs,
project_options,
ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"),
)
if not kwargs["quiet"]:
click.echo(
"--- Available filters and text transformations: %s"
% ", ".join(sorted(miniterm.TRANSFORMATIONS.keys()))
)
click.echo("--- More details at http://bit.ly/pio-monitor-filters")
try:
miniterm.main(
default_port=kwargs["port"],
default_baudrate=kwargs["baud"] or 9600,
default_rts=kwargs["rts"],
default_dtr=kwargs["dtr"],
)
except Exception as e:
raise exception.MinitermException(e)

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,42 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from serial.tools import miniterm
from platformio.project.config import ProjectConfig
class DeviceMonitorFilter(miniterm.Transform):
def __init__(self, project_dir=None, environment=None):
""" Called by PlatformIO to pass context """
miniterm.Transform.__init__(self)
self.project_dir = project_dir
self.environment = environment
self.config = ProjectConfig.get_instance()
if not self.environment:
default_envs = self.config.default_envs()
if default_envs:
self.environment = default_envs[0]
elif self.config.envs():
self.environment = self.config.envs()[0]
def __call__(self):
""" Called by the miniterm library when the filter is actually used """
return self
@property
def NAME(self):
raise NotImplementedError("Please declare NAME attribute for the filter class")
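
Given the base class above, a hypothetical custom filter would look roughly like this; the class name and NAME value are invented, and the import path simply follows the filters shown elsewhere in this diff. Per the helpers further below, such a module is picked up from the built-in filters directory or from a platform's monitor/filter_*.py files.

from platformio.commands.device import DeviceMonitorFilter


class Lowercase(DeviceMonitorFilter):
    NAME = "lowercase"

    def rx(self, text):
        # Text received from the device, transformed before it is displayed.
        return text.lower()

    def tx(self, text):
        # Text typed by the user is passed through unchanged.
        return text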

View File

@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import serial
from platformio.commands.device import DeviceMonitorFilter
class Hexlify(DeviceMonitorFilter):
NAME = "hexlify"
def __init__(self, *args, **kwargs):
super(Hexlify, self).__init__(*args, **kwargs)
self._counter = 0
def rx(self, text):
result = ""
for b in serial.iterbytes(text):
if (self._counter % 16) == 0:
result += "\n{:04X} | ".format(self._counter)
asciicode = ord(b)
if asciicode <= 255:
result += "{:02X} ".format(asciicode)
else:
result += "?? "
self._counter += 1
return result
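
For a sense of what the filter above renders, a standalone re-creation of its formatting loop (not the class itself, so no PlatformIO project context is needed):

# Mimics Hexlify.rx(): a new row every 16 bytes, offset followed by hex bytes.
def hexlify_demo(data):
    result, counter = "", 0
    for b in data:  # iterating over bytes in Python 3 yields ints
        if counter % 16 == 0:
            result += "\n{:04X} | ".format(counter)
        result += "{:02X} ".format(b)
        counter += 1
    return result


print(hexlify_demo(b"Hello, device!\r\n"))
# 0000 | 48 65 6C 6C 6F 2C 20 64 65 76 69 63 65 21 0D 0A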

View File

@@ -1,44 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os.path
from datetime import datetime
from platformio.commands.device import DeviceMonitorFilter
class LogToFile(DeviceMonitorFilter):
NAME = "log2file"
def __init__(self, *args, **kwargs):
super(LogToFile, self).__init__(*args, **kwargs)
self._log_fp = None
def __call__(self):
log_file_name = "platformio-device-monitor-%s.log" % datetime.now().strftime(
"%y%m%d-%H%M%S"
)
print("--- Logging an output to %s" % os.path.abspath(log_file_name))
self._log_fp = io.open(log_file_name, "w", encoding="utf-8")
return self
def __del__(self):
if self._log_fp:
self._log_fp.close()
def rx(self, text):
self._log_fp.write(text)
self._log_fp.flush()
return text

View File

@@ -1,31 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.device import DeviceMonitorFilter
class SendOnEnter(DeviceMonitorFilter):
NAME = "send_on_enter"
def __init__(self, *args, **kwargs):
super(SendOnEnter, self).__init__(*args, **kwargs)
self._buffer = ""
def tx(self, text):
self._buffer += text
if self._buffer.endswith("\r\n"):
text = self._buffer[:-2]
self._buffer = ""
return text
return ""

View File

@@ -1,34 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from platformio.commands.device import DeviceMonitorFilter
class Timestamp(DeviceMonitorFilter):
NAME = "time"
def __init__(self, *args, **kwargs):
super(Timestamp, self).__init__(*args, **kwargs)
self._first_text_received = False
def rx(self, text):
if self._first_text_received and "\n" not in text:
return text
timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]
if not self._first_text_received:
self._first_text_received = True
return "%s > %s" % (timestamp, text)
return text.replace("\n", "\n%s > " % timestamp)

View File

@@ -1,106 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import os
from serial.tools import miniterm
from platformio import fs
from platformio.commands.device import DeviceMonitorFilter
from platformio.compat import get_object_members, load_python_module
from platformio.project.config import ProjectConfig
def apply_project_monitor_options(cli_options, project_options):
for k in ("port", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k
if k == "speed":
k = "baud"
if cli_options[k] is None and k2 in project_options:
cli_options[k] = project_options[k2]
if k != "port":
cli_options[k] = int(cli_options[k])
return cli_options
def options_to_argv(cli_options, project_options, ignore=None):
confmon_flags = project_options.get("monitor_flags", [])
result = confmon_flags[::]
for f in project_options.get("monitor_filters", []):
result.extend(["--filter", f])
for k, v in cli_options.items():
if v is None or (ignore and k in ignore):
continue
k = "--" + k.replace("_", "-")
if k in confmon_flags:
continue
if isinstance(v, bool):
if v:
result.append(k)
elif isinstance(v, tuple):
for i in v:
result.extend([k, i])
else:
result.extend([k, str(v)])
return result
def get_project_options(environment=None):
config = ProjectConfig.get_instance()
config.validate(envs=[environment] if environment else None)
if not environment:
default_envs = config.default_envs()
if default_envs:
environment = default_envs[0]
else:
environment = config.envs()[0]
return config.items(env=environment, as_dict=True)
def get_board_hwids(project_dir, platform, board):
with fs.cd(project_dir):
return platform.board_config(board).get("build.hwids", [])
def load_monitor_filter(path, project_dir=None, environment=None):
name = os.path.basename(path)
name = name[: name.find(".")]
module = load_python_module("platformio.commands.device.filters.%s" % name, path)
for cls in get_object_members(module).values():
if (
not inspect.isclass(cls)
or not issubclass(cls, DeviceMonitorFilter)
or cls == DeviceMonitorFilter
):
continue
obj = cls(project_dir, environment)
miniterm.TRANSFORMATIONS[obj.NAME] = obj
return True
def register_platform_filters(platform, project_dir, environment):
monitor_dir = os.path.join(platform.get_dir(), "monitor")
if not os.path.isdir(monitor_dir):
return
for name in os.listdir(monitor_dir):
if not name.startswith("filter_") or not name.endswith(".py"):
continue
path = os.path.join(monitor_dir, name)
if not os.path.isfile(path):
continue
load_monitor_filter(path, project_dir, environment)
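
A small usage sketch of options_to_argv above; the option values are invented, and it assumes it runs where this version of platformio is importable.

from platformio.commands.device import helpers as device_helpers

# Invented CLI/project options, matching the shapes used by device_monitor.
cli_options = {"baud": 115200, "echo": True, "filter": ("time",), "port": None}
project_options = {
    "monitor_flags": ["--parity", "N"],
    "monitor_filters": ["log2file"],
}

argv = device_helpers.options_to_argv(
    cli_options, project_options, ignore=("port",)
)
print(argv)
# ['--parity', 'N', '--filter', 'log2file',
#  '--baud', '115200', '--echo', '--filter', 'time']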

View File

@@ -11,3 +11,5 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.home.command import cli

View File

@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals,too-many-statements
import mimetypes
import socket
from os.path import isdir
@@ -21,12 +19,8 @@ from os.path import isdir
import click
from platformio import exception
from platformio.compat import WINDOWS
from platformio.managers.core import (
build_contrib_pysite_deps,
get_core_package_dir,
inject_contrib_pysite,
)
from platformio.managers.core import (get_core_package_dir,
inject_contrib_pysite)
@click.command("home", short_help="PIO Home")
@@ -34,55 +28,33 @@ from platformio.managers.core import (
@click.option(
"--host",
default="127.0.0.1",
help=(
"HTTP host, default=127.0.0.1. You can open PIO Home for inbound "
"connections with --host=0.0.0.0"
),
)
@click.option("--no-open", is_flag=True)
@click.option(
"--shutdown-timeout",
default=0,
type=int,
help=(
"Automatically shutdown server on timeout (in seconds) when no clients "
"are connected. Default is 0 which means never auto shutdown"
),
)
def cli(port, host, no_open, shutdown_timeout):
# pylint: disable=import-error, import-outside-toplevel
help="HTTP host, default=127.0.0.1. "
"You can open PIO Home for inbound connections with --host=0.0.0.0")
@click.option("--no-open", is_flag=True) # pylint: disable=too-many-locals
def cli(port, host, no_open):
# import contrib modules
inject_contrib_pysite()
try:
from autobahn.twisted.resource import WebSocketResource
except: # pylint: disable=bare-except
build_contrib_pysite_deps(get_core_package_dir("contrib-pysite"))
from autobahn.twisted.resource import WebSocketResource
# pylint: disable=import-error
from autobahn.twisted.resource import WebSocketResource
from twisted.internet import reactor
from twisted.web import server
from twisted.internet.error import CannotListenError
# pylint: enable=import-error
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.server import JSONRPCServerFactory
from platformio.commands.home.web import WebRoot
factory = JSONRPCServerFactory(shutdown_timeout)
factory = JSONRPCServerFactory()
factory.addHandler(AppRPC(), namespace="app")
factory.addHandler(IDERPC(), namespace="ide")
factory.addHandler(MiscRPC(), namespace="misc")
factory.addHandler(OSRPC(), namespace="os")
factory.addHandler(PIOCoreRPC(), namespace="core")
factory.addHandler(ProjectRPC(), namespace="project")
factory.addHandler(AccountRPC(), namespace="account")
contrib_dir = get_core_package_dir("contrib-piohome")
if not isdir(contrib_dir):
@@ -101,7 +73,15 @@ def cli(port, host, no_open, shutdown_timeout):
if host == "__do_not_start__":
return
already_started = is_port_used(host, port)
# if already started
already_started = False
socket.setdefaulttimeout(1)
try:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
already_started = True
except: # pylint: disable=bare-except
pass
home_url = "http://%s:%d" % (host, port)
if not no_open:
if already_started:
@@ -109,53 +89,21 @@ def cli(port, host, no_open, shutdown_timeout):
else:
reactor.callLater(1, lambda: click.launch(home_url))
click.echo(
"\n".join(
[
"",
" ___I_",
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
]
)
)
click.echo("\n".join([
"",
" ___I_",
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
]))
click.echo("")
click.echo("Open PlatformIO Home in your browser by this URL => %s" % home_url)
try:
reactor.listenTCP(port, site, interface=host)
except CannotListenError as e:
click.secho(str(e), fg="red", err=True)
already_started = True
click.echo("Open PIO Home in your browser by this URL => %s" % home_url)
if already_started:
click.secho(
"PlatformIO Home server is already started in another process.", fg="yellow"
)
return
click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")
reactor.listenTCP(port, site, interface=host)
reactor.run()
def is_port_used(host, port):
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if WINDOWS:
try:
s.bind((host, port))
s.close()
return False
except (OSError, socket.error):
pass
else:
try:
s.connect((host, port))
s.close()
except socket.error:
return False
return True
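
A standalone probe mirroring the non-Windows branch of is_port_used() above; host and port are example values.

import socket

def port_in_use(host, port, timeout=1.0):
    # True if something is already listening (e.g. a running PIO Home server).
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    try:
        s.connect((host, port))
        return True
    except socket.error:
        return False
    finally:
        s.close()

print(port_in_use("127.0.0.1", 8008))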

View File

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=keyword-arg-before-vararg,arguments-differ,signature-differs
# pylint: disable=keyword-arg-before-vararg, arguments-differ
import os
import socket
@@ -27,6 +27,7 @@ from platformio.proc import where_is_program
class AsyncSession(requests.Session):
def __init__(self, n=None, *args, **kwargs):
if n:
pool = reactor.getThreadPool()
@@ -50,8 +51,7 @@ def requests_session():
@util.memoized(expire="60s")
def get_core_fullpath():
return where_is_program(
"platformio" + (".exe" if "windows" in util.get_systype() else "")
)
"platformio" + (".exe" if "windows" in util.get_systype() else ""))
@util.memoized(expire="10s")
@@ -60,7 +60,9 @@ def is_twitter_blocked():
timeout = 2
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % ip, allow_redirects=False, timeout=timeout)
requests.get("http://%s" % ip,
allow_redirects=False,
timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
return False

View File

@@ -1,29 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jsonrpc # pylint: disable=import-error
from platformio.commands.account.client import AccountClient
class AccountRPC(object):
@staticmethod
def call_client(method, *args, **kwargs):
try:
client = AccountClient()
return getattr(client, method)(*args, **kwargs)
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4003, message="PIO Account Call Error", data=str(e)
)

View File

@@ -14,10 +14,11 @@
from __future__ import absolute_import
from os.path import join
from os.path import expanduser, join
from platformio import __version__, app, fs, util
from platformio.project.helpers import get_project_core_dir, is_platformio_project
from platformio import __version__, app, util
from platformio.project.helpers import (get_project_core_dir,
is_platformio_project)
class AppRPC(object):
@@ -25,13 +26,8 @@ class AppRPC(object):
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
IGNORE_STORAGE_KEYS = [
"cid",
"coreVersion",
"coreSystype",
"coreCaller",
"coreSettings",
"homeDir",
"projectsDir",
"cid", "coreVersion", "coreSystype", "coreCaller", "coreSettings",
"homeDir", "projectsDir"
]
@staticmethod
@@ -41,28 +37,31 @@ class AppRPC(object):
# base data
caller_id = app.get_session_var("caller_id")
storage["cid"] = app.get_cid()
storage["coreVersion"] = __version__
storage["coreSystype"] = util.get_systype()
storage["coreCaller"] = str(caller_id).lower() if caller_id else None
storage["coreSettings"] = {
storage['cid'] = app.get_cid()
storage['coreVersion'] = __version__
storage['coreSystype'] = util.get_systype()
storage['coreCaller'] = (str(caller_id).lower()
if caller_id else None)
storage['coreSettings'] = {
name: {
"description": data["description"],
"default_value": data["value"],
"value": app.get_setting(name),
"description": data['description'],
"default_value": data['value'],
"value": app.get_setting(name)
}
for name, data in app.DEFAULT_SETTINGS.items()
}
storage["homeDir"] = fs.expanduser("~")
storage["projectsDir"] = storage["coreSettings"]["projects_dir"]["value"]
storage['homeDir'] = expanduser("~")
storage['projectsDir'] = storage['coreSettings']['projects_dir'][
'value']
# skip non-existing recent projects
storage["recentProjects"] = [
p for p in storage.get("recentProjects", []) if is_platformio_project(p)
storage['recentProjects'] = [
p for p in storage.get("recentProjects", [])
if is_platformio_project(p)
]
state["storage"] = storage
state['storage'] = storage
state.modified = False # skip saving extra fields
return state.as_dict()

View File

@@ -19,18 +19,20 @@ from twisted.internet import defer # pylint: disable=import-error
class IDERPC(object):
def __init__(self):
self._queue = {}
def send_command(self, sid, command, params):
def send_command(self, command, params, sid=0):
if not self._queue.get(sid):
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4005, message="PIO Home IDE agent is not started"
)
code=4005, message="PIO Home IDE agent is not started")
while self._queue[sid]:
self._queue[sid].pop().callback(
{"id": time.time(), "method": command, "params": params}
)
self._queue[sid].pop().callback({
"id": time.time(),
"method": command,
"params": params
})
def listen_commands(self, sid=0):
if sid not in self._queue:
@@ -38,10 +40,5 @@ class IDERPC(object):
self._queue[sid].append(defer.Deferred())
return self._queue[sid][-1]
def open_project(self, sid, project_dir):
return self.send_command(sid, "open_project", project_dir)
def open_text_document(self, sid, path, line=None, column=None):
return self.send_command(
sid, "open_text_document", dict(path=path, line=line, column=column)
)
def open_project(self, project_dir, sid=0):
return self.send_command("open_project", project_dir, sid)

View File

@@ -22,31 +22,33 @@ from platformio.commands.home.rpc.handlers.os import OSRPC
class MiscRPC(object):
def load_latest_tweets(self, data_url):
cache_key = app.ContentCache.key_from_args(data_url, "tweets")
def load_latest_tweets(self, username):
cache_key = "piohome_latest_tweets_" + str(username)
cache_valid = "7d"
with app.ContentCache() as cc:
cache_data = cc.get(cache_key)
if cache_data:
cache_data = json.loads(cache_data)
# automatically update cache in background every 12 hours
if cache_data["time"] < (time.time() - (3600 * 12)):
reactor.callLater(
5, self._preload_latest_tweets, data_url, cache_key, cache_valid
)
return cache_data["result"]
if cache_data['time'] < (time.time() - (3600 * 12)):
reactor.callLater(5, self._preload_latest_tweets, username,
cache_key, cache_valid)
return cache_data['result']
result = self._preload_latest_tweets(data_url, cache_key, cache_valid)
result = self._preload_latest_tweets(username, cache_key, cache_valid)
return result
@staticmethod
@defer.inlineCallbacks
def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((yield OSRPC.fetch_content(data_url)))
def _preload_latest_tweets(username, cache_key, cache_valid):
result = yield OSRPC.fetch_content(
"https://api.platformio.org/tweets/" + username)
result = json.loads(result)
with app.ContentCache() as cc:
cc.set(
cache_key,
json.dumps({"time": int(time.time()), "result": result}),
cache_valid,
)
cc.set(cache_key,
json.dumps({
"time": int(time.time()),
"result": result
}), cache_valid)
defer.returnValue(result)

View File

@@ -14,33 +14,35 @@
from __future__ import absolute_import
import codecs
import glob
import io
import os
import shutil
from functools import cmp_to_key
from os.path import expanduser, isdir, isfile, join
import click
from twisted.internet import defer # pylint: disable=import-error
from platformio import app, fs, util
from platformio import app, util
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding
class OSRPC(object):
@staticmethod
@defer.inlineCallbacks
def fetch_content(uri, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent": (
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
"AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
"Safari/603.3.8"
)
"User-Agent":
("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
"AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
"Safari/603.3.8")
}
cache_key = app.ContentCache.key_from_args(uri, data) if cache_valid else None
cache_key = (app.ContentCache.key_from_args(uri, data)
if cache_valid else None)
with app.ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
@@ -64,12 +66,12 @@ class OSRPC(object):
defer.returnValue(result)
def request_content(self, uri, data=None, headers=None, cache_valid=None):
if uri.startswith("http"):
if uri.startswith('http'):
return self.fetch_content(uri, data, headers, cache_valid)
if os.path.isfile(uri):
with io.open(uri, encoding="utf-8") as fp:
return fp.read()
return None
if not isfile(uri):
return None
with codecs.open(uri, encoding="utf-8") as fp:
return fp.read()
@staticmethod
def open_url(url):
@@ -78,29 +80,21 @@ class OSRPC(object):
@staticmethod
def reveal_file(path):
return click.launch(
path.encode(get_filesystem_encoding()) if PY2 else path, locate=True
)
@staticmethod
def open_file(path):
return click.launch(path.encode(get_filesystem_encoding()) if PY2 else path)
path.encode(get_filesystem_encoding()) if PY2 else path,
locate=True)
@staticmethod
def is_file(path):
return os.path.isfile(path)
return isfile(path)
@staticmethod
def is_dir(path):
return os.path.isdir(path)
return isdir(path)
@staticmethod
def make_dirs(path):
return os.makedirs(path)
@staticmethod
def get_file_mtime(path):
return os.path.getmtime(path)
@staticmethod
def rename(src, dst):
return os.rename(src, dst)
@@ -115,11 +109,13 @@ class OSRPC(object):
pathnames = [pathnames]
result = set()
for pathname in pathnames:
result |= set(glob.glob(os.path.join(root, pathname) if root else pathname))
result |= set(
glob.glob(join(root, pathname) if root else pathname))
return list(result)
@staticmethod
def list_dir(path):
def _cmp(x, y):
if x[1] and not y[1]:
return -1
@@ -133,14 +129,14 @@ class OSRPC(object):
items = []
if path.startswith("~"):
path = fs.expanduser(path)
if not os.path.isdir(path):
path = expanduser(path)
if not isdir(path):
return items
for item in os.listdir(path):
try:
item_is_dir = os.path.isdir(os.path.join(path, item))
item_is_dir = isdir(join(path, item))
if item_is_dir:
os.listdir(os.path.join(path, item))
os.listdir(join(path, item))
items.append((item, item_is_dir))
except OSError:
pass
@@ -150,7 +146,7 @@ class OSRPC(object):
def get_logical_devices():
items = []
for item in util.get_logical_devices():
if item["name"]:
item["name"] = item["name"]
if item['name']:
item['name'] = item['name']
items.append(item)
return items

View File

@@ -27,7 +27,8 @@ from twisted.internet import utils # pylint: disable=import-error
from platformio import __main__, __version__, fs
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
from platformio.compat import (PY2, get_filesystem_encoding, is_bytes,
string_types)
try:
from thread import get_ident as thread_get_ident
@@ -36,6 +37,7 @@ except ImportError:
class MultiThreadingStdStream(object):
def __init__(self, parent_stream):
self._buffers = {thread_get_ident(): parent_stream}
@@ -52,8 +54,7 @@ class MultiThreadingStdStream(object):
thread_id = thread_get_ident()
self._ensure_thread_buffer(thread_id)
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value
)
value.decode() if is_bytes(value) else value)
def get_value_and_reset(self):
result = ""
@@ -67,6 +68,7 @@ class MultiThreadingStdStream(object):
class PIOCoreRPC(object):
@staticmethod
def version():
return __version__
@@ -96,21 +98,22 @@ class PIOCoreRPC(object):
to_json = "--json-output" in args
try:
if args and args[0] == "remote":
if args and args[0] in ("account", "remote"):
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
else:
result = yield PIOCoreRPC._call_inline(args, options)
try:
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
defer.returnValue(
PIOCoreRPC._process_result(result, to_json))
except ValueError:
# fall-back to subprocess method
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
defer.returnValue(
PIOCoreRPC._process_result(result, to_json))
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4003, message="PIO Core Call Error", data=str(e)
)
code=4003, message="PIO Core Call Error", data=str(e))
@staticmethod
def _call_inline(args, options):
@@ -120,11 +123,8 @@ class PIOCoreRPC(object):
def _thread_task():
with fs.cd(cwd):
exit_code = __main__.main(["-c"] + args)
return (
PIOCoreRPC.thread_stdout.get_value_and_reset(),
PIOCoreRPC.thread_stderr.get_value_and_reset(),
exit_code,
)
return (PIOCoreRPC.thread_stdout.get_value_and_reset(),
PIOCoreRPC.thread_stderr.get_value_and_reset(), exit_code)
return threads.deferToThread(_thread_task)
@@ -135,8 +135,8 @@ class PIOCoreRPC(object):
helpers.get_core_fullpath(),
args,
path=cwd,
env={k: v for k, v in os.environ.items() if "%" not in k},
)
env={k: v
for k, v in os.environ.items() if "%" not in k})
@staticmethod
def _process_result(result, to_json=False):
@@ -146,8 +146,6 @@ class PIOCoreRPC(object):
raise Exception(text)
if not to_json:
return text
if is_bytes(out):
out = out.decode()
try:
return json.loads(out)
except ValueError as e:

View File

@@ -17,6 +17,8 @@ from __future__ import absolute_import
import os
import shutil
import time
from os.path import (basename, expanduser, getmtime, isdir, isfile, join,
realpath, sep)
import jsonrpc # pylint: disable=import-error
@@ -27,175 +29,137 @@ from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectError
from platformio.project.helpers import get_project_dir, is_platformio_project
from platformio.project.options import get_config_options_schema
from platformio.project.helpers import (get_project_libdeps_dir,
get_project_src_dir,
is_platformio_project)
class ProjectRPC(object):
@staticmethod
def config_call(init_kwargs, method, *args):
assert isinstance(init_kwargs, dict)
assert "path" in init_kwargs
project_dir = get_project_dir()
if os.path.isfile(init_kwargs["path"]):
project_dir = os.path.dirname(init_kwargs["path"])
with fs.cd(project_dir):
return getattr(ProjectConfig(**init_kwargs), method)(*args)
@staticmethod
def config_load(path):
return ProjectConfig(
path, parse_extra=False, expand_interpolations=False
).as_tuple()
def _get_projects(project_dirs=None):
@staticmethod
def config_dump(path, data):
config = ProjectConfig(path, parse_extra=False, expand_interpolations=False)
config.update(data, clear=True)
return config.save()
@staticmethod
def config_update_description(path, text):
config = ProjectConfig(path, parse_extra=False, expand_interpolations=False)
if not config.has_section("platformio"):
config.add_section("platformio")
if text:
config.set("platformio", "description", text)
else:
if config.has_option("platformio", "description"):
config.remove_option("platformio", "description")
if not config.options("platformio"):
config.remove_section("platformio")
return config.save()
@staticmethod
def get_config_schema():
return get_config_options_schema()
@staticmethod
def get_projects():
def _get_project_data():
def _get_project_data(project_dir):
data = {"boards": [], "envLibdepsDirs": [], "libExtraDirs": []}
config = ProjectConfig()
data["envs"] = config.envs()
data["description"] = config.get("platformio", "description")
data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
config = ProjectConfig(join(project_dir, "platformio.ini"))
libdeps_dir = get_project_libdeps_dir()
data['libExtraDirs'].extend(
config.get("platformio", "lib_extra_dirs", []))
libdeps_dir = config.get_optional_dir("libdeps")
for section in config.sections():
if not section.startswith("env:"):
continue
data["envLibdepsDirs"].append(os.path.join(libdeps_dir, section[4:]))
data['envLibdepsDirs'].append(join(libdeps_dir, section[4:]))
if config.has_option(section, "board"):
data["boards"].append(config.get(section, "board"))
data["libExtraDirs"].extend(config.get(section, "lib_extra_dirs", []))
data['boards'].append(config.get(section, "board"))
data['libExtraDirs'].extend(
config.get(section, "lib_extra_dirs", []))
# skip non existing folders and resolve full path
for key in ("envLibdepsDirs", "libExtraDirs"):
data[key] = [
fs.expanduser(d) if d.startswith("~") else os.path.realpath(d)
for d in data[key]
if os.path.isdir(d)
expanduser(d) if d.startswith("~") else realpath(d)
for d in data[key] if isdir(d)
]
return data
def _path_to_name(path):
return (os.path.sep).join(path.split(os.path.sep)[-2:])
return (sep).join(path.split(sep)[-2:])
if not project_dirs:
project_dirs = AppRPC.load_state()['storage']['recentProjects']
result = []
pm = PlatformManager()
for project_dir in AppRPC.load_state()["storage"]["recentProjects"]:
if not os.path.isdir(project_dir):
continue
for project_dir in project_dirs:
data = {}
boards = []
try:
with fs.cd(project_dir):
data = _get_project_data()
except ProjectError:
data = _get_project_data(project_dir)
except exception.PlatformIOProjectException:
continue
for board_id in data.get("boards", []):
name = board_id
try:
name = pm.board_config(board_id)["name"]
name = pm.board_config(board_id)['name']
except exception.PlatformioException:
pass
boards.append({"id": board_id, "name": name})
result.append(
{
"path": project_dir,
"name": _path_to_name(project_dir),
"modified": int(os.path.getmtime(project_dir)),
"boards": boards,
"description": data.get("description"),
"envs": data.get("envs", []),
"envLibStorages": [
{"name": os.path.basename(d), "path": d}
for d in data.get("envLibdepsDirs", [])
],
"extraLibStorages": [
{"name": _path_to_name(d), "path": d}
for d in data.get("libExtraDirs", [])
],
}
)
result.append({
"path":
project_dir,
"name":
_path_to_name(project_dir),
"modified":
int(getmtime(project_dir)),
"boards":
boards,
"envLibStorages": [{
"name": basename(d),
"path": d
} for d in data.get("envLibdepsDirs", [])],
"extraLibStorages": [{
"name": _path_to_name(d),
"path": d
} for d in data.get("libExtraDirs", [])]
})
return result
def get_projects(self, project_dirs=None):
return self._get_projects(project_dirs)
@staticmethod
def get_project_examples():
result = []
for manifest in PlatformManager().get_installed():
examples_dir = os.path.join(manifest["__pkg_dir"], "examples")
if not os.path.isdir(examples_dir):
examples_dir = join(manifest['__pkg_dir'], "examples")
if not isdir(examples_dir):
continue
items = []
for project_dir, _, __ in os.walk(examples_dir):
project_description = None
try:
config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
config = ProjectConfig(join(project_dir, "platformio.ini"))
config.validate(silent=True)
project_description = config.get("platformio", "description")
except ProjectError:
project_description = config.get("platformio",
"description")
except exception.PlatformIOProjectException:
continue
path_tokens = project_dir.split(os.path.sep)
items.append(
{
"name": "/".join(
path_tokens[path_tokens.index("examples") + 1 :]
),
"path": project_dir,
"description": project_description,
}
)
result.append(
{
"platform": {
"title": manifest["title"],
"version": manifest["version"],
},
"items": sorted(items, key=lambda item: item["name"]),
}
)
return sorted(result, key=lambda data: data["platform"]["title"])
path_tokens = project_dir.split(sep)
items.append({
"name":
"/".join(path_tokens[path_tokens.index("examples") + 1:]),
"path":
project_dir,
"description":
project_description
})
result.append({
"platform": {
"title": manifest['title'],
"version": manifest['version']
},
"items": sorted(items, key=lambda item: item['name'])
})
return sorted(result, key=lambda data: data['platform']['title'])
def init(self, board, framework, project_dir):
assert project_dir
state = AppRPC.load_state()
if not os.path.isdir(project_dir):
if not isdir(project_dir):
os.makedirs(project_dir)
args = ["init", "--board", board]
if framework:
args.extend(["--project-option", "framework = %s" % framework])
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._generate_project_main, project_dir, framework)
return d
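A minimal sketch of the Twisted Deferred chaining used by `init` (and the other RPC methods below); the values are hypothetical and stand in for the real `PIOCoreRPC.call` result, and Twisted is assumed to be installed:

from twisted.internet import defer

d = defer.Deferred()
# extra positional arguments after the callback are appended to the result,
# mirroring d.addCallback(self._generate_project_main, project_dir, framework)
d.addCallback(lambda result, project_dir, framework: project_dir,
              "/tmp/new-project", "arduino")
d.callback("pio init finished")  # fires the chain with a hypothetical result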
@@ -204,101 +168,90 @@ class ProjectRPC(object):
def _generate_project_main(_, project_dir, framework):
main_content = None
if framework == "arduino":
main_content = "\n".join(
[
"#include <Arduino.h>",
"",
"void setup() {",
" // put your setup code here, to run once:",
"}",
"",
"void loop() {",
" // put your main code here, to run repeatedly:",
"}",
"",
]
)
main_content = "\n".join([
"#include <Arduino.h>",
"",
"void setup() {",
" // put your setup code here, to run once:",
"}",
"",
"void loop() {",
" // put your main code here, to run repeatedly:",
"}"
""
]) # yapf: disable
elif framework == "mbed":
main_content = "\n".join(
[
"#include <mbed.h>",
"",
"int main() {",
"",
" // put your setup code here, to run once:",
"",
" while(1) {",
" // put your main code here, to run repeatedly:",
" }",
"}",
"",
]
)
main_content = "\n".join([
"#include <mbed.h>",
"",
"int main() {",
"",
" // put your setup code here, to run once:",
"",
" while(1) {",
" // put your main code here, to run repeatedly:",
" }",
"}",
""
]) # yapf: disable
if not main_content:
return project_dir
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
main_path = os.path.join(src_dir, "main.cpp")
if os.path.isfile(main_path):
src_dir = get_project_src_dir()
main_path = join(src_dir, "main.cpp")
if isfile(main_path):
return project_dir
if not os.path.isdir(src_dir):
if not isdir(src_dir):
os.makedirs(src_dir)
with open(main_path, "w") as fp:
fp.write(main_content.strip())
with open(main_path, "w") as f:
f.write(main_content.strip())
return project_dir
def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
board = str(board)
if arduino_project_dir and PY2:
arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
arduino_project_dir = arduino_project_dir.encode(
get_filesystem_encoding())
# don't import PIO Project
if is_platformio_project(arduino_project_dir):
return arduino_project_dir
is_arduino_project = any(
[
os.path.isfile(
os.path.join(
arduino_project_dir,
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
)
)
for ext in ("ino", "pde")
]
)
is_arduino_project = any([
isfile(
join(arduino_project_dir,
"%s.%s" % (basename(arduino_project_dir), ext)))
for ext in ("ino", "pde")
])
if not is_arduino_project:
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
)
code=4000,
message="Not an Arduino project: %s" % arduino_project_dir)
state = AppRPC.load_state()
project_dir = os.path.join(
state["storage"]["projectsDir"], time.strftime("%y%m%d-%H%M%S-") + board
)
if not os.path.isdir(project_dir):
project_dir = join(state['storage']['projectsDir'],
time.strftime("%y%m%d-%H%M%S-") + board)
if not isdir(project_dir):
os.makedirs(project_dir)
args = ["init", "--board", board]
args.extend(["--project-option", "framework = arduino"])
if use_arduino_libs:
args.extend(
["--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries"]
)
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
args.extend([
"--project-option",
"lib_extra_dirs = ~/Documents/Arduino/libraries"
])
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
d.addCallback(self._finalize_arduino_import, project_dir,
arduino_project_dir)
return d
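For reference, the argument list this method ends up passing to `PIOCoreRPC.call`, spelled out with hypothetical values (board "uno", Arduino libraries enabled, "vscode" as the core caller):

args = [
    "init", "--board", "uno",
    "--project-option", "framework = arduino",
    "--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries",
    "--ide", "vscode",
]
# PIOCoreRPC.call(args, options={"cwd": project_dir}) then runs the equivalent of
# `pio init --board uno ...` inside the freshly created project directory.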
@staticmethod
def _finalize_arduino_import(_, project_dir, arduino_project_dir):
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
if os.path.isdir(src_dir):
src_dir = get_project_src_dir()
if isdir(src_dir):
fs.rmtree(src_dir)
shutil.copytree(arduino_project_dir, src_dir)
return project_dir
@@ -307,21 +260,18 @@ class ProjectRPC(object):
def import_pio(project_dir):
if not project_dir or not is_platformio_project(project_dir):
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4001, message="Not an PlatformIO project: %s" % project_dir
)
new_project_dir = os.path.join(
AppRPC.load_state()["storage"]["projectsDir"],
time.strftime("%y%m%d-%H%M%S-") + os.path.basename(project_dir),
)
code=4001,
message="Not an PlatformIO project: %s" % project_dir)
new_project_dir = join(
AppRPC.load_state()['storage']['projectsDir'],
time.strftime("%y%m%d-%H%M%S-") + basename(project_dir))
shutil.copytree(project_dir, new_project_dir)
state = AppRPC.load_state()
args = ["init"]
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
d.addCallback(lambda _: new_project_dir)
return d

@@ -16,58 +16,49 @@
import click
import jsonrpc
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from autobahn.twisted.websocket import (WebSocketServerFactory,
WebSocketServerProtocol)
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer, reactor
from twisted.internet import defer
from platformio.compat import PY2, dump_json_to_unicode, is_bytes
class JSONRPCServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.connection_nums += 1
if self.factory.shutdown_timer:
self.factory.shutdown_timer.cancel()
self.factory.shutdown_timer = None
def onClose(self, wasClean, code, reason): # pylint: disable=unused-argument
self.factory.connection_nums -= 1
if self.factory.connection_nums == 0:
self.factory.shutdownByTimeout()
def onMessage(self, payload, isBinary): # pylint: disable=unused-argument
# click.echo("> %s" % payload)
response = jsonrpc.JSONRPCResponseManager.handle(
payload, self.factory.dispatcher
).data
payload, self.factory.dispatcher).data
# if error
if "result" not in response:
self.sendJSONResponse(response)
return None
d = defer.maybeDeferred(lambda: response["result"])
d = defer.maybeDeferred(lambda: response['result'])
d.addCallback(self._callback, response)
d.addErrback(self._errback, response)
return None
def _callback(self, result, response):
response["result"] = result
response['result'] = result
self.sendJSONResponse(response)
def _errback(self, failure, response):
if isinstance(failure.value, JSONRPCDispatchException):
e = failure.value
else:
e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
e = JSONRPCDispatchException(code=4999,
message=failure.getErrorMessage())
del response["result"]
response["error"] = e.error._data # pylint: disable=protected-access
response['error'] = e.error._data # pylint: disable=protected-access
self.sendJSONResponse(response)
def sendJSONResponse(self, response):
# click.echo("< %s" % response)
if "error" in response:
click.secho("Error: %s" % response["error"], fg="red", err=True)
click.secho("Error: %s" % response['error'], fg="red", err=True)
response = dump_json_to_unicode(response)
if not PY2 and not is_bytes(response):
response = response.encode("utf-8")
@@ -77,25 +68,10 @@ class JSONRPCServerProtocol(WebSocketServerProtocol):
class JSONRPCServerFactory(WebSocketServerFactory):
protocol = JSONRPCServerProtocol
connection_nums = 0
shutdown_timer = 0
def __init__(self, shutdown_timeout=0):
def __init__(self):
super(JSONRPCServerFactory, self).__init__()
self.shutdown_timeout = shutdown_timeout
self.dispatcher = jsonrpc.Dispatcher()
def shutdownByTimeout(self):
if self.shutdown_timeout < 1:
return
def _auto_shutdown_server():
click.echo("Automatically shutdown server on timeout")
reactor.stop()
self.shutdown_timer = reactor.callLater(
self.shutdown_timeout, _auto_shutdown_server
)
def addHandler(self, handler, namespace):
self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
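A minimal sketch of how this factory could be wired up behind a plain TCP listener; the port, handler instance, and shutdown timeout are hypothetical (the older variant of __init__ shown above takes no arguments):

from twisted.internet import reactor

factory = JSONRPCServerFactory(shutdown_timeout=3600)  # newer signature
factory.addHandler(ProjectRPC(), namespace="project")  # handler instance is assumed
reactor.listenTCP(8008, factory)                       # hypothetical port
reactor.run()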

@@ -17,12 +17,14 @@ from twisted.web import static # pylint: disable=import-error
class WebRoot(static.File):
def render_GET(self, request):
if request.args.get(b"__shutdown__", False):
if request.args.get("__shutdown__", False):
reactor.stop()
return "Server has been stopped"
request.setHeader("cache-control", "no-cache, no-store, must-revalidate")
request.setHeader("cache-control",
"no-cache, no-store, must-revalidate")
request.setHeader("pragma", "no-cache")
request.setHeader("expires", "0")
return static.File.render_GET(self, request)
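A minimal sketch of serving this root with Twisted's web server; the directory and port are hypothetical:

from twisted.internet import reactor
from twisted.web.server import Site

site = Site(WebRoot("/path/to/pio-home/web-root"))  # WebRoot is the static.File subclass above
reactor.listenTCP(8008, site)                       # hypothetical port
reactor.run()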

@@ -14,59 +14,22 @@
# pylint: disable=too-many-arguments,too-many-locals, too-many-branches
import os
from os import getcwd, makedirs
from os.path import isdir, isfile, join
import click
from tabulate import tabulate
from platformio import exception, fs
from platformio.commands.platform import platform_install as cli_platform_install
from platformio.commands.platform import \
platform_install as cli_platform_install
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.helpers import is_platformio_project
@click.group(short_help="Project Manager")
def cli():
pass
@cli.command("config", short_help="Show computed configuration")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--json-output", is_flag=True)
def project_config(project_dir, json_output):
if not is_platformio_project(project_dir):
raise NotPlatformIOProjectError(project_dir)
with fs.cd(project_dir):
config = ProjectConfig.get_instance()
if json_output:
return click.echo(config.to_json())
click.echo(
"Computed project configuration for %s" % click.style(project_dir, fg="cyan")
)
for section, options in config.as_tuple():
click.echo()
click.secho(section, fg="cyan")
click.echo("-" * len(section))
click.echo(
tabulate(
[
(name, "=", "\n".join(value) if isinstance(value, list) else value)
for name, value in options
],
tablefmt="plain",
)
)
return None
from platformio.project.helpers import (get_project_include_dir,
get_project_lib_dir,
get_project_src_dir,
get_project_test_dir,
is_platformio_project)
def validate_boards(ctx, param, value): # pylint: disable=W0613
@@ -77,66 +40,66 @@ def validate_boards(ctx, param, value): # pylint: disable=W0613
except exception.UnknownBoard:
raise click.BadParameter(
"`%s`. Please search for board ID using `platformio boards` "
"command" % id_
)
"command" % id_)
return value
@cli.command("init", short_help="Initialize a project or update existing")
@click.option(
"--project-dir",
"-d",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.command("init",
short_help="Initialize PlatformIO project or update existing")
@click.option("--project-dir",
"-d",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-b",
"--board",
multiple=True,
metavar="ID",
callback=validate_boards)
@click.option("--ide",
type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.option("-O", "--project-option", multiple=True)
@click.option("--env-prefix", default="")
@click.option("-s", "--silent", is_flag=True)
@click.pass_context
def project_init(
ctx, # pylint: disable=R0913
project_dir,
board,
ide,
project_option,
env_prefix,
silent,
):
def cli(
ctx, # pylint: disable=R0913
project_dir,
board,
ide,
project_option,
env_prefix,
silent):
if not silent:
if project_dir == os.getcwd():
click.secho("\nThe current working directory", fg="yellow", nl=False)
if project_dir == getcwd():
click.secho("\nThe current working directory",
fg="yellow",
nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho("will be used for the project.", fg="yellow")
click.echo("")
click.echo(
"The next files/directories have been created in %s"
% click.style(project_dir, fg="cyan")
)
click.echo(
"%s - Put project header files here" % click.style("include", fg="cyan")
)
click.echo(
"%s - Put here project specific (private) libraries"
% click.style("lib", fg="cyan")
)
click.echo("%s - Put project source files here" % click.style("src", fg="cyan"))
click.echo(
"%s - Project Configuration File" % click.style("platformio.ini", fg="cyan")
)
click.echo("The next files/directories have been created in %s" %
click.style(project_dir, fg="cyan"))
click.echo("%s - Put project header files here" %
click.style("include", fg="cyan"))
click.echo("%s - Put here project specific (private) libraries" %
click.style("lib", fg="cyan"))
click.echo("%s - Put project source files here" %
click.style("src", fg="cyan"))
click.echo("%s - Project Configuration File" %
click.style("platformio.ini", fg="cyan"))
is_new_project = not is_platformio_project(project_dir)
if is_new_project:
init_base_project(project_dir)
if board:
fill_project_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)
fill_project_envs(ctx, project_dir, board, project_option, env_prefix,
ide is not None)
if ide:
pg = ProjectGenerator(project_dir, ide, board)
@@ -152,9 +115,9 @@ def project_init(
if ide:
click.secho(
"\nProject has been successfully %s including configuration files "
"for `%s` IDE." % ("initialized" if is_new_project else "updated", ide),
fg="green",
)
"for `%s` IDE." %
("initialized" if is_new_project else "updated", ide),
fg="green")
else:
click.secho(
"\nProject has been successfully %s! Useful commands:\n"
@@ -162,34 +125,31 @@ def project_init(
"`pio run --target upload` or `pio run -t upload` "
"- upload firmware to a target\n"
"`pio run --target clean` - clean project (remove compiled files)"
"\n`pio run --help` - additional information"
% ("initialized" if is_new_project else "updated"),
fg="green",
)
"\n`pio run --help` - additional information" %
("initialized" if is_new_project else "updated"),
fg="green")
def init_base_project(project_dir):
ProjectConfig(join(project_dir, "platformio.ini")).save()
with fs.cd(project_dir):
config = ProjectConfig()
config.save()
dir_to_readme = [
(config.get_optional_dir("src"), None),
(config.get_optional_dir("include"), init_include_readme),
(config.get_optional_dir("lib"), init_lib_readme),
(config.get_optional_dir("test"), init_test_readme),
(get_project_src_dir(), None),
(get_project_include_dir(), init_include_readme),
(get_project_lib_dir(), init_lib_readme),
(get_project_test_dir(), init_test_readme),
]
for (path, cb) in dir_to_readme:
if os.path.isdir(path):
if isdir(path):
continue
os.makedirs(path)
makedirs(path)
if cb:
cb(path)
def init_include_readme(include_dir):
with open(os.path.join(include_dir, "README"), "w") as fp:
fp.write(
"""
with open(join(include_dir, "README"), "w") as f:
f.write("""
This directory is intended for project header files.
A header file is a file containing C declarations and macro definitions
@@ -228,15 +188,12 @@ Read more about using header files in official GCC documentation:
* Computed Includes
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
""",
)
""")
def init_lib_readme(lib_dir):
# pylint: disable=line-too-long
with open(os.path.join(lib_dir, "README"), "w") as fp:
fp.write(
"""
with open(join(lib_dir, "README"), "w") as f:
f.write("""
This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link into executable file.
@@ -282,14 +239,12 @@ libraries scanning project source files.
More information about PlatformIO Library Dependency Finder
- https://docs.platformio.org/page/librarymanager/ldf.html
""",
)
""")
def init_test_readme(test_dir):
with open(os.path.join(test_dir, "README"), "w") as fp:
fp.write(
"""
with open(join(test_dir, "README"), "w") as f:
f.write("""
This directory is intended for PIO Unit Testing and project tests.
Unit Testing is a software testing method by which individual units of
@@ -300,17 +255,15 @@ in the development cycle.
More information about PIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
""",
)
""")
def init_ci_conf(project_dir):
conf_path = os.path.join(project_dir, ".travis.yml")
if os.path.isfile(conf_path):
conf_path = join(project_dir, ".travis.yml")
if isfile(conf_path):
return
with open(conf_path, "w") as fp:
fp.write(
"""# Continuous Integration (CI) is the practice, in software
with open(conf_path, "w") as f:
f.write("""# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
@@ -377,27 +330,27 @@ def init_ci_conf(project_dir):
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
""",
)
""")
def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
conf_path = join(project_dir, ".gitignore")
if isfile(conf_path):
return
with open(conf_path, "w") as fp:
fp.write(".pio\n")
def fill_project_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
force_download):
config = ProjectConfig(join(project_dir, "platformio.ini"),
parse_extra=False)
used_boards = []
for section in config.sections():
cond = [section.startswith("env:"), config.has_option(section, "board")]
cond = [
section.startswith("env:"),
config.has_option(section, "board")
]
if all(cond):
used_boards.append(config.get(section, "board"))
@@ -406,17 +359,17 @@ def fill_project_envs(
modified = False
for id_ in board_ids:
board_config = pm.board_config(id_)
used_platforms.append(board_config["platform"])
used_platforms.append(board_config['platform'])
if id_ in used_boards:
continue
used_boards.append(id_)
modified = True
envopts = {"platform": board_config["platform"], "board": id_}
envopts = {"platform": board_config['platform'], "board": id_}
# find default framework for board
frameworks = board_config.get("frameworks")
if frameworks:
envopts["framework"] = frameworks[0]
envopts['framework'] = frameworks[0]
for item in project_option:
if "=" not in item:
@@ -438,9 +391,10 @@ def fill_project_envs(
def _install_dependent_platforms(ctx, platforms):
installed_platforms = [p["name"] for p in PlatformManager().get_installed()]
installed_platforms = [
p['name'] for p in PlatformManager().get_installed()
]
if set(platforms) <= set(installed_platforms):
return
ctx.invoke(
cli_platform_install, platforms=list(set(platforms) - set(installed_platforms))
)
ctx.invoke(cli_platform_install,
platforms=list(set(platforms) - set(installed_platforms)))
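For illustration, the set arithmetic above with hypothetical platform names:

platforms = ["atmelavr", "espressif32"]      # required by the chosen boards
installed_platforms = ["atmelavr"]           # already present
missing = list(set(platforms) - set(installed_platforms))  # -> ["espressif32"]
# ctx.invoke(cli_platform_install, platforms=missing) installs only what is missing.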

@@ -14,8 +14,8 @@
# pylint: disable=too-many-branches, too-many-locals
import os
import time
from os.path import isdir, join
import click
import semantic_version
@@ -24,13 +24,14 @@ from tabulate import tabulate
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.compat import dump_json_to_unicode
from platformio.managers.lib import LibraryManager, get_builtin_libs, is_builtin_lib
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema
from platformio.managers.lib import (LibraryManager, get_builtin_libs,
is_builtin_lib)
from platformio.proc import is_ci
from platformio.project.config import ProjectConfig
from platformio.project.exception import InvalidProjectConfError
from platformio.project.helpers import get_project_dir, is_platformio_project
from platformio.project.helpers import (get_project_dir,
get_project_global_lib_dir,
get_project_libdeps_dir,
is_platformio_project)
try:
from urllib.parse import quote
@@ -43,43 +44,36 @@ CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs"
CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
def get_project_global_lib_dir():
return ProjectConfig.get_instance().get_optional_dir("globallib")
@click.group(short_help="Library Manager")
@click.option(
"-d",
"--storage-dir",
multiple=True,
default=None,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
help="Manage custom library storage",
)
@click.option(
"-g", "--global", is_flag=True, help="Manage global PlatformIO library storage"
)
@click.option("-d",
"--storage-dir",
multiple=True,
default=None,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True),
help="Manage custom library storage")
@click.option("-g",
"--global",
is_flag=True,
help="Manage global PlatformIO library storage")
@click.option(
"-e",
"--environment",
multiple=True,
help=(
"Manage libraries for the specific project build environments "
"declared in `platformio.ini`"
),
)
help=("Manage libraries for the specific project build environments "
"declared in `platformio.ini`"))
@click.pass_context
def cli(ctx, **options):
storage_cmds = ("install", "uninstall", "update", "list")
# skip commands that don't need storage folder
if ctx.invoked_subcommand not in storage_cmds or (
len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")
):
if ctx.invoked_subcommand not in storage_cmds or \
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
return
storage_dirs = list(options["storage_dir"])
if options["global"]:
storage_dirs = list(options['storage_dir'])
if options['global']:
storage_dirs.append(get_project_global_lib_dir())
if not storage_dirs:
if is_platformio_project():
@@ -90,16 +84,15 @@ def cli(ctx, **options):
"Warning! Global library storage is used automatically. "
"Please use `platformio lib --global %s` command to remove "
"this warning." % ctx.invoked_subcommand,
fg="yellow",
)
fg="yellow")
if not storage_dirs:
raise exception.NotGlobalLibDir(
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
)
raise exception.NotGlobalLibDir(get_project_dir(),
get_project_global_lib_dir(),
ctx.invoked_subcommand)
in_silence = PlatformioCLI.in_silence()
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options["environment"]
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options['environment']
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY] = {}
@@ -108,19 +101,17 @@ def cli(ctx, **options):
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
continue
with fs.cd(storage_dir):
config = ProjectConfig.get_instance(
os.path.join(storage_dir, "platformio.ini")
)
config.validate(options["environment"], silent=in_silence)
libdeps_dir = config.get_optional_dir("libdeps")
for env in config.envs():
if options["environment"] and env not in options["environment"]:
continue
storage_dir = os.path.join(libdeps_dir, env)
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
"env:" + env, "lib_deps", []
)
libdeps_dir = get_project_libdeps_dir()
config = ProjectConfig.get_instance(join(storage_dir,
"platformio.ini"))
config.validate(options['environment'], silent=in_silence)
for env in config.envs():
if options['environment'] and env not in options['environment']:
continue
storage_dir = join(libdeps_dir, env)
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
"env:" + env, "lib_deps", [])
@cli.command("install", short_help="Install library")
@@ -128,19 +119,21 @@ def cli(ctx, **options):
@click.option(
"--save",
is_flag=True,
help="Save installed libraries into the `platformio.ini` dependency list",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"--interactive", is_flag=True, help="Allow to make a choice for all prompts"
)
@click.option(
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
)
help="Save installed libraries into the `platformio.ini` dependency list")
@click.option("-s",
"--silent",
is_flag=True,
help="Suppress progress reporting")
@click.option("--interactive",
is_flag=True,
help="Allow to make a choice for all prompts")
@click.option("-f",
"--force",
is_flag=True,
help="Reinstall/redownload library if exists")
@click.pass_context
def lib_install( # pylint: disable=too-many-arguments
ctx, libraries, save, silent, interactive, force
):
ctx, libraries, save, silent, interactive, force):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
@@ -151,22 +144,25 @@ def lib_install( # pylint: disable=too-many-arguments
lm = LibraryManager(storage_dir)
if libraries:
for library in libraries:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
pkg_dir = lm.install(library,
silent=silent,
interactive=interactive,
force=force)
installed_manifests[library] = lm.load_manifest(pkg_dir)
elif storage_dir in storage_libdeps:
builtin_lib_storages = None
for library in storage_libdeps[storage_dir]:
try:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
pkg_dir = lm.install(library,
silent=silent,
interactive=interactive,
force=force)
installed_manifests[library] = lm.load_manifest(pkg_dir)
except exception.LibNotFound as e:
if builtin_lib_storages is None:
builtin_lib_storages = get_builtin_libs()
if not silent or not is_builtin_lib(builtin_lib_storages, library):
if not silent or not is_builtin_lib(
builtin_lib_storages, library):
click.secho("Warning! %s" % e, fg="yellow")
if not save or not libraries:
@@ -175,23 +171,20 @@ def lib_install( # pylint: disable=too-many-arguments
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
for input_dir in input_dirs:
config = ProjectConfig.get_instance(os.path.join(input_dir, "platformio.ini"))
config = ProjectConfig.get_instance(join(input_dir, "platformio.ini"))
config.validate(project_environments)
for env in config.envs():
if project_environments and env not in project_environments:
continue
config.expand_interpolations = False
try:
lib_deps = config.get("env:" + env, "lib_deps")
except InvalidProjectConfError:
lib_deps = []
lib_deps = config.get("env:" + env, "lib_deps", [])
for library in libraries:
if library in lib_deps:
continue
manifest = installed_manifests[library]
try:
assert library.lower() == manifest["name"].lower()
assert semantic_version.Version(manifest["version"])
assert library.lower() == manifest['name'].lower()
assert semantic_version.Version(manifest['version'])
lib_deps.append("{name}@^{version}".format(**manifest))
except (AssertionError, ValueError):
lib_deps.append(library)
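For illustration, the pinned dependency string produced by the happy path above, with a hypothetical manifest:

manifest = {"name": "ArduinoJson", "version": "6.11.4"}  # hypothetical manifest
assert "{name}@^{version}".format(**manifest) == "ArduinoJson@^6.11.4"
# When the name or version check fails, the raw library spec is appended instead.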
@@ -213,15 +206,13 @@ def lib_uninstall(ctx, libraries):
@cli.command("update", short_help="Update installed libraries")
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@click.option("--json-output", is_flag=True)
@click.pass_context
def lib_update(ctx, libraries, only_check, dry_run, json_output):
@@ -235,12 +226,14 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
_libraries = libraries
if not _libraries:
_libraries = [manifest["__pkg_dir"] for manifest in lm.get_installed()]
_libraries = [
manifest['__pkg_dir'] for manifest in lm.get_installed()
]
if only_check and json_output:
result = []
for library in _libraries:
pkg_dir = library if os.path.isdir(library) else None
pkg_dir = library if isdir(library) else None
requirements = None
url = None
if not pkg_dir:
@@ -252,7 +245,7 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
if not latest:
continue
manifest = lm.load_manifest(pkg_dir)
manifest["versionLatest"] = latest
manifest['versionLatest'] = latest
result.append(manifest)
json_result[storage_dir] = result
else:
@@ -261,10 +254,8 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
if json_output:
return click.echo(
dump_json_to_unicode(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
dump_json_to_unicode(json_result[storage_dirs[0]]
if len(storage_dirs) == 1 else json_result))
return True
@@ -283,17 +274,15 @@ def lib_list(ctx, json_output):
if json_output:
json_result[storage_dir] = items
elif items:
for item in sorted(items, key=lambda i: i["name"]):
for item in sorted(items, key=lambda i: i['name']):
print_lib_item(item)
else:
click.echo("No items found")
if json_output:
return click.echo(
dump_json_to_unicode(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
dump_json_to_unicode(json_result[storage_dirs[0]]
if len(storage_dirs) == 1 else json_result))
return True
@@ -309,11 +298,9 @@ def lib_list(ctx, json_output):
@click.option("-f", "--framework", multiple=True)
@click.option("-p", "--platform", multiple=True)
@click.option("-i", "--header", multiple=True)
@click.option(
"--noninteractive",
is_flag=True,
help="Do not prompt, automatically paginate with delay",
)
@click.option("--noninteractive",
is_flag=True,
help="Do not prompt, automatically paginate with delay")
def lib_search(query, json_output, page, noninteractive, **filters):
if not query:
query = []
@@ -324,61 +311,55 @@ def lib_search(query, json_output, page, noninteractive, **filters):
for value in values:
query.append('%s:"%s"' % (key, value))
result = util.get_api_result(
"/v2/lib/search", dict(query=" ".join(query), page=page), cache_valid="1d"
)
result = util.get_api_result("/v2/lib/search",
dict(query=" ".join(query), page=page),
cache_valid="1d")
if json_output:
click.echo(dump_json_to_unicode(result))
return
if result["total"] == 0:
if result['total'] == 0:
click.secho(
"Nothing has been found by your request\n"
"Try a less-specific search or use truncation (or wildcard) "
"operator",
fg="yellow",
nl=False,
)
nl=False)
click.secho(" *", fg="green")
click.secho("For example: DS*, PCA*, DHT* and etc.\n", fg="yellow")
click.echo(
"For more examples and advanced search syntax, please use documentation:"
)
click.echo("For more examples and advanced search syntax, "
"please use documentation:")
click.secho(
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
fg="cyan",
)
fg="cyan")
return
click.secho(
"Found %d libraries:\n" % result["total"],
fg="green" if result["total"] else "yellow",
)
click.secho("Found %d libraries:\n" % result['total'],
fg="green" if result['total'] else "yellow")
while True:
for item in result["items"]:
for item in result['items']:
print_lib_item(item)
if int(result["page"]) * int(result["perpage"]) >= int(result["total"]):
if (int(result['page']) * int(result['perpage']) >= int(
result['total'])):
break
if noninteractive:
click.echo()
click.secho(
"Loading next %d libraries... Press Ctrl+C to stop!"
% result["perpage"],
fg="yellow",
)
click.secho("Loading next %d libraries... Press Ctrl+C to stop!" %
result['perpage'],
fg="yellow")
click.echo()
time.sleep(5)
elif not click.confirm("Show next libraries?"):
break
result = util.get_api_result(
"/v2/lib/search",
{"query": " ".join(query), "page": int(result["page"]) + 1},
cache_valid="1d",
)
result = util.get_api_result("/v2/lib/search", {
"query": " ".join(query),
"page": int(result['page']) + 1
},
cache_valid="1d")
@cli.command("builtin", short_help="List built-in libraries")
@@ -390,13 +371,13 @@ def lib_builtin(storage, json_output):
return click.echo(dump_json_to_unicode(items))
for storage_ in items:
if not storage_["items"]:
if not storage_['items']:
continue
click.secho(storage_["name"], fg="green")
click.echo("*" * len(storage_["name"]))
click.secho(storage_['name'], fg="green")
click.echo("*" * len(storage_['name']))
click.echo()
for item in sorted(storage_["items"], key=lambda i: i["name"]):
for item in sorted(storage_['items'], key=lambda i: i['name']):
print_lib_item(item)
return True
@@ -408,29 +389,27 @@ def lib_builtin(storage, json_output):
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_uri(library)
lib_id = lm.search_lib_id(
{"name": name, "requirements": requirements},
silent=json_output,
interactive=not json_output,
)
lib_id = lm.search_lib_id({
"name": name,
"requirements": requirements
},
silent=json_output,
interactive=not json_output)
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
if json_output:
return click.echo(dump_json_to_unicode(lib))
click.secho(lib["name"], fg="cyan")
click.echo("=" * len(lib["name"]))
click.secho("#ID: %d" % lib["id"], bold=True)
click.echo(lib["description"])
click.secho(lib['name'], fg="cyan")
click.echo("=" * len(lib['name']))
click.secho("#ID: %d" % lib['id'], bold=True)
click.echo(lib['description'])
click.echo()
click.echo(
"Version: %s, released %s"
% (
lib["version"]["name"],
time.strftime("%c", util.parse_date(lib["version"]["released"])),
)
)
click.echo("Manifest: %s" % lib["confurl"])
"Version: %s, released %s" %
(lib['version']['name'],
time.strftime("%c", util.parse_date(lib['version']['released']))))
click.echo("Manifest: %s" % lib['confurl'])
for key in ("homepage", "repository", "license"):
if key not in lib or not lib[key]:
continue
@@ -457,33 +436,23 @@ def lib_show(library, json_output):
if _authors:
blocks.append(("Authors", _authors))
blocks.append(("Keywords", lib["keywords"]))
blocks.append(("Keywords", lib['keywords']))
for key in ("frameworks", "platforms"):
if key not in lib or not lib[key]:
continue
blocks.append(("Compatible %s" % key, [i["title"] for i in lib[key]]))
blocks.append(("Headers", lib["headers"]))
blocks.append(("Examples", lib["examples"]))
blocks.append(
(
"Versions",
[
"%s, released %s"
% (v["name"], time.strftime("%c", util.parse_date(v["released"])))
for v in lib["versions"]
],
)
)
blocks.append(
(
"Unique Downloads",
[
"Today: %s" % lib["dlstats"]["day"],
"Week: %s" % lib["dlstats"]["week"],
"Month: %s" % lib["dlstats"]["month"],
],
)
)
blocks.append(("Compatible %s" % key, [i['title'] for i in lib[key]]))
blocks.append(("Headers", lib['headers']))
blocks.append(("Examples", lib['examples']))
blocks.append(("Versions", [
"%s, released %s" %
(v['name'], time.strftime("%c", util.parse_date(v['released'])))
for v in lib['versions']
]))
blocks.append(("Unique Downloads", [
"Today: %s" % lib['dlstats']['day'],
"Week: %s" % lib['dlstats']['week'],
"Month: %s" % lib['dlstats']['month']
]))
for (title, rows) in blocks:
click.echo()
@@ -498,20 +467,16 @@ def lib_show(library, json_output):
@cli.command("register", short_help="Register a new library")
@click.argument("config_url")
def lib_register(config_url):
if not config_url.startswith("http://") and not config_url.startswith("https://"):
if (not config_url.startswith("http://")
and not config_url.startswith("https://")):
raise exception.InvalidLibConfURL(config_url)
# Validate manifest
ManifestSchema().load_manifest(
ManifestParserFactory.new_from_url(config_url).as_dict()
)
result = util.get_api_result("/lib/register", data=dict(config_url=config_url))
if "message" in result and result["message"]:
click.secho(
result["message"],
fg="green" if "successed" in result and result["successed"] else "red",
)
result = util.get_api_result("/lib/register",
data=dict(config_url=config_url))
if "message" in result and result['message']:
click.secho(result['message'],
fg="green" if "successed" in result and result['successed']
else "red")
@cli.command("stats", short_help="Library Registry Statistics")
@@ -523,56 +488,46 @@ def lib_stats(json_output):
return click.echo(dump_json_to_unicode(result))
for key in ("updated", "added"):
tabular_data = [
(
click.style(item["name"], fg="cyan"),
time.strftime("%c", util.parse_date(item["date"])),
"https://platformio.org/lib/show/%s/%s"
% (item["id"], quote(item["name"])),
)
for item in result.get(key, [])
]
table = tabulate(
tabular_data,
headers=[click.style("RECENTLY " + key.upper(), bold=True), "Date", "URL"],
)
tabular_data = [(click.style(item['name'], fg="cyan"),
time.strftime("%c", util.parse_date(item['date'])),
"https://platformio.org/lib/show/%s/%s" %
(item['id'], quote(item['name'])))
for item in result.get(key, [])]
table = tabulate(tabular_data,
headers=[
click.style("RECENTLY " + key.upper(), bold=True),
"Date", "URL"
])
click.echo(table)
click.echo()
for key in ("lastkeywords", "topkeywords"):
tabular_data = [
(
click.style(name, fg="cyan"),
"https://platformio.org/lib/search?query=" + quote("keyword:%s" % name),
)
for name in result.get(key, [])
]
tabular_data = [(click.style(name, fg="cyan"),
"https://platformio.org/lib/search?query=" +
quote("keyword:%s" % name))
for name in result.get(key, [])]
table = tabulate(
tabular_data,
headers=[
click.style(
("RECENT" if key == "lastkeywords" else "POPULAR") + " KEYWORDS",
bold=True,
),
"URL",
],
)
("RECENT" if key == "lastkeywords" else "POPULAR") +
" KEYWORDS",
bold=True), "URL"
])
click.echo(table)
click.echo()
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth", "Month")):
tabular_data = [
(
click.style(item["name"], fg="cyan"),
"https://platformio.org/lib/show/%s/%s"
% (item["id"], quote(item["name"])),
)
for item in result.get(key, [])
]
table = tabulate(
tabular_data,
headers=[click.style("FEATURED: " + title.upper(), bold=True), "URL"],
)
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
"Month")):
tabular_data = [(click.style(item['name'], fg="cyan"),
"https://platformio.org/lib/show/%s/%s" %
(item['id'], quote(item['name'])))
for item in result.get(key, [])]
table = tabulate(tabular_data,
headers=[
click.style("FEATURED: " + title.upper(),
bold=True), "URL"
])
click.echo(table)
click.echo()
@@ -583,16 +538,15 @@ def print_storage_header(storage_dirs, storage_dir):
if storage_dirs and storage_dirs[0] != storage_dir:
click.echo("")
click.echo(
click.style("Library Storage: ", bold=True)
+ click.style(storage_dir, fg="blue")
)
click.style("Library Storage: ", bold=True) +
click.style(storage_dir, fg="blue"))
def print_lib_item(item):
click.secho(item["name"], fg="cyan")
click.echo("=" * len(item["name"]))
click.secho(item['name'], fg="cyan")
click.echo("=" * len(item['name']))
if "id" in item:
click.secho("#ID: %d" % item["id"], bold=True)
click.secho("#ID: %d" % item['id'], bold=True)
if "description" in item or "url" in item:
click.echo(item.get("description", item.get("url", "")))
click.echo()
@@ -608,26 +562,14 @@ def print_lib_item(item):
for key in ("frameworks", "platforms"):
if key not in item:
continue
click.echo(
"Compatible %s: %s"
% (
key,
", ".join(
[i["title"] if isinstance(i, dict) else i for i in item[key]]
),
)
)
click.echo("Compatible %s: %s" % (key, ", ".join(
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
if "authors" in item or "authornames" in item:
click.echo(
"Authors: %s"
% ", ".join(
item.get(
"authornames", [a.get("name", "") for a in item.get("authors", [])]
)
)
)
click.echo("Authors: %s" % ", ".join(
item.get("authornames",
[a.get("name", "") for a in item.get("authors", [])])))
if "__src_url" in item:
click.secho("Source: %s" % item["__src_url"])
click.secho("Source: %s" % item['__src_url'])
click.echo()

@@ -20,7 +20,6 @@ from platformio import app, exception, util
from platformio.commands.boards import print_boards
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory, PlatformManager
from platformio.package.pack import PackagePacker
@click.group(short_help="Platform Manager")
@@ -30,27 +29,24 @@ def cli():
def _print_platforms(platforms):
for platform in platforms:
click.echo(
"{name} ~ {title}".format(
name=click.style(platform["name"], fg="cyan"), title=platform["title"]
)
)
click.echo("=" * (3 + len(platform["name"] + platform["title"])))
click.echo(platform["description"])
click.echo("{name} ~ {title}".format(name=click.style(platform['name'],
fg="cyan"),
title=platform['title']))
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
click.echo(platform['description'])
click.echo()
if "homepage" in platform:
click.echo("Home: %s" % platform["homepage"])
if "frameworks" in platform and platform["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(platform["frameworks"]))
click.echo("Home: %s" % platform['homepage'])
if "frameworks" in platform and platform['frameworks']:
click.echo("Frameworks: %s" % ", ".join(platform['frameworks']))
if "packages" in platform:
click.echo("Packages: %s" % ", ".join(platform["packages"]))
click.echo("Packages: %s" % ", ".join(platform['packages']))
if "version" in platform:
if "__src_url" in platform:
click.echo(
"Version: #%s (%s)" % (platform["version"], platform["__src_url"])
)
click.echo("Version: #%s (%s)" %
(platform['version'], platform['__src_url']))
else:
click.echo("Version: " + platform["version"])
click.echo("Version: " + platform['version'])
click.echo()
@@ -58,7 +54,7 @@ def _get_registry_platforms():
platforms = util.get_api_result("/platforms", cache_valid="7d")
pm = PlatformManager()
for platform in platforms or []:
platform["versions"] = pm.get_all_repo_versions(platform["name"])
platform['versions'] = pm.get_all_repo_versions(platform['name'])
return platforms
@@ -69,22 +65,22 @@ def _get_platform_data(*args, **kwargs):
return _get_registry_platform_data(*args, **kwargs)
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
def _get_installed_platform_data(platform,
with_boards=True,
expose_packages=True):
p = PlatformFactory.newPlatform(platform)
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [],
)
data = dict(name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [])
# if dump to API
# del data['version']
@@ -98,20 +94,18 @@ def _get_installed_platform_data(platform, with_boards=True, expose_packages=Tru
data[key] = manifest[key]
if with_boards:
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
if not data["packages"] or not expose_packages:
if not data['packages'] or not expose_packages:
return data
data["packages"] = []
data['packages'] = []
installed_pkgs = p.get_installed_packages()
for name, opts in p.packages.items():
item = dict(
name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True,
)
item = dict(name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
if key not in ("url", "version", "description"):
@@ -119,42 +113,40 @@ def _get_installed_platform_data(platform, with_boards=True, expose_packages=Tru
item[key] = value
if key == "version":
item["originalVersion"] = util.get_original_version(value)
data["packages"].append(item)
data['packages'].append(item)
return data
def _get_registry_platform_data( # pylint: disable=unused-argument
platform, with_boards=True, expose_packages=True
):
platform,
with_boards=True,
expose_packages=True):
_data = None
for p in _get_registry_platforms():
if p["name"] == platform:
if p['name'] == platform:
_data = p
break
if not _data:
return None
data = dict(
name=_data["name"],
title=_data["title"],
description=_data["description"],
homepage=_data["homepage"],
repository=_data["repository"],
url=_data["url"],
license=_data["license"],
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data["versions"],
)
data = dict(name=_data['name'],
title=_data['title'],
description=_data['description'],
homepage=_data['homepage'],
repository=_data['repository'],
url=_data['url'],
license=_data['license'],
forDesktop=_data['forDesktop'],
frameworks=_data['frameworks'],
packages=_data['packages'],
versions=_data['versions'])
if with_boards:
data["boards"] = [
board
for board in PlatformManager().get_registered_boards()
if board["platform"] == _data["name"]
data['boards'] = [
board for board in PlatformManager().get_registered_boards()
if board['platform'] == _data['name']
]
return data
@@ -172,10 +164,9 @@ def platform_search(query, json_output):
if query and query.lower() not in search_data.lower():
continue
platforms.append(
_get_registry_platform_data(
platform["name"], with_boards=False, expose_packages=False
)
)
_get_registry_platform_data(platform['name'],
with_boards=False,
expose_packages=False))
if json_output:
click.echo(dump_json_to_unicode(platforms))
@@ -194,15 +185,15 @@ def platform_frameworks(query, json_output):
search_data = dump_json_to_unicode(framework)
if query and query.lower() not in search_data.lower():
continue
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
framework["platforms"] = [
platform["name"]
for platform in _get_registry_platforms()
if framework["name"] in platform["frameworks"]
framework['homepage'] = ("https://platformio.org/frameworks/" +
framework['name'])
framework['platforms'] = [
platform['name'] for platform in _get_registry_platforms()
if framework['name'] in platform['frameworks']
]
frameworks.append(framework)
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
if json_output:
click.echo(dump_json_to_unicode(frameworks))
else:
@@ -216,12 +207,11 @@ def platform_list(json_output):
pm = PlatformManager()
for manifest in pm.get_installed():
platforms.append(
_get_installed_platform_data(
manifest["__pkg_dir"], with_boards=False, expose_packages=False
)
)
_get_installed_platform_data(manifest['__pkg_dir'],
with_boards=False,
expose_packages=False))
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
platforms = sorted(platforms, key=lambda manifest: manifest['name'])
if json_output:
click.echo(dump_json_to_unicode(platforms))
else:
@@ -238,58 +228,55 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
if json_output:
return click.echo(dump_json_to_unicode(data))
click.echo(
"{name} ~ {title}".format(
name=click.style(data["name"], fg="cyan"), title=data["title"]
)
)
click.echo("=" * (3 + len(data["name"] + data["title"])))
click.echo(data["description"])
click.echo("{name} ~ {title}".format(name=click.style(data['name'],
fg="cyan"),
title=data['title']))
click.echo("=" * (3 + len(data['name'] + data['title'])))
click.echo(data['description'])
click.echo()
if "version" in data:
click.echo("Version: %s" % data["version"])
if data["homepage"]:
click.echo("Home: %s" % data["homepage"])
if data["repository"]:
click.echo("Repository: %s" % data["repository"])
if data["url"]:
click.echo("Vendor: %s" % data["url"])
if data["license"]:
click.echo("License: %s" % data["license"])
if data["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
click.echo("Version: %s" % data['version'])
if data['homepage']:
click.echo("Home: %s" % data['homepage'])
if data['repository']:
click.echo("Repository: %s" % data['repository'])
if data['url']:
click.echo("Vendor: %s" % data['url'])
if data['license']:
click.echo("License: %s" % data['license'])
if data['frameworks']:
click.echo("Frameworks: %s" % ", ".join(data['frameworks']))
if not data["packages"]:
if not data['packages']:
return None
if not isinstance(data["packages"][0], dict):
click.echo("Packages: %s" % ", ".join(data["packages"]))
if not isinstance(data['packages'][0], dict):
click.echo("Packages: %s" % ", ".join(data['packages']))
else:
click.echo()
click.secho("Packages", bold=True)
click.echo("--------")
for item in data["packages"]:
for item in data['packages']:
click.echo()
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
click.echo("-" * (8 + len(item["name"])))
if item["type"]:
click.echo("Type: %s" % item["type"])
click.echo("Requirements: %s" % item["requirements"])
click.echo(
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
)
click.echo("Package %s" % click.style(item['name'], fg="yellow"))
click.echo("-" * (8 + len(item['name'])))
if item['type']:
click.echo("Type: %s" % item['type'])
click.echo("Requirements: %s" % item['requirements'])
click.echo("Installed: %s" %
("Yes" if item.get("version") else "No (optional)"))
if "version" in item:
click.echo("Version: %s" % item["version"])
click.echo("Version: %s" % item['version'])
if "originalVersion" in item:
click.echo("Original version: %s" % item["originalVersion"])
click.echo("Original version: %s" % item['originalVersion'])
if "description" in item:
click.echo("Description: %s" % item["description"])
click.echo("Description: %s" % item['description'])
if data["boards"]:
if data['boards']:
click.echo()
click.secho("Boards", bold=True)
click.echo("------")
print_boards(data["boards"])
print_boards(data['boards'])
return True
@@ -299,37 +286,24 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
@click.option("--with-package", multiple=True)
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option(
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install( # pylint: disable=too-many-arguments
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
force,
):
help="Reinstall/redownload dev/platform and its packages if exist")
def platform_install(platforms, with_package, without_package,
skip_default_package, force):
pm = PlatformManager()
for platform in platforms:
if pm.install(
name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
with_all_packages=with_all_packages,
force=force,
):
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"depending on your build environment." % platform,
fg="green",
)
if pm.install(name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
force=force):
click.secho("The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"depending on your build environment." % platform,
fg="green")
@cli.command("uninstall", short_help="Uninstall development platform")
@@ -338,39 +312,35 @@ def platform_uninstall(platforms):
pm = PlatformManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully uninstalled!" % platform,
fg="green",
)
click.secho("The platform '%s' has been successfully "
"uninstalled!" % platform,
fg="green")
@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option(
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-p",
"--only-packages",
is_flag=True,
help="Update only the platform packages")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals
platforms, only_packages, only_check, dry_run, json_output
):
platforms, only_packages, only_check, dry_run, json_output):
pm = PlatformManager()
pkg_dir_to_name = {}
if not platforms:
platforms = []
for manifest in pm.get_installed():
platforms.append(manifest["__pkg_dir"])
pkg_dir_to_name[manifest["__pkg_dir"]] = manifest.get(
"title", manifest["name"]
)
platforms.append(manifest['__pkg_dir'])
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
"title", manifest['name'])
only_check = dry_run or only_check
@@ -386,16 +356,14 @@ def platform_update( # pylint: disable=too-many-locals
if not pkg_dir:
continue
latest = pm.outdated(pkg_dir, requirements)
if (
not latest
and not PlatformFactory.newPlatform(pkg_dir).are_outdated_packages()
):
if (not latest and not PlatformFactory.newPlatform(
pkg_dir).are_outdated_packages()):
continue
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False
)
data = _get_installed_platform_data(pkg_dir,
with_boards=False,
expose_packages=False)
if latest:
data["versionLatest"] = latest
data['versionLatest'] = latest
result.append(data)
return click.echo(dump_json_to_unicode(result))
@@ -403,21 +371,10 @@ def platform_update( # pylint: disable=too-many-locals
app.clean_cache()
for platform in platforms:
click.echo(
"Platform %s"
% click.style(pkg_dir_to_name.get(platform, platform), fg="cyan")
)
"Platform %s" %
click.style(pkg_dir_to_name.get(platform, platform), fg="cyan"))
click.echo("--------")
pm.update(platform, only_packages=only_packages, only_check=only_check)
click.echo()
return True
@cli.command(
"pack", short_help="Create a tarball from development platform/tool package"
)
@click.argument("package", required=True, metavar="[source directory, tar.gz or zip]")
def platform_pack(package):
p = PackagePacker(package)
tarball_path = p.pack()
click.secho('Wrote a tarball to "%s"' % tarball_path, fg="green")
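A minimal usage sketch of the packer behind this command; the source directory is hypothetical:

from platformio.package.pack import PackagePacker

tarball_path = PackagePacker("/path/to/my-platform").pack()  # directory, tar.gz or zip
print('Wrote a tarball to "%s"' % tarball_path)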

@@ -0,0 +1,205 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import threading
from os import getcwd
from os.path import isfile, join
from tempfile import mkdtemp
from time import sleep
import click
from platformio import exception, fs
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.compat import get_file_contents
from platformio.managers.core import pioplus_call
# pylint: disable=unused-argument
@click.group("remote", short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
def cli(**kwargs):
pass
@cli.group("agent", short_help="Start new agent or list active")
def remote_agent():
pass
@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option("-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
def remote_agent_start(**kwargs):
pioplus_call(sys.argv[1:])
@remote_agent.command("reload", short_help="Reload agents")
def remote_agent_reload():
pioplus_call(sys.argv[1:])
@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
pioplus_call(sys.argv[1:])
@cli.command("update",
short_help="Update installed Platforms, Packages and Libraries")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
def remote_update(only_check, dry_run):
pioplus_call(sys.argv[1:])
@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def remote_run(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def remote_test(**kwargs):
pioplus_call(sys.argv[1:])
@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
pass
@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
def device_list(json_output):
pioplus_call(sys.argv[1:])
@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud",
"-b",
type=int,
default=9600,
help="Set baud rate, default=9600")
@click.option("--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N")
@click.option("--rtscts",
is_flag=True,
help="Enable RTS/CTS flow control, default=Off")
@click.option("--xonxoff",
is_flag=True,
help="Enable software flow control, default=Off")
@click.option("--rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state")
@click.option("--dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state")
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option("--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8")
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option("--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF")
@click.option("--raw",
is_flag=True,
help="Do not apply any encodings/transformations")
@click.option("--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)")
@click.option("--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)")
@click.option("--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off")
@click.pass_context
def device_monitor(ctx, **kwargs):
def _tx_target(sock_dir):
try:
pioplus_call(sys.argv[1:] + ["--sock", sock_dir])
except exception.ReturnErrorCode:
pass
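# The pioplus subprocess writes the port it opened to a "sock" file in a
# temporary directory; poll for that file, then attach the local device
# monitor to the reported port.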
sock_dir = mkdtemp(suffix="pioplus")
sock_file = join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir, ))
t.start()
while t.is_alive() and not isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
kwargs['port'] = get_file_contents(sock_file)
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)
finally:
fs.rmtree(sock_dir)

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,91 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
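# Base class for asynchronous commands driven over Perspective Broker:
# buffers command output, exposes read/write/close hooks and keeps the
# final exit code.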
class AsyncCommandBase(object):
MAX_BUFFER_SIZE = 1024 * 1024 # 1Mb
def __init__(self, options=None, on_end_callback=None):
self.options = options or {}
self.on_end_callback = on_end_callback
self._buffer = b""
self._return_code = None
self._d = None
self._paused = False
try:
self.start()
except Exception as e:
raise pb.Error(str(e))
@property
def id(self):
return id(self)
def pause(self):
self._paused = True
self.stop()
def unpause(self):
self._paused = False
self.start()
def start(self):
raise NotImplementedError
def stop(self):
self.transport.loseConnection() # pylint: disable=no-member
def _ac_ended(self):
if self.on_end_callback:
self.on_end_callback()
if not self._d or self._d.called:
self._d = None
return
if self._buffer:
self._d.callback(self._buffer)
else:
self._d.callback(None)
def _ac_ondata(self, data):
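# Append incoming output, keep only the newest MAX_BUFFER_SIZE bytes and,
# unless paused, flush the buffer to a pending read Deferred.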
self._buffer += data
if len(self._buffer) > self.MAX_BUFFER_SIZE:
self._buffer = self._buffer[-1 * self.MAX_BUFFER_SIZE :]
if self._paused:
return
if self._d and not self._d.called:
self._d.callback(self._buffer)
self._buffer = b""
def ac_read(self):
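# Return buffered output if any; while the command is still running,
# hand back a Deferred that fires with the next chunk, otherwise None.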
if self._buffer:
result = self._buffer
self._buffer = b""
return result
if self._return_code is None:
self._d = defer.Deferred()
return self._d
return None
def ac_write(self, data):
self.transport.write(data) # pylint: disable=no-member
return len(data)
def ac_close(self):
self.stop()
return self._return_code

View File

@@ -1,42 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from twisted.internet import protocol, reactor # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class ProcessAsyncCmd(protocol.ProcessProtocol, AsyncCommandBase):
def start(self):
env = dict(os.environ).copy()
env.update({"PLATFORMIO_FORCE_ANSI": "true"})
reactor.spawnProcess(
self, self.options["executable"], self.options["args"], env
)
def outReceived(self, data):
self._ac_ondata(data)
def errReceived(self, data):
self._ac_ondata(data)
def processExited(self, reason):
self._return_code = reason.value.exitCode
def processEnded(self, reason):
if self._return_code is None:
self._return_code = reason.value.exitCode
self._ac_ended()

View File

@@ -1,66 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import zlib
from io import BytesIO
from platformio.commands.remote.ac.base import AsyncCommandBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
class ProjectSyncAsyncCmd(AsyncCommandBase):
def __init__(self, *args, **kwargs):
self.psync = None
self._upstream = None
super(ProjectSyncAsyncCmd, self).__init__(*args, **kwargs)
def start(self):
project_dir = os.path.join(
self.options["agent_working_dir"], "projects", self.options["id"]
)
self.psync = ProjectSync(project_dir)
for name in self.options["items"]:
self.psync.add_item(os.path.join(project_dir, name), name)
def stop(self):
self.psync = None
self._upstream = None
self._return_code = PROJECT_SYNC_STAGE.COMPLETED.value
def ac_write(self, data):
stage = PROJECT_SYNC_STAGE.lookupByValue(data.get("stage"))
if stage is PROJECT_SYNC_STAGE.DBINDEX:
self.psync.rebuild_dbindex()
return zlib.compress(json.dumps(self.psync.get_dbindex()).encode())
if stage is PROJECT_SYNC_STAGE.DELETE:
return self.psync.delete_dbindex(
json.loads(zlib.decompress(data["dbindex"]))
)
if stage is PROJECT_SYNC_STAGE.UPLOAD:
if not self._upstream:
self._upstream = BytesIO()
self._upstream.write(data["chunk"])
if self._upstream.tell() == data["total"]:
self.psync.decompress_items(self._upstream)
self._upstream = None
return PROJECT_SYNC_STAGE.EXTRACTED.value
return PROJECT_SYNC_STAGE.UPLOAD.value
return None

View File

@@ -1,60 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from twisted.internet import protocol, reactor # pylint: disable=import-error
from twisted.internet.serialport import SerialPort # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class SerialPortAsyncCmd(protocol.Protocol, AsyncCommandBase):
def start(self):
SerialPort(
self,
reactor=reactor,
**{
"deviceNameOrPortNumber": self.options["port"],
"baudrate": self.options["baud"],
"parity": self.options["parity"],
"rtscts": 1 if self.options["rtscts"] else 0,
"xonxoff": 1 if self.options["xonxoff"] else 0,
}
)
def connectionMade(self):
self.reset_device()
if self.options.get("rts", None) is not None:
self.transport.setRTS(self.options.get("rts"))
if self.options.get("dtr", None) is not None:
self.transport.setDTR(self.options.get("dtr"))
def reset_device(self):
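# Toggle DTR/RTS low and back high (with short delays) to reset the
# attached board before monitoring.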
self.transport.flushInput()
self.transport.setDTR(False)
self.transport.setRTS(False)
sleep(0.1)
self.transport.setDTR(True)
self.transport.setRTS(True)
sleep(0.1)
def dataReceived(self, data):
self._ac_ondata(data)
def connectionLost(self, reason): # pylint: disable=unused-argument
if self._paused:
return
self._return_code = 0
self._ac_ended()

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,38 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import click
from platformio.commands.remote.client.base import RemoteClientBase
class AgentListClient(RemoteClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("list", True)
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
for item in result:
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("ID: %s" % item["id"])
click.echo(
"Started: %s"
% datetime.fromtimestamp(item["started"]).strftime("%Y-%m-%d %H:%M:%S")
)
click.echo("")
self.disconnect()

View File

@@ -1,222 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from os.path import getatime, getmtime, isdir, isfile, join
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio import proc, util
from platformio.commands.remote.ac.process import ProcessAsyncCmd
from platformio.commands.remote.ac.psync import ProjectSyncAsyncCmd
from platformio.commands.remote.ac.serial import SerialPortAsyncCmd
from platformio.commands.remote.client.base import RemoteClientBase
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.helpers import get_project_core_dir
class RemoteAgentService(RemoteClientBase):
def __init__(self, name, share, working_dir=None):
RemoteClientBase.__init__(self)
self.log_level = LogLevel.info
self.working_dir = working_dir or join(get_project_core_dir(), "remote")
if not isdir(self.working_dir):
os.makedirs(self.working_dir)
if name:
self.name = str(name)[:50]
self.join_options.update(
{"agent": True, "share": [s.lower().strip()[:50] for s in share]}
)
self._acs = {}
def agent_pool_ready(self):
pass
def cb_disconnected(self, reason):
for ac in self._acs.values():
ac.ac_close()
RemoteClientBase.cb_disconnected(self, reason)
def remote_acread(self, ac_id):
self.log.debug("Async Read: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_read()
def remote_acwrite(self, ac_id, data):
self.log.debug("Async Write: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_write(data)
def remote_acclose(self, ac_id):
self.log.debug("Async Close: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return_code = self._acs[ac_id].ac_close()
del self._acs[ac_id]
return return_code
def remote_cmd(self, cmd, options):
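# Dispatch the request to the matching `_process_cmd_<name>` handler.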
self.log.info("Remote command received: {cmd}", cmd=cmd)
self.log.debug("Command options: {options!r}", options=options)
callback = "_process_cmd_%s" % cmd.replace(".", "_")
return getattr(self, callback)(options)
def _defer_async_cmd(self, ac, pass_agent_name=True):
self._acs[ac.id] = ac
if pass_agent_name:
return (self.id, ac.id, self.name)
return (self.id, ac.id)
def _process_cmd_device_list(self, _):
return (self.name, util.get_serialports())
def _process_cmd_device_monitor(self, options):
if not options["port"]:
for item in util.get_serialports():
if "VID:PID" in item["hwid"]:
options["port"] = item["port"]
break
# terminate opened monitors
if options["port"]:
for ac in list(self._acs.values()):
if (
isinstance(ac, SerialPortAsyncCmd)
and ac.options["port"] == options["port"]
):
self.log.info(
"Terminate previously opened monitor at {port}",
port=options["port"],
)
ac.ac_close()
del self._acs[ac.id]
if not options["port"]:
raise pb.Error("Please specify serial port using `--port` option")
self.log.info("Starting serial monitor at {port}", port=options["port"])
return self._defer_async_cmd(SerialPortAsyncCmd(options), pass_agent_name=False)
def _process_cmd_psync(self, options):
for ac in list(self._acs.values()):
if (
isinstance(ac, ProjectSyncAsyncCmd)
and ac.options["id"] == options["id"]
):
self.log.info("Terminate previous Project Sync process")
ac.ac_close()
del self._acs[ac.id]
options["agent_working_dir"] = self.working_dir
return self._defer_async_cmd(
ProjectSyncAsyncCmd(options), pass_agent_name=False
)
def _process_cmd_run(self, options):
return self._process_cmd_run_or_test("run", options)
def _process_cmd_test(self, options):
return self._process_cmd_run_or_test("test", options)
def _process_cmd_run_or_test( # pylint: disable=too-many-locals,too-many-branches
self, command, options
):
assert options and "project_id" in options
project_dir = join(self.working_dir, "projects", options["project_id"])
origin_pio_ini = join(project_dir, "platformio.ini")
back_pio_ini = join(project_dir, "platformio.ini.bak")
# remove insecure project options
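# (back up platformio.ini, drop custom `*_dir` options, force build_dir
# to ".pio/build", then restore the original file timestamps)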
try:
conf = ProjectConfig(origin_pio_ini)
if isfile(back_pio_ini):
os.remove(back_pio_ini)
os.rename(origin_pio_ini, back_pio_ini)
# cleanup
if conf.has_section("platformio"):
for opt in conf.options("platformio"):
if opt.endswith("_dir"):
conf.remove_option("platformio", opt)
else:
conf.add_section("platformio")
conf.set("platformio", "build_dir", ".pio/build")
conf.save(origin_pio_ini)
# restore A/M times
os.utime(origin_pio_ini, (getatime(back_pio_ini), getmtime(back_pio_ini)))
except NotPlatformIOProjectError as e:
raise pb.Error(str(e))
cmd_args = ["platformio", "--force", command, "-d", project_dir]
for env in options.get("environment", []):
cmd_args.extend(["-e", env])
for target in options.get("target", []):
cmd_args.extend(["-t", target])
for ignore in options.get("ignore", []):
cmd_args.extend(["-i", ignore])
if options.get("upload_port", False):
cmd_args.extend(["--upload-port", options.get("upload_port")])
if options.get("test_port", False):
cmd_args.extend(["--test-port", options.get("test_port")])
if options.get("disable_auto_clean", False):
cmd_args.append("--disable-auto-clean")
if options.get("without_building", False):
cmd_args.append("--without-building")
if options.get("without_uploading", False):
cmd_args.append("--without-uploading")
if options.get("silent", False):
cmd_args.append("-s")
if options.get("verbose", False):
cmd_args.append("-v")
paused_acs = []
for ac in self._acs.values():
if not isinstance(ac, SerialPortAsyncCmd):
continue
self.log.info("Pause active monitor at {port}", port=ac.options["port"])
ac.pause()
paused_acs.append(ac)
def _cb_on_end():
if isfile(back_pio_ini):
if isfile(origin_pio_ini):
os.remove(origin_pio_ini)
os.rename(back_pio_ini, origin_pio_ini)
for ac in paused_acs:
ac.unpause()
self.log.info(
"Unpause active monitor at {port}", port=ac.options["port"]
)
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args},
on_end_callback=_cb_on_end,
)
)
def _process_cmd_update(self, options):
cmd_args = ["platformio", "--force", "update"]
if options.get("only_check"):
cmd_args.append("--only-check")
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args}
)
)

View File

@@ -1,65 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class AsyncClientBase(RemoteClientBase):
def __init__(self, command, agents, options):
RemoteClientBase.__init__(self)
self.command = command
self.agents = agents
self.options = options
self._acs_total = 0
self._acs_ended = 0
def agent_pool_ready(self):
pass
def cb_async_result(self, result):
if self._acs_total == 0:
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
self.acread_data(*value)
def acread_data(self, agent_id, ac_id, agent_name=None):
d = self.agentpool.callRemote("acread", agent_id, ac_id)
d.addCallback(self.cb_acread_result, agent_id, ac_id, agent_name)
d.addErrback(self.cb_global_error)
def cb_acread_result(self, result, agent_id, ac_id, agent_name):
if result is None:
self.acclose(agent_id, ac_id)
else:
if self._acs_total > 1 and agent_name:
click.echo("[%s] " % agent_name, nl=False)
click.echo(result, nl=False)
self.acread_data(agent_id, ac_id, agent_name)
def acclose(self, agent_id, ac_id):
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_acclose_result)
d.addErrback(self.cb_global_error)
def cb_acclose_result(self, exit_code):
self._acs_ended += 1
if self._acs_ended != self._acs_total:
return
self.disconnect(exit_code)

View File

@@ -1,193 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from time import time
import click
from twisted.internet import defer, endpoints, reactor # pylint: disable=import-error
from twisted.logger import ILogObserver # pylint: disable=import-error
from twisted.logger import Logger # pylint: disable=import-error
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.logger import formatEvent # pylint: disable=import-error
from twisted.python import failure # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from zope.interface import provider # pylint: disable=import-error
from platformio import __pioremote_endpoint__, __version__, app, exception, maintenance
from platformio.commands.remote.factory.client import RemoteClientFactory
from platformio.commands.remote.factory.ssl import SSLContextFactory
class RemoteClientBase( # pylint: disable=too-many-instance-attributes
pb.Referenceable
):
PING_DELAY = 60
PING_MAX_FAILURES = 3
DEBUG = False
def __init__(self):
self.log_level = LogLevel.warn
self.log = Logger(namespace="remote", observer=self._log_observer)
self.id = app.get_host_id()
self.name = app.get_host_name()
self.join_options = {"corever": __version__}
self.perspective = None
self.agentpool = None
self._ping_id = 0
self._ping_caller = None
self._ping_counter = 0
self._reactor_stopped = False
self._exit_code = 0
@provider(ILogObserver)
def _log_observer(self, event):
if not self.DEBUG and (
event["log_namespace"] != self.log.namespace
or self.log_level > event["log_level"]
):
return
msg = formatEvent(event)
click.echo(
"%s [%s] %s"
% (
datetime.fromtimestamp(event["log_time"]).strftime("%Y-%m-%d %H:%M:%S"),
event["log_level"].name,
msg,
)
)
def connect(self):
self.log.info("Name: {name}", name=self.name)
self.log.info("Connecting to PIO Remote Cloud")
# pylint: disable=protected-access
proto, options = endpoints._parse(__pioremote_endpoint__)
proto = proto[0]
factory = RemoteClientFactory()
factory.remote_client = self
factory.sslContextFactory = None
if proto == "ssl":
factory.sslContextFactory = SSLContextFactory(options["host"])
reactor.connectSSL(
options["host"],
int(options["port"]),
factory,
factory.sslContextFactory,
)
elif proto == "tcp":
reactor.connectTCP(options["host"], int(options["port"]), factory)
else:
raise exception.PlatformioException("Unknown PIO Remote Cloud protocol")
reactor.run()
if self._exit_code != 0:
raise exception.ReturnErrorCode(self._exit_code)
def cb_client_authorization_failed(self, err):
msg = "Bad account credentials"
if err.check(pb.Error):
msg = err.getErrorMessage()
self.log.error(msg)
self.disconnect(exit_code=1)
def cb_client_authorization_made(self, perspective):
self.log.info("Successfully authorized")
self.perspective = perspective
d = perspective.callRemote("join", self.id, self.name, self.join_options)
d.addCallback(self._cb_client_join_made)
d.addErrback(self.cb_global_error)
def _cb_client_join_made(self, result):
code = result[0]
if code == 1:
self.agentpool = result[1]
self.agent_pool_ready()
self.restart_ping()
elif code == 2:
self.remote_service(*result[1:])
def remote_service(self, command, options):
if command == "disconnect":
self.log.error(
"PIO Remote Cloud disconnected: {msg}", msg=options.get("message")
)
self.disconnect()
def restart_ping(self, reset_counter=True):
# stop previous ping callers
self.stop_ping(reset_counter)
self._ping_caller = reactor.callLater(self.PING_DELAY, self._do_ping)
def _do_ping(self):
self._ping_counter += 1
self._ping_id = int(time())
d = self.perspective.callRemote("service", "ping", {"id": self._ping_id})
d.addCallback(self._cb_pong)
d.addErrback(self._cb_pong)
def stop_ping(self, reset_counter=True):
if reset_counter:
self._ping_counter = 0
if not self._ping_caller or not self._ping_caller.active():
return
self._ping_caller.cancel()
self._ping_caller = None
def _cb_pong(self, result):
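# A pong matching the last ping id keeps the keep-alive loop running;
# after PING_MAX_FAILURES unanswered pings the connection is dropped.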
if not isinstance(result, failure.Failure) and self._ping_id == result:
self.restart_ping()
return
if self._ping_counter >= self.PING_MAX_FAILURES:
self.stop_ping()
self.perspective.broker.transport.loseConnection()
else:
self.restart_ping(reset_counter=False)
def agent_pool_ready(self):
raise NotImplementedError
def disconnect(self, exit_code=None):
self.stop_ping()
if exit_code is not None:
self._exit_code = exit_code
if reactor.running and not self._reactor_stopped:
self._reactor_stopped = True
reactor.stop()
def cb_disconnected(self, _):
self.stop_ping()
self.perspective = None
self.agentpool = None
def cb_global_error(self, err):
if err.check(pb.PBConnectionLost, defer.CancelledError):
return
msg = err.getErrorMessage()
if err.check(pb.DeadReferenceError):
msg = "Remote Client has been terminated"
elif "PioAgentNotStartedError" in str(err.type):
msg = (
"Could not find active agents. Please start it before on "
"a remote machine using `pio remote agent start` command.\n"
"See http://docs.platformio.org/page/plus/pio-remote.html"
)
else:
maintenance.on_platformio_exception(Exception(err.type))
click.secho(msg, fg="red", err=True)
self.disconnect(exit_code=1)

View File

@@ -1,54 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import click
from platformio.commands.remote.client.base import RemoteClientBase
class DeviceListClient(RemoteClientBase):
def __init__(self, agents, json_output):
RemoteClientBase.__init__(self)
self.agents = agents
self.json_output = json_output
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, "device.list")
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
data = {}
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, devlist) = value
data[agent_name] = devlist
if self.json_output:
click.echo(json.dumps(data))
else:
for agent_name, devlist in data.items():
click.echo("Agent %s" % click.style(agent_name, fg="cyan", bold=True))
click.echo("=" * (6 + len(agent_name)))
for item in devlist:
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
self.disconnect()

View File

@@ -1,236 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from fnmatch import fnmatch
import click
from twisted.internet import protocol, reactor, task # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class SMBridgeProtocol(protocol.Protocol): # pylint: disable=no-init
def connectionMade(self):
self.factory.add_client(self)
def connectionLost(self, reason): # pylint: disable=unused-argument
self.factory.remove_client(self)
def dataReceived(self, data):
self.factory.send_to_server(data)
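# Local TCP bridge: the local serial monitor connects to this server and
# its traffic is relayed to and from the remote agent's serial port.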
class SMBridgeFactory(protocol.ServerFactory):
def __init__(self, cdm):
self.cdm = cdm
self._clients = []
def buildProtocol(self, addr): # pylint: disable=unused-argument
p = SMBridgeProtocol()
p.factory = self # pylint: disable=attribute-defined-outside-init
return p
def add_client(self, client):
self.cdm.log.debug("SMBridge: Client connected")
self._clients.append(client)
self.cdm.acread_data()
def remove_client(self, client):
self.cdm.log.debug("SMBridge: Client disconnected")
self._clients.remove(client)
if not self._clients:
self.cdm.client_terminal_stopped()
def has_clients(self):
return len(self._clients)
def send_to_clients(self, data):
if not self._clients:
return None
for client in self._clients:
client.transport.write(data)
return len(data)
def send_to_server(self, data):
self.cdm.acwrite_data(data)
class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
RemoteClientBase
):
MAX_BUFFER_SIZE = 1024 * 1024
def __init__(self, agents, **kwargs):
RemoteClientBase.__init__(self)
self.agents = agents
self.cmd_options = kwargs
self._bridge_factory = SMBridgeFactory(self)
self._agent_id = None
self._ac_id = None
self._d_acread = None
self._d_acwrite = None
self._acwrite_buffer = ""
def agent_pool_ready(self):
d = task.deferLater(
reactor, 1, self.agentpool.callRemote, "cmd", self.agents, "device.list"
)
d.addCallback(self._cb_device_list)
d.addErrback(self.cb_global_error)
def _cb_device_list(self, result):
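# Aggregate ports reported by all agents: honor an explicit or glob-style
# --port value, auto-select the only device with a VID:PID hardware id,
# otherwise prompt the user to choose from the list.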
devices = []
hwid_devindexes = []
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, ports) = value
for item in ports:
if "VID:PID" in item["hwid"]:
hwid_devindexes.append(len(devices))
devices.append((agent_name, item))
if len(result) == 1 and self.cmd_options["port"]:
if set(["*", "?", "[", "]"]) & set(self.cmd_options["port"]):
for agent, item in devices:
if fnmatch(item["port"], self.cmd_options["port"]):
return self.start_remote_monitor(agent, item["port"])
return self.start_remote_monitor(result[0][1][0], self.cmd_options["port"])
device = None
if len(hwid_devindexes) == 1:
device = devices[hwid_devindexes[0]]
else:
click.echo("Available ports:")
for i, device in enumerate(devices):
click.echo(
"{index}. {host}{port} \t{description}".format(
index=i + 1,
host=device[0] + ":" if len(result) > 1 else "",
port=device[1]["port"],
description=device[1]["description"]
if device[1]["description"] != "n/a"
else "",
)
)
device_index = click.prompt(
"Please choose a port (number in the list above)",
type=click.Choice([str(i + 1) for i, _ in enumerate(devices)]),
)
device = devices[int(device_index) - 1]
self.start_remote_monitor(device[0], device[1]["port"])
return None
def start_remote_monitor(self, agent, port):
options = {"port": port}
for key in ("baud", "parity", "rtscts", "xonxoff", "rts", "dtr"):
options[key] = self.cmd_options[key]
click.echo(
"Starting Serial Monitor on {host}:{port}".format(
host=agent, port=options["port"]
)
)
d = self.agentpool.callRemote("cmd", [agent], "device.monitor", options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)
def cb_async_result(self, result):
if len(result) != 1:
raise pb.Error("Invalid response from Remote Cloud")
success, value = result[0]
if not success:
raise pb.Error(value)
reconnected = self._agent_id is not None
self._agent_id, self._ac_id = value
if reconnected:
self.acread_data(force=True)
self.acwrite_data("", force=True)
return
# start bridge
port = reactor.listenTCP(0, self._bridge_factory)
address = port.getHost()
self.log.debug("Serial Bridge is started on {address!r}", address=address)
if "sock" in self.cmd_options:
with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
fp.write("socket://localhost:%d" % address.port)
def client_terminal_stopped(self):
try:
d = self.agentpool.callRemote("acclose", self._agent_id, self._ac_id)
d.addCallback(lambda r: self.disconnect())
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def acread_data(self, force=False):
if force and self._d_acread:
self._d_acread.cancel()
self._d_acread = None
if (
self._d_acread and not self._d_acread.called
) or not self._bridge_factory.has_clients():
return
try:
self._d_acread = self.agentpool.callRemote(
"acread", self._agent_id, self._ac_id
)
self._d_acread.addCallback(self.cb_acread_result)
self._d_acread.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acread_result(self, result):
if result is None:
self.disconnect(exit_code=1)
else:
self._bridge_factory.send_to_clients(result)
self.acread_data()
def acwrite_data(self, data, force=False):
if force and self._d_acwrite:
self._d_acwrite.cancel()
self._d_acwrite = None
self._acwrite_buffer += data
if len(self._acwrite_buffer) > self.MAX_BUFFER_SIZE:
self._acwrite_buffer = self._acwrite_buffer[-1 * self.MAX_BUFFER_SIZE :]
if (self._d_acwrite and not self._d_acwrite.called) or not self._acwrite_buffer:
return
data = self._acwrite_buffer
self._acwrite_buffer = ""
try:
d = self.agentpool.callRemote("acwrite", self._agent_id, self._ac_id, data)
d.addCallback(self.cb_acwrite_result)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acwrite_result(self, result):
assert result > 0
if self._acwrite_buffer:
self.acwrite_data("")

View File

@@ -1,272 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import os
import zlib
from io import BytesIO
from twisted.spread import pb # pylint: disable=import-error
from platformio import util
from platformio.commands.remote.client.async_base import AsyncClientBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
from platformio.compat import hashlib_encode_data
from platformio.project.config import ProjectConfig
class RunOrTestClient(AsyncClientBase):
MAX_ARCHIVE_SIZE = 50 * 1024 * 1024 # 50Mb
UPLOAD_CHUNK_SIZE = 256 * 1024 # 256Kb
PSYNC_SRC_EXTS = [
"c",
"cpp",
"S",
"spp",
"SPP",
"sx",
"s",
"asm",
"ASM",
"h",
"hpp",
"ipp",
"ino",
"pde",
"json",
"properties",
]
PSYNC_SKIP_DIRS = (".git", ".svn", ".hg", "example", "examples", "test", "tests")
def __init__(self, *args, **kwargs):
AsyncClientBase.__init__(self, *args, **kwargs)
self.project_id = self.generate_project_id(self.options["project_dir"])
self.psync = ProjectSync(self.options["project_dir"])
def generate_project_id(self, path):
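# Derive a stable project id from the host id and the project path
# (basename plus SHA1 digest) so repeated runs reuse the same remote
# project directory.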
h = hashlib.sha1(hashlib_encode_data(self.id))
h.update(hashlib_encode_data(path))
return "%s-%s" % (os.path.basename(path), h.hexdigest())
def add_project_items(self, psync):
with util.cd(self.options["project_dir"]):
cfg = ProjectConfig.get_instance(
os.path.join(self.options["project_dir"], "platformio.ini")
)
psync.add_item(cfg.path, "platformio.ini")
psync.add_item(cfg.get_optional_dir("shared"), "shared")
psync.add_item(cfg.get_optional_dir("boards"), "boards")
if self.options["force_remote"]:
self._add_project_source_items(cfg, psync)
else:
self._add_project_binary_items(cfg, psync)
if self.command == "test":
psync.add_item(cfg.get_optional_dir("test"), "test")
def _add_project_source_items(self, cfg, psync):
psync.add_item(cfg.get_optional_dir("lib"), "lib")
psync.add_item(
cfg.get_optional_dir("include"),
"include",
cb_filter=self._cb_tarfile_filter,
)
psync.add_item(
cfg.get_optional_dir("src"), "src", cb_filter=self._cb_tarfile_filter
)
if set(["buildfs", "uploadfs", "uploadfsota"]) & set(
self.options.get("target", [])
):
psync.add_item(cfg.get_optional_dir("data"), "data")
@staticmethod
def _add_project_binary_items(cfg, psync):
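# When the project is built locally (no --force-remote), sync only the
# firmware artifacts (.elf/.bin/.hex/.eep/program) from the build dir.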
build_dir = cfg.get_optional_dir("build")
for env_name in os.listdir(build_dir):
env_dir = os.path.join(build_dir, env_name)
if not os.path.isdir(env_dir):
continue
for fname in os.listdir(env_dir):
bin_file = os.path.join(env_dir, fname)
bin_exts = (".elf", ".bin", ".hex", ".eep", "program")
if os.path.isfile(bin_file) and fname.endswith(bin_exts):
psync.add_item(
bin_file, os.path.join(".pio", "build", env_name, fname)
)
def _cb_tarfile_filter(self, path):
if (
os.path.isdir(path)
and os.path.basename(path).lower() in self.PSYNC_SKIP_DIRS
):
return None
if os.path.isfile(path) and not self.is_file_with_exts(
path, self.PSYNC_SRC_EXTS
):
return None
return path
@staticmethod
def is_file_with_exts(path, exts):
if path.endswith(tuple(".%s" % e for e in exts)):
return True
return False
def agent_pool_ready(self):
self.psync_init()
def psync_init(self):
self.add_project_items(self.psync)
d = self.agentpool.callRemote(
"cmd",
self.agents,
"psync",
dict(id=self.project_id, items=[i[1] for i in self.psync.get_items()]),
)
d.addCallback(self.cb_psync_init_result)
d.addErrback(self.cb_global_error)
# build db index while wait for result from agent
self.psync.rebuild_dbindex()
def cb_psync_init_result(self, result):
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
agent_id, ac_id = value
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(stage=PROJECT_SYNC_STAGE.DBINDEX.value),
)
d.addCallback(self.cb_psync_dbindex_result, agent_id, ac_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_dbindex_result(self, result, agent_id, ac_id):
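# Compare the agent's file index with the local one: remove stale entries
# remotely, then upload only the changed items (the delta).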
result = set(json.loads(zlib.decompress(result)))
dbindex = set(self.psync.get_dbindex())
delete = list(result - dbindex)
delta = list(dbindex - result)
self.log.debug(
"PSync: stats, total={total}, delete={delete}, delta={delta}",
total=len(dbindex),
delete=len(delete),
delta=len(delta),
)
if not delete and not delta:
return self.psync_finalize(agent_id, ac_id)
if not delete:
return self.psync_upload(agent_id, ac_id, delta)
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.DELETE.value,
dbindex=zlib.compress(json.dumps(delete).encode()),
),
)
d.addCallback(self.cb_psync_delete_result, agent_id, ac_id, delta)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
return None
def cb_psync_delete_result(self, result, agent_id, ac_id, dbindex):
assert result
self.psync_upload(agent_id, ac_id, dbindex)
def psync_upload(self, agent_id, ac_id, dbindex):
assert dbindex
fileobj = BytesIO()
compressed = self.psync.compress_items(fileobj, dbindex, self.MAX_ARCHIVE_SIZE)
fileobj.seek(0)
self.log.debug(
"PSync: upload project, size={size}", size=len(fileobj.getvalue())
)
self.psync_upload_chunk(
agent_id, ac_id, list(set(dbindex) - set(compressed)), fileobj
)
def psync_upload_chunk(self, agent_id, ac_id, dbindex, fileobj):
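# Stream the compressed archive in UPLOAD_CHUNK_SIZE pieces; the agent
# answers with EXTRACTED once the final chunk has been received.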
offset = fileobj.tell()
total = fileobj.seek(0, os.SEEK_END)
# unwind
fileobj.seek(offset)
chunk = fileobj.read(self.UPLOAD_CHUNK_SIZE)
assert chunk
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.UPLOAD.value,
chunk=chunk,
length=len(chunk),
total=total,
),
)
d.addCallback(
self.cb_psync_upload_chunk_result, agent_id, ac_id, dbindex, fileobj
)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments
self, result, agent_id, ac_id, dbindex, fileobj
):
result = PROJECT_SYNC_STAGE.lookupByValue(result)
self.log.debug("PSync: upload chunk result {r}", r=str(result))
assert result & (PROJECT_SYNC_STAGE.UPLOAD | PROJECT_SYNC_STAGE.EXTRACTED)
if result is PROJECT_SYNC_STAGE.EXTRACTED:
if dbindex:
self.psync_upload(agent_id, ac_id, dbindex)
else:
self.psync_finalize(agent_id, ac_id)
else:
self.psync_upload_chunk(agent_id, ac_id, dbindex, fileobj)
def psync_finalize(self, agent_id, ac_id):
try:
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_psync_completed_result, agent_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_completed_result(self, result, agent_id):
assert PROJECT_SYNC_STAGE.lookupByValue(result)
options = self.options.copy()
del options["project_dir"]
options["project_id"] = self.project_id
d = self.agentpool.callRemote("cmd", [agent_id], self.command, options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)

View File

@@ -1,22 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.remote.client.async_base import AsyncClientBase
class UpdateCoreClient(AsyncClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, self.command, self.options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)

View File

@@ -1,367 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, import-outside-toplevel
# pylint: disable=inconsistent-return-statements
import os
import subprocess
import threading
from tempfile import mkdtemp
from time import sleep
import click
from platformio import exception, fs, proc
from platformio.commands.device import helpers as device_helpers
from platformio.commands.device.command import device_monitor as cmd_device_monitor
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.test.command import cli as cmd_test
from platformio.compat import PY2
from platformio.managers.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError
@click.group("remote", short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
if PY2:
raise exception.UserSideException(
"PIO Remote requires Python 3.5 or above. \nPlease install the latest "
"Python 3 and reinstall PlatformIO Core using installation script:\n"
"https://docs.platformio.org/page/core/installation.html"
)
ctx.obj = agent
inject_contrib_pysite()
@cli.group("agent", short_help="Start a new agent or list active")
def remote_agent():
pass
@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option(
"-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
def remote_agent_start(name, share, working_dir):
from platformio.commands.remote.client.agent_service import RemoteAgentService
RemoteAgentService(name, share, working_dir).connect()
@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
from platformio.commands.remote.client.agent_list import AgentListClient
AgentListClient().connect()
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.pass_obj
def remote_update(agents, only_check, dry_run):
from platformio.commands.remote.client.update_core import UpdateCoreClient
UpdateCoreClient("update", agents, dict(only_check=only_check or dry_run)).connect()
@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_obj
@click.pass_context
def remote_run(
ctx,
agents,
environment,
target,
upload_port,
project_dir,
disable_auto_clean,
force_remote,
silent,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"run",
agents,
dict(
environment=environment,
target=target,
upload_port=upload_port,
project_dir=project_dir,
disable_auto_clean=disable_auto_clean,
force_remote=force_remote,
silent=silent,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
local_targets = []
if "clean" in target:
local_targets = ["clean"]
elif set(["buildfs", "uploadfs", "uploadfsota"]) & set(target):
local_targets = ["buildfs"]
else:
local_targets = ["checkprogsize", "buildprog"]
ctx.invoke(
cmd_run,
environment=environment,
target=local_targets,
project_dir=project_dir,
# disable_auto_clean=True,
silent=silent,
verbose=verbose,
)
if any(["upload" in t for t in target] + ["program" in target]):
click.secho("Uploading firmware remotely", bold=True)
cr.options["target"] += ("nobuild",)
cr.options["disable_auto_clean"] = True
cr.connect()
return True
@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
@click.pass_obj
@click.pass_context
def remote_test(
ctx,
agents,
environment,
ignore,
upload_port,
test_port,
project_dir,
force_remote,
without_building,
without_uploading,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"test",
agents,
dict(
environment=environment,
ignore=ignore,
upload_port=upload_port,
test_port=test_port,
project_dir=project_dir,
force_remote=force_remote,
without_building=without_building,
without_uploading=without_uploading,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
ctx.invoke(
cmd_test,
environment=environment,
ignore=ignore,
project_dir=project_dir,
without_uploading=True,
without_testing=True,
verbose=verbose,
)
click.secho("Testing project remotely", bold=True)
cr.options["without_building"] = True
cr.connect()
return True
@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
pass
@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
@click.pass_obj
def device_list(agents, json_output):
from platformio.commands.remote.client.device_list import DeviceListClient
DeviceListClient(agents, json_output).connect()
@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
@click.option(
"--sock",
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.pass_obj
@click.pass_context
def device_monitor(ctx, agents, **kwargs):
from platformio.commands.remote.client.device_monitor import DeviceMonitorClient
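# With --sock the client acts as the bridge to the remote agent directly;
# otherwise spawn a helper `remote device monitor --sock` subprocess and
# attach the local monitor to the socket address it reports.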
if kwargs["sock"]:
return DeviceMonitorClient(agents, **kwargs).connect()
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
project_options = device_helpers.get_project_options(kwargs["environment"])
kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options)
except NotPlatformIOProjectError:
pass
kwargs["baud"] = kwargs["baud"] or 9600
def _tx_target(sock_dir):
subcmd_argv = ["remote", "device", "monitor"]
subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
subcmd_argv.extend(["--sock", sock_dir])
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
sock_dir = mkdtemp(suffix="pio")
sock_file = os.path.join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir,))
t.start()
while t.is_alive() and not os.path.isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
with open(sock_file) as fp:
kwargs["port"] = fp.read()
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)
finally:
fs.rmtree(sock_dir)
return True

View File

@@ -1,13 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -1,73 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.cred import credentials  # pylint: disable=import-error
from twisted.internet import protocol, reactor  # pylint: disable=import-error
from twisted.spread import pb  # pylint: disable=import-error

from platformio.app import get_host_id
from platformio.commands.account.client import AccountClient


class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
    def clientConnectionMade(self, broker):
        if self.sslContextFactory and not self.sslContextFactory.certificate_verified:
            self.remote_client.log.error(
                "A remote cloud could not prove that its security certificate is "
                "from {host}. This may be caused by a misconfiguration or an attacker "
                "intercepting your connection.",
                host=self.sslContextFactory.host,
            )
            return self.remote_client.disconnect()
        pb.PBClientFactory.clientConnectionMade(self, broker)
        protocol.ReconnectingClientFactory.resetDelay(self)
        self.remote_client.log.info("Successfully connected")
        self.remote_client.log.info("Authenticating")
        # Authenticate over Perspective Broker with the account token and host id
        d = self.login(
            credentials.UsernamePassword(
                AccountClient().fetch_authentication_token().encode(),
                get_host_id().encode(),
            ),
            client=self.remote_client,
        )
        d.addCallback(self.remote_client.cb_client_authorization_made)
        d.addErrback(self.remote_client.cb_client_authorization_failed)
        return d

    def clientConnectionFailed(self, connector, reason):
        self.remote_client.log.warn(
            "Could not connect to PIO Remote Cloud. Reconnecting..."
        )
        self.remote_client.cb_disconnected(reason)
        protocol.ReconnectingClientFactory.clientConnectionFailed(
            self, connector, reason
        )

    def clientConnectionLost(  # pylint: disable=arguments-differ
        self, connector, unused_reason
    ):
        if not reactor.running:
            self.remote_client.log.info("Successfully disconnected")
            return
        self.remote_client.log.warn(
            "Connection to PIO Remote Cloud was lost. Reconnecting..."
        )
        pb.PBClientFactory.clientConnectionLost(
            self, connector, unused_reason, reconnecting=1
        )
        self.remote_client.cb_disconnected(unused_reason)
        protocol.ReconnectingClientFactory.clientConnectionLost(
            self, connector, unused_reason
        )
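`RemoteClientFactory` combines Twisted's `pb.PBClientFactory` (Perspective Broker login) with `ReconnectingClientFactory` (automatic reconnects with back-off), and it expects the caller to attach `remote_client` and `sslContextFactory` attributes before connecting. The rough wiring sketch below is written under those assumptions; the host, port, and module paths are invented for illustration and are not PlatformIO's actual client code.

# Wiring sketch only. Module paths, host, and port are assumptions; adjust to
# wherever RemoteClientFactory and SSLContextFactory actually live.
from twisted.internet import reactor

from client_factory import RemoteClientFactory  # hypothetical module name
from ssl_context import SSLContextFactory       # hypothetical module name


def connect_remote(remote_client, host="example-remote-cloud.invalid", port=4413):
    # remote_client must provide the .log and cb_* callbacks used by the factory
    ssl_ctx = SSLContextFactory(host)
    factory = RemoteClientFactory()
    # The factory reads these attributes in clientConnectionMade() above
    factory.remote_client = remote_client
    factory.sslContextFactory = ssl_ctx
    # connectSSL drives getContext()/verifyHostname() during the TLS handshake;
    # on success, clientConnectionMade() logs in over Perspective Broker.
    reactor.connectSSL(host, port, factory, ssl_ctx)
    reactor.run()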

View File

@@ -1,41 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import certifi
from OpenSSL import SSL  # pylint: disable=import-error
from twisted.internet import ssl  # pylint: disable=import-error


class SSLContextFactory(ssl.ClientContextFactory):
    def __init__(self, host):
        self.host = host
        self.certificate_verified = False

    def getContext(self):
        ctx = super(SSLContextFactory, self).getContext()
        ctx.set_verify(
            SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.verifyHostname
        )
        ctx.load_verify_locations(certifi.where())
        return ctx

    def verifyHostname(  # pylint: disable=unused-argument,too-many-arguments
        self, connection, x509, errno, depth, status
    ):
        cn = x509.get_subject().commonName
        if cn.startswith("*"):
            cn = cn[1:]
        if self.host.endswith(cn):
            self.certificate_verified = True
        return status
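The hostname check above is a plain suffix match against the certificate's common name: a leading `*` is stripped and the connected host only has to end with the remainder. The snippet below simply restates that rule with made-up host/CN strings; note that full wildcard validation (RFC 6125) is stricter than this suffix check.

# Stand-alone illustration of the common-name matching rule used above;
# the host and CN strings are made up for the example.
def cn_matches(host, cn):
    if cn.startswith("*"):
        cn = cn[1:]  # "*.example.org" -> ".example.org"
    return host.endswith(cn)


print(cn_matches("remote.example.org", "*.example.org"))       # True
print(cn_matches("remote.example.org", "remote.example.org"))  # True
print(cn_matches("evil.org", "*.example.org"))                 # False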

Some files were not shown because too many files have changed in this diff