Mirror of https://github.com/platformio/platformio-core.git (synced 2025-07-30 18:17:13 +02:00)
Merge branch 'release/v4.3.2'
(AppVeyor CI configuration, likely appveyor.yml — file deleted)
@@ -1,31 +0,0 @@
build: off

platform:
    - x64

environment:
    matrix:
        - TOXENV: "py27"
          PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P2_{build}
          PYTHON_DIRS: C:\Python27-x64;C:\Python27-x64\Scripts

        - TOXENV: "py36"
          PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P3_{build}
          PYTHON_DIRS: C:\Python36-x64;C:\Python36-x64\Scripts

install:
    - cmd: git submodule update --init --recursive
    - cmd: SET PATH=%PYTHON_DIRS%;C:\MinGW\bin;%PATH%
    - cmd: SET PLATFORMIO_CORE_DIR=C:\.pio
    - cmd: pip install --force-reinstall tox

test_script:
    - cmd: tox

notifications:
    - provider: Slack
      incoming_webhook:
        secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
      on_build_success: false
      on_build_failure: true
      on_build_status_changed: true
.github/workflows/core.yml (new file, 42 lines, vendored)
@@ -0,0 +1,42 @@
name: Core

on: [push]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python-version: [2.7, 3.7]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          git submodule update --init --recursive
          python -m pip install --upgrade pip
          pip install tox

      - name: Python Lint
        run: |
          tox -e lint
      - name: Integration Tests
        env:
          PLATFORMIO_TEST_ACCOUNT_LOGIN: ${{ secrets.PLATFORMIO_TEST_ACCOUNT_LOGIN }}
          PLATFORMIO_TEST_ACCOUNT_PASSWORD: ${{ secrets.PLATFORMIO_TEST_ACCOUNT_PASSWORD }}
        run: |
          tox -e testcore

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Core*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
.github/workflows/docs.yml (new file, 31 lines, vendored)
@@ -0,0 +1,31 @@
name: Docs

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      - name: Install dependencies
        run: |
          git submodule update --init --recursive
          python -m pip install --upgrade pip
          pip install tox

      - name: Build docs
        run: |
          tox -e docs

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Docs*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
.github/workflows/examples.yml (new file, 62 lines, vendored)
@@ -0,0 +1,62 @@
name: Examples

on: [push]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-16.04, windows-latest, macos-latest]
        python-version: [2.7, 3.7]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          git submodule update --init --recursive
          python -m pip install --upgrade pip
          pip install tox

      - name: Run on Linux
        if: startsWith(matrix.os, 'ubuntu')
        env:
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51,aceinna_imu"
        run: |
          # ChipKIT issue: install 32-bit support for GCC PIC32
          sudo apt-get install libc6-i386
          # Free space
          sudo apt clean
          docker rmi $(docker image ls -aq)
          df -h
          # Run
          tox -e testexamples

      - name: Run on macOS
        if: startsWith(matrix.os, 'macos')
        env:
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,gd32v,nuclei"
        run: |
          df -h
          tox -e testexamples

      - name: Run on Windows
        if: startsWith(matrix.os, 'windows')
        env:
          PLATFORMIO_CORE_DIR: C:/pio
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
        run: |
          tox -e testexamples

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Examples*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
(isort configuration, likely .isort.cfg)
@@ -1,3 +1,3 @@
 [settings]
 line_length=88
-known_third_party=SCons, twisted, autobahn, jsonrpc
+known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope
.travis.yml (39 lines, file deleted)
@@ -1,39 +0,0 @@
language: python

matrix:
    include:
        - os: linux
          sudo: false
          python: 2.7
          env: TOX_ENV=docs
        - os: linux
          sudo: required
          python: 2.7
          env: TOX_ENV=py27 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
        - os: linux
          sudo: required
          python: 3.6
          env: TOX_ENV=py36 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
        - os: osx
          language: generic
          env: TOX_ENV=skipexamples

install:
    - git submodule update --init --recursive
    - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
    - pip install -U tox

    # ChipKIT issue: install 32-bit support for GCC PIC32
    - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi

script:
    - tox -e $TOX_ENV

notifications:
    email: false

    slack:
        rooms:
            secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
        on_failure: always
        on_success: change
HISTORY.rst (17 lines changed)
@@ -6,12 +6,25 @@ Release Notes

 PlatformIO Core 4
 -----------------

+4.3.2 (2020-04-28)
+~~~~~~~~~~~~~~~~~~
+
+* New `Account Management System <https://docs.platformio.org/page/plus/pio-account.html>`__ (preview)
+* Open source `PIO Remote <http://docs.platformio.org/page/plus/pio-remote.html>`__ client
+* Improved `PIO Check <http://docs.platformio.org/page/plus/pio-check.html>`__ with more accurate project processing
+* Echo what is typed when ``send_on_enter`` `device monitor filter <https://docs.platformio.org/page/projectconf/section_env_monitor.html#monitor-filters>`__ is used (`issue #3452 <https://github.com/platformio/platformio-core/issues/3452>`_)
+* Fixed PIO Unit Testing for Zephyr RTOS
+* Fixed UnicodeDecodeError on Windows when network drive (NAS) is used (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
+* Fixed an issue when saving libraries in new project results in error "No option 'lib_deps' in section" (`issue #3442 <https://github.com/platformio/platformio-core/issues/3442>`_)
+* Fixed an incorrect node path used for pattern matching when processing middleware nodes
+* Fixed an issue with missing ``lib_extra_dirs`` option in SRC_LIST for CLion (`issue #3460 <https://github.com/platformio/platformio-core/issues/3460>`_)
+
 4.3.1 (2020-03-20)
 ~~~~~~~~~~~~~~~~~~

 * Fixed a SyntaxError "'return' with argument inside generator" for PIO Unified Debugger when Python 2.7 is used
 * Fixed an issue when ``lib_archive = no`` was not honored in `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__
-* Fixed an TypeError "super(type, obj): obj must be an instance or subtype of type" when device monitor is used with a custom dev-platform filter (`issue #3431 <https://github.com/platformio/platformio-core/issues/3431>`_)
+* Fixed a TypeError "super(type, obj): obj must be an instance or subtype of type" when device monitor is used with a custom dev-platform filter (`issue #3431 <https://github.com/platformio/platformio-core/issues/3431>`_)

 4.3.0 (2020-03-19)
 ~~~~~~~~~~~~~~~~~~

@@ -35,7 +48,7 @@ PlatformIO Core 4
   - Show a hexadecimal representation of the data (code point of each character) with ``hexlify`` filter

 * New standalone (1-script) `PlatformIO Core Installer <https://github.com/platformio/platformio-core-installer>`_
-* Initial support for `Renode <https://docs.platformio.org/page/plus/debug-tools/qemu.html>`__ simulation framework (`issue #3401 <https://github.com/platformio/platformio-core/issues/3401>`_)
+* Initial support for `Renode <https://docs.platformio.org/page/plus/debug-tools/renode.html>`__ simulation framework (`issue #3401 <https://github.com/platformio/platformio-core/issues/3401>`_)
 * Added support for Arm Mbed "module.json" ``dependencies`` field (`issue #3400 <https://github.com/platformio/platformio-core/issues/3400>`_)
 * Improved support for Arduino "library.properties" ``depends`` field
 * Fixed an issue when quitting from PlatformIO IDE does not shutdown PIO Home server
Makefile (2 lines changed)
@@ -12,7 +12,7 @@ format:
 test:
 	py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

-before-commit: isort format lint test
+before-commit: isort format lint

 clean-docs:
 	rm -rf docs/_build
README.rst (25 lines changed)
@@ -1,12 +1,15 @@
 PlatformIO
 ==========

-.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
-    :target: https://travis-ci.org/platformio/platformio-core
-    :alt: Travis.CI Build Status
-.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
-    :target: https://ci.appveyor.com/project/ivankravets/platformio-core
-    :alt: AppVeyor.CI Build Status
+.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
+    :target: https://docs.platformio.org/page/core/index.html
+    :alt: CI Build for PlatformIO Core
+.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
+    :target: https://github.com/platformio/platformio-examples
+    :alt: CI Build for dev-platform examples
+.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
+    :target: https://docs.platformio.org?utm_source=github&utm_medium=core
+    :alt: CI Build for Docs
 .. image:: https://img.shields.io/pypi/v/platformio.svg
     :target: https://pypi.python.org/pypi/platformio/
     :alt: Latest Version
@@ -45,13 +48,13 @@ PlatformIO
 Get Started
 -----------

-* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
+* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_

 Instruments
 -----------

 * `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
-* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
+* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
 * `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
 * `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
 * `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
@@ -64,7 +67,7 @@ Professional
 * `PIO Check <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
 * `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
 * `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
-* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
+* `PIO Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_

 Registry
 --------
@@ -140,8 +143,8 @@ Telemetry / Privacy Policy
 Share minimal diagnostics and usage information to help us make PlatformIO better.
 It is enabled by default. For more information see:

-* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
-* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
+* `Telemetry Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
+* `SSL Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_

 License
 -------
docs (submodule updated: d97117eb2e...bff1fc845b)
examples (submodule updated: 370c2c41a1...7793b677f7)
(package metadata, likely platformio/__init__.py — version bump and new account/remote endpoints)
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-VERSION = (4, 3, 1)
+VERSION = (4, 3, 2)
 __version__ = ".".join([str(s) for s in VERSION])

 __title__ = "platformio"
@@ -34,3 +34,5 @@ __license__ = "Apache Software License"
 __copyright__ = "Copyright 2014-present PlatformIO"

 __apiurl__ = "https://api.platformio.org"
+__pioaccount_api__ = "https://api.accounts.platformio.org"
+__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
(application state helpers, likely platformio/app.py — new user-agent and host-id functions)
@@ -13,8 +13,11 @@
 # limitations under the License.

 import codecs
+import getpass
+import hashlib
 import os
+import platform
 import socket
 import uuid
 from os import environ, getenv, listdir, remove
 from os.path import dirname, isdir, isfile, join, realpath
@@ -22,7 +25,7 @@ from time import time

 import requests

-from platformio import exception, fs, lockfile
+from platformio import __version__, exception, fs, lockfile
 from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
 from platformio.proc import is_ci
 from platformio.project.helpers import (
@@ -414,3 +417,28 @@ def get_cid():
     if WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
         set_state_item("cid", cid)
     return cid


 def get_user_agent():
     data = ["PlatformIO/%s" % __version__, "CI/%d" % int(is_ci())]
     if get_session_var("caller_id"):
         data.append("Caller/%s" % get_session_var("caller_id"))
     if os.getenv("PLATFORMIO_IDE"):
         data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
     data.append("Python/%s" % platform.python_version())
     data.append("Platform/%s" % platform.platform())
     return " ".join(data)


 def get_host_id():
     h = hashlib.sha1(hashlib_encode_data(get_cid()))
     try:
         username = getpass.getuser()
         h.update(hashlib_encode_data(username))
     except:  # pylint: disable=bare-except
         pass
     return h.hexdigest()


 def get_host_name():
     return str(socket.gethostname())[:255]
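As a reading aid (not part of the commit), here is a minimal, self-contained sketch that mirrors how the new get_user_agent() above assembles its token string; the example values are invented:

import platform

def build_user_agent(version="4.3.2", is_ci=False, caller_id=None, ide=None):
    # Same token order as app.get_user_agent() in this diff:
    # PlatformIO/<ver> CI/<0|1> [Caller/<id>] [IDE/<name>] Python/<ver> Platform/<os>
    data = ["PlatformIO/%s" % version, "CI/%d" % int(is_ci)]
    if caller_id:
        data.append("Caller/%s" % caller_id)
    if ide:
        data.append("IDE/%s" % ide)
    data.append("Python/%s" % platform.python_version())
    data.append("Platform/%s" % platform.platform())
    return " ".join(data)

print(build_user_agent(ide="vscode"))
# e.g. "PlatformIO/4.3.2 CI/0 IDE/vscode Python/3.7.7 Platform/Linux-..."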
(SCons build script, likely platformio/builder/main.py — SConsign database filename change)
@@ -147,7 +147,7 @@ env.LoadPioPlatform()

 env.SConscriptChdir(0)
 env.SConsignFile(
-    join("$BUILD_DIR", ".sconsign.py%d%d" % (sys.version_info[0], sys.version_info[1]))
+    join("$BUILD_DIR", ".sconsign%d%d.db" % (sys.version_info[0], sys.version_info[1]))
 )

 for item in env.GetExtraScripts("pre"):
@ -25,44 +25,45 @@ from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def _dump_includes(env):
|
||||
includes = []
|
||||
includes = {}
|
||||
|
||||
for item in env.get("CPPPATH", []):
|
||||
includes.append(env.subst(item))
|
||||
includes["build"] = [
|
||||
env.subst("$PROJECT_INCLUDE_DIR"),
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
includes["build"].extend(
|
||||
[os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
||||
# installed libs
|
||||
includes["compatlib"] = []
|
||||
for lb in env.GetLibBuilders():
|
||||
includes.extend(lb.get_include_dirs())
|
||||
includes["compatlib"].extend(
|
||||
[os.path.realpath(inc) for inc in lb.get_include_dirs()]
|
||||
)
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
includes["toolchain"] = []
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = glob_escape(p.get_package_dir(name))
|
||||
toolchain_incglobs = [
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes.extend(glob(g))
|
||||
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
|
||||
|
||||
includes["unity"] = []
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
if unity_dir:
|
||||
includes.append(unity_dir)
|
||||
includes["unity"].append(unity_dir)
|
||||
|
||||
includes.extend([env.subst("$PROJECT_INCLUDE_DIR"), env.subst("$PROJECT_SRC_DIR")])
|
||||
|
||||
# remove duplicates
|
||||
result = []
|
||||
for item in includes:
|
||||
item = os.path.realpath(item)
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
|
||||
return result
|
||||
return includes
|
||||
|
||||
|
||||
def _get_gcc_defines(env):
|
||||
@ -158,8 +159,6 @@ def DumpIDEData(env):
|
||||
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
|
||||
"defines": _dump_defines(env),
|
||||
"includes": _dump_includes(env),
|
||||
"cc_flags": env.subst(LINTCCOM),
|
||||
"cxx_flags": env.subst(LINTCXXCOM),
|
||||
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
|
||||
# pylint: disable=too-many-instance-attributes, too-many-public-methods
|
||||
# pylint: disable=assignment-from-no-return
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
@ -140,13 +140,10 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
|
||||
def _get_plaform_data():
|
||||
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
|
||||
src_manifest_path = platform.pm.get_src_manifest_path(platform.get_dir())
|
||||
if src_manifest_path:
|
||||
src_manifest = fs.load_json(src_manifest_path)
|
||||
if "version" in src_manifest:
|
||||
data.append("#" + src_manifest["version"])
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
data.append("(%s)" % src_manifest["url"])
|
||||
if platform.src_version:
|
||||
data.append("#" + platform.src_version)
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)) and platform.src_url:
|
||||
data.append("(%s)" % platform.src_url)
|
||||
if board_config:
|
||||
data.extend([">", board_config.get("name")])
|
||||
return data
|
||||
@ -196,20 +193,14 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
|
||||
def _get_packages_data():
|
||||
data = []
|
||||
for name, options in platform.packages.items():
|
||||
if options.get("optional"):
|
||||
continue
|
||||
pkg_dir = platform.get_package_dir(name)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
manifest = platform.pm.load_manifest(pkg_dir)
|
||||
original_version = util.get_original_version(manifest["version"])
|
||||
info = "%s %s" % (manifest["name"], manifest["version"])
|
||||
for item in platform.dump_used_packages():
|
||||
original_version = util.get_original_version(item["version"])
|
||||
info = "%s %s" % (item["name"], item["version"])
|
||||
extra = []
|
||||
if original_version:
|
||||
extra.append(original_version)
|
||||
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
extra.append(manifest["__src_url"])
|
||||
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
extra.append(item["src_url"])
|
||||
if extra:
|
||||
info += " (%s)" % ", ".join(extra)
|
||||
data.append(info)
|
||||
|
@ -285,7 +285,7 @@ def CollectBuildFiles(
|
||||
for callback, pattern in env.get("__PIO_BUILD_MIDDLEWARES", []):
|
||||
tmp = []
|
||||
for node in sources:
|
||||
if pattern and not fnmatch.fnmatch(node.get_path(), pattern):
|
||||
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
|
||||
tmp.append(node)
|
||||
continue
|
||||
n = callback(node)
|
||||
|
@ -1,72 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PIO Account")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("register", short_help="Create new PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
def account_register(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("login", short_help="Log in to PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
@click.option("-p", "--password")
|
||||
def account_login(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("logout", short_help="Log out of PIO Account")
|
||||
def account_logout():
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
@click.option("--old-password")
|
||||
@click.option("--new-password")
|
||||
def account_password(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password")
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("forgot", short_help="Forgot password")
|
||||
@click.option("-u", "--username")
|
||||
def account_forgot(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("show", short_help="PIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
platformio/commands/account/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
platformio/commands/account/client.py (new file, 235 lines)
@ -0,0 +1,235 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import requests.adapters
|
||||
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
|
||||
|
||||
from platformio import __pioaccount_api__, app
|
||||
from platformio.commands.account import exception
|
||||
|
||||
|
||||
class AccountClient(object):
|
||||
def __init__(
|
||||
self, api_base_url=__pioaccount_api__, retries=3,
|
||||
):
|
||||
if api_base_url.endswith("/"):
|
||||
api_base_url = api_base_url[:-1]
|
||||
self.api_base_url = api_base_url
|
||||
self._session = requests.Session()
|
||||
self._session.headers.update({"User-Agent": app.get_user_agent()})
|
||||
retry = Retry(
|
||||
total=retries,
|
||||
read=retries,
|
||||
connect=retries,
|
||||
backoff_factor=2,
|
||||
method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
|
||||
)
|
||||
adapter = requests.adapters.HTTPAdapter(max_retries=retry)
|
||||
self._session.mount(api_base_url, adapter)
|
||||
|
||||
def login(self, username, password):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise exception.AccountAlreadyAuthenticated(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/login",
|
||||
data={"username": username, "password": password},
|
||||
)
|
||||
result = self.raise_error_from_response(response)
|
||||
app.set_state_item("account", result)
|
||||
return result
|
||||
|
||||
def login_with_code(self, client_id, code, redirect_uri):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise exception.AccountAlreadyAuthenticated(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/login/code",
|
||||
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
|
||||
)
|
||||
result = self.raise_error_from_response(response)
|
||||
app.set_state_item("account", result)
|
||||
return result
|
||||
|
||||
def logout(self):
|
||||
try:
|
||||
refresh_token = self.get_refresh_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
response = requests.post(
|
||||
self.api_base_url + "/v1/logout", data={"refresh_token": refresh_token},
|
||||
)
|
||||
try:
|
||||
self.raise_error_from_response(response)
|
||||
except exception.AccountError:
|
||||
pass
|
||||
app.delete_state_item("account")
|
||||
return True
|
||||
|
||||
def change_password(self, old_password, new_password):
|
||||
try:
|
||||
token = self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/password",
|
||||
headers={"Authorization": "Bearer %s" % token},
|
||||
data={"old_password": old_password, "new_password": new_password},
|
||||
)
|
||||
self.raise_error_from_response(response)
|
||||
return True
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint:disable=too-many-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise exception.AccountAlreadyAuthenticated(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/registration",
|
||||
data={
|
||||
"username": username,
|
||||
"email": email,
|
||||
"password": password,
|
||||
"firstname": firstname,
|
||||
"lastname": lastname,
|
||||
},
|
||||
)
|
||||
return self.raise_error_from_response(response)
|
||||
|
||||
def auth_token(self, password, regenerate):
|
||||
try:
|
||||
token = self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/token",
|
||||
headers={"Authorization": "Bearer %s" % token},
|
||||
data={"password": password, "regenerate": 1 if regenerate else 0},
|
||||
)
|
||||
return self.raise_error_from_response(response).get("auth_token")
|
||||
|
||||
def forgot_password(self, username):
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/forgot", data={"username": username},
|
||||
)
|
||||
return self.raise_error_from_response(response).get("auth_token")
|
||||
|
||||
def get_profile(self):
|
||||
try:
|
||||
token = self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
response = self._session.get(
|
||||
self.api_base_url + "/v1/profile",
|
||||
headers={"Authorization": "Bearer %s" % token},
|
||||
)
|
||||
return self.raise_error_from_response(response)
|
||||
|
||||
def update_profile(self, profile, current_password):
|
||||
try:
|
||||
token = self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
profile["current_password"] = current_password
|
||||
response = self._session.put(
|
||||
self.api_base_url + "/v1/profile",
|
||||
headers={"Authorization": "Bearer %s" % token},
|
||||
data=profile,
|
||||
)
|
||||
return self.raise_error_from_response(response)
|
||||
|
||||
def get_account_info(self, offline):
|
||||
if offline:
|
||||
account = app.get_state_item("account")
|
||||
if not account:
|
||||
raise exception.AccountNotAuthenticated()
|
||||
return {
|
||||
"profile": {
|
||||
"email": account.get("email"),
|
||||
"username": account.get("username"),
|
||||
}
|
||||
}
|
||||
try:
|
||||
token = self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
response = self._session.get(
|
||||
self.api_base_url + "/v1/summary",
|
||||
headers={"Authorization": "Bearer %s" % token},
|
||||
)
|
||||
return self.raise_error_from_response(response)
|
||||
|
||||
def fetch_authentication_token(self):
|
||||
if "PLATFORMIO_AUTH_TOKEN" in os.environ:
|
||||
return os.environ["PLATFORMIO_AUTH_TOKEN"]
|
||||
auth = app.get_state_item("account", {}).get("auth", {})
|
||||
if auth.get("access_token") and auth.get("access_token_expire"):
|
||||
if auth.get("access_token_expire") > time.time():
|
||||
return auth.get("access_token")
|
||||
if auth.get("refresh_token"):
|
||||
response = self._session.post(
|
||||
self.api_base_url + "/v1/login",
|
||||
headers={"Authorization": "Bearer %s" % auth.get("refresh_token")},
|
||||
)
|
||||
result = self.raise_error_from_response(response)
|
||||
app.set_state_item("account", result)
|
||||
return result.get("auth").get("access_token")
|
||||
raise exception.AccountNotAuthenticated()
|
||||
|
||||
@staticmethod
|
||||
def get_refresh_token():
|
||||
try:
|
||||
auth = app.get_state_item("account").get("auth").get("refresh_token")
|
||||
return auth
|
||||
except: # pylint:disable=bare-except
|
||||
raise exception.AccountNotAuthenticated()
|
||||
|
||||
@staticmethod
|
||||
def raise_error_from_response(response, expected_codes=(200, 201, 202)):
|
||||
if response.status_code in expected_codes:
|
||||
try:
|
||||
return response.json()
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
message = response.json()["message"]
|
||||
except (KeyError, ValueError):
|
||||
message = response.text
|
||||
if "Authorization session has been expired" in message:
|
||||
app.delete_state_item("account")
|
||||
raise exception.AccountError(message)
|
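For orientation (not part of the commit), a minimal sketch of driving the new AccountClient from Python code, using only names introduced in the file above; the credentials are placeholders:

from platformio.commands.account import exception
from platformio.commands.account.client import AccountClient

client = AccountClient()  # defaults to __pioaccount_api__ with 3 retries
try:
    client.login("example-user", "example-password")  # placeholder credentials
    profile = client.get_profile()
    print(profile.get("username"), profile.get("email"))
except exception.AccountAlreadyAuthenticated:
    print("Already logged in:", client.get_account_info(offline=True))
except exception.AccountError as exc:
    print("Account request failed:", exc)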
platformio/commands/account/command.py (new file, 278 lines)
@ -0,0 +1,278 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import re
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio.commands.account import exception
|
||||
from platformio.commands.account.client import AccountClient
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PIO Account")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
def validate_username(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){3,38}$", value, flags=re.I):
|
||||
raise click.BadParameter(
|
||||
"Invalid username format. "
|
||||
"Username must contain at least 4 characters including single hyphens,"
|
||||
" and cannot begin or end with a hyphen"
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def validate_email(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
|
||||
raise click.BadParameter("Invalid email address")
|
||||
return value
|
||||
|
||||
|
||||
def validate_password(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
|
||||
raise click.BadParameter(
|
||||
"Invalid password format. "
|
||||
"Password must contain at least 8 characters"
|
||||
" including a number and a lowercase letter"
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
@cli.command("register", short_help="Create new PIO Account")
|
||||
@click.option(
|
||||
"-u",
|
||||
"--username",
|
||||
prompt=True,
|
||||
callback=lambda _, __, value: validate_username(value),
|
||||
)
|
||||
@click.option(
|
||||
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
|
||||
)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--password",
|
||||
prompt=True,
|
||||
hide_input=True,
|
||||
confirmation_prompt=True,
|
||||
callback=lambda _, __, value: validate_password(value),
|
||||
)
|
||||
@click.option("--firstname", prompt=True)
|
||||
@click.option("--lastname", prompt=True)
|
||||
def account_register(username, email, password, firstname, lastname):
|
||||
client = AccountClient()
|
||||
client.registration(username, email, password, firstname, lastname)
|
||||
return click.secho(
|
||||
"An account has been successfully created. "
|
||||
"Please check your mail to activate your account and verify your email address.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("login", short_help="Log in to PIO Account")
|
||||
@click.option("-u", "--username", prompt="Username or email")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
def account_login(username, password):
|
||||
client = AccountClient()
|
||||
client.login(username, password)
|
||||
return click.secho("Successfully logged in!", fg="green")
|
||||
|
||||
|
||||
@cli.command("logout", short_help="Log out of PIO Account")
|
||||
def account_logout():
|
||||
client = AccountClient()
|
||||
client.logout()
|
||||
return click.secho("Successfully logged out!", fg="green")
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
@click.option("--old-password", prompt=True, hide_input=True)
|
||||
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
|
||||
def account_password(old_password, new_password):
|
||||
client = AccountClient()
|
||||
client.change_password(old_password, new_password)
|
||||
return click.secho("Password successfully changed!", fg="green")
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(password, regenerate, json_output):
|
||||
client = AccountClient()
|
||||
auth_token = client.auth_token(password, regenerate)
|
||||
if json_output:
|
||||
return click.echo(json.dumps({"status": "success", "result": auth_token}))
|
||||
return click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
|
||||
|
||||
|
||||
@cli.command("forgot", short_help="Forgot password")
|
||||
@click.option("--username", prompt="Username or email")
|
||||
def account_forgot(username):
|
||||
client = AccountClient()
|
||||
client.forgot_password(username)
|
||||
return click.secho(
|
||||
"If this account is registered, we will send the "
|
||||
"further instructions to your email.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update profile information")
|
||||
@click.option("--current-password", prompt=True, hide_input=True)
|
||||
@click.option("--username")
|
||||
@click.option("--email")
|
||||
@click.option("--firstname")
|
||||
@click.option("--lastname")
|
||||
def account_update(current_password, **kwargs):
|
||||
client = AccountClient()
|
||||
profile = client.get_profile()
|
||||
new_profile = profile.copy()
|
||||
if not any(kwargs.values()):
|
||||
for field in profile:
|
||||
new_profile[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=profile[field]
|
||||
)
|
||||
if field == "email":
|
||||
validate_email(new_profile[field])
|
||||
if field == "username":
|
||||
validate_username(new_profile[field])
|
||||
else:
|
||||
new_profile.update({key: value for key, value in kwargs.items() if value})
|
||||
client.update_profile(new_profile, current_password)
|
||||
click.secho("Profile successfully updated!", fg="green")
|
||||
username_changed = new_profile["username"] != profile["username"]
|
||||
email_changed = new_profile["email"] != profile["email"]
|
||||
if not username_changed and not email_changed:
|
||||
return None
|
||||
try:
|
||||
client.logout()
|
||||
except exception.AccountNotAuthenticated:
|
||||
pass
|
||||
if email_changed:
|
||||
return click.secho(
|
||||
"Please check your mail to verify your new email address and re-login. ",
|
||||
fg="yellow",
|
||||
)
|
||||
return click.secho("Please re-login.", fg="yellow")
|
||||
|
||||
|
||||
@cli.command("show", short_help="PIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(offline, json_output):
|
||||
client = AccountClient()
|
||||
info = client.get_account_info(offline)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(info))
|
||||
click.echo()
|
||||
if info.get("profile"):
|
||||
print_profile(info["profile"])
|
||||
if info.get("packages"):
|
||||
print_packages(info["packages"])
|
||||
if info.get("subscriptions"):
|
||||
print_subscriptions(info["subscriptions"])
|
||||
return click.echo()
|
||||
|
||||
|
||||
def print_profile(profile):
|
||||
click.secho("Profile", fg="cyan", bold=True)
|
||||
click.echo("=" * len("Profile"))
|
||||
data = []
|
||||
if profile.get("username"):
|
||||
data.append(("Username:", profile["username"]))
|
||||
if profile.get("email"):
|
||||
data.append(("Email:", profile["email"]))
|
||||
if profile.get("firstname"):
|
||||
data.append(("First name:", profile["firstname"]))
|
||||
if profile.get("lastname"):
|
||||
data.append(("Last name:", profile["lastname"]))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_packages(packages):
|
||||
click.echo()
|
||||
click.secho("Packages", fg="cyan")
|
||||
click.echo("=" * len("Packages"))
|
||||
for package in packages:
|
||||
click.echo()
|
||||
click.secho(package.get("name"), bold=True)
|
||||
click.echo("-" * len(package.get("name")))
|
||||
if package.get("description"):
|
||||
click.echo(package.get("description"))
|
||||
data = []
|
||||
expire = "-"
|
||||
if "subscription" in package:
|
||||
expire = datetime.datetime.strptime(
|
||||
(
|
||||
package["subscription"].get("end_at")
|
||||
or package["subscription"].get("next_bill_at")
|
||||
),
|
||||
"%Y-%m-%dT%H:%M:%SZ",
|
||||
).strftime("%Y-%m-%d")
|
||||
data.append(("Expire:", expire))
|
||||
services = []
|
||||
for key in package:
|
||||
if not key.startswith("service."):
|
||||
continue
|
||||
if isinstance(package[key], dict):
|
||||
services.append(package[key].get("title"))
|
||||
else:
|
||||
services.append(package[key])
|
||||
if services:
|
||||
data.append(("Services:", ", ".join(services)))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_subscriptions(subscriptions):
|
||||
click.echo()
|
||||
click.secho("Subscriptions", fg="cyan")
|
||||
click.echo("=" * len("Subscriptions"))
|
||||
for subscription in subscriptions:
|
||||
click.echo()
|
||||
click.secho(subscription.get("product_name"), bold=True)
|
||||
click.echo("-" * len(subscription.get("product_name")))
|
||||
data = [("State:", subscription.get("status"))]
|
||||
begin_at = datetime.datetime.strptime(
|
||||
subscription.get("begin_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("Start date:", begin_at or "-"))
|
||||
end_at = subscription.get("end_at")
|
||||
if end_at:
|
||||
end_at = datetime.datetime.strptime(
|
||||
subscription.get("end_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("End date:", end_at or "-"))
|
||||
next_bill_at = subscription.get("next_bill_at")
|
||||
if next_bill_at:
|
||||
next_bill_at = datetime.datetime.strptime(
|
||||
subscription.get("next_bill_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("Next payment:", next_bill_at or "-"))
|
||||
data.append(
|
||||
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
|
||||
)
|
||||
data.append(
|
||||
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
|
||||
)
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
platformio/commands/account/exception.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.exception import PlatformioException


class AccountError(PlatformioException):

    MESSAGE = "{0}"


class AccountNotAuthenticated(AccountError):

    MESSAGE = "You are not authenticated! Please login to PIO Account."


class AccountAlreadyAuthenticated(AccountError):

    MESSAGE = "You are already authenticated with {0} account."
@ -61,6 +61,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
multiple=True,
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli(
|
||||
environment,
|
||||
project_dir,
|
||||
@ -72,6 +73,7 @@ def cli(
|
||||
verbose,
|
||||
json_output,
|
||||
fail_on_defect,
|
||||
skip_packages,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
@ -114,6 +116,7 @@ def cli(
|
||||
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity"),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
)
|
||||
|
||||
for tool in config.get("env:" + envname, "check_tool"):
|
||||
@ -222,7 +225,7 @@ def collect_component_stats(result):
|
||||
component = dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.startswith(get_project_dir()):
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
@ -51,7 +51,7 @@ class DefectItem(object):
|
||||
self.cwe = cwe
|
||||
self.id = id
|
||||
self.file = file
|
||||
if file.startswith(get_project_dir()):
|
||||
if file.lower().startswith(get_project_dir().lower()):
|
||||
self.file = os.path.relpath(file, get_project_dir())
|
||||
|
||||
def __repr__(self):
|
||||
|
@ -14,12 +14,13 @@
|
||||
|
||||
import glob
|
||||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs, proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.project.helpers import get_project_dir, load_project_ide_data
|
||||
from platformio.project.helpers import load_project_ide_data
|
||||
|
||||
|
||||
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
@ -32,12 +33,13 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
self.cpp_includes = []
|
||||
self.cpp_defines = []
|
||||
self.toolchain_defines = []
|
||||
self._tmp_files = []
|
||||
self.cc_path = None
|
||||
self.cxx_path = None
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data(project_dir, envname)
|
||||
self._load_cpp_data(project_dir)
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
@ -52,17 +54,17 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self, project_dir, envname):
|
||||
data = load_project_ide_data(project_dir, envname)
|
||||
def _load_cpp_data(self, project_dir):
|
||||
data = load_project_ide_data(project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = data.get("cc_flags", "").split(" ")
|
||||
self.cxx_flags = data.get("cxx_flags", "").split(" ")
|
||||
self.cpp_includes = data.get("includes", [])
|
||||
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
|
||||
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
|
||||
self.cpp_includes = self._dump_includes(data.get("includes", {}))
|
||||
self.cpp_defines = data.get("defines", [])
|
||||
self.cc_path = data.get("cc_path")
|
||||
self.cxx_path = data.get("cxx_path")
|
||||
self.toolchain_defines = self._get_toolchain_defines(self.cc_path)
|
||||
self.toolchain_defines = self._get_toolchain_defines()
|
||||
|
||||
def get_flags(self, tool):
|
||||
result = []
|
||||
@ -75,21 +77,52 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
return result
|
||||
|
||||
def _get_toolchain_defines(self):
|
||||
def _extract_defines(language, includes_file):
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
defines = []
|
||||
cmd = "echo | %s -x %s %s %s -dM -E -" % (
|
||||
self.cc_path,
|
||||
language,
|
||||
" ".join([f for f in build_flags if f.startswith(("-m", "-f"))]),
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
defines.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
defines.append(tokens[1])
|
||||
|
||||
return defines
|
||||
|
||||
incflags_file = self._long_includes_hook(self.cpp_includes)
|
||||
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
|
||||
def _long_includes_hook(self, includes):
|
||||
data = []
|
||||
for inc in includes:
|
||||
data.append('-I"%s"' % fs.to_unix_path(inc))
|
||||
|
||||
return '@"%s"' % self._create_tmp_file(" ".join(data))
|
||||
|
||||
@staticmethod
|
||||
def _get_toolchain_defines(cc_path):
|
||||
defines = []
|
||||
result = proc.exec_command("echo | %s -dM -E -x c++ -" % cc_path, shell=True)
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
defines.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
defines.append(tokens[1])
|
||||
|
||||
return defines
|
||||
def _dump_includes(includes_map):
|
||||
result = []
|
||||
for includes in includes_map.values():
|
||||
for include in includes:
|
||||
if include not in result:
|
||||
result.append(include)
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def is_flag_set(flag, flags):
|
||||
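A hedged aside (not part of the commit): the "echo | <cc> -x <lang> ... -dM -E -" trick used by _extract_defines() above can be reproduced on its own to inspect a toolchain's predefined macros; the compiler path and flags below are placeholders:

import subprocess

def dump_predefined_macros(cc_path="gcc", language="c++", extra_flags=()):
    # Compile an empty translation unit and collect the "#define" lines the
    # toolchain injects, the same parsing done by _extract_defines() in this diff.
    cmd = [cc_path, "-x", language] + list(extra_flags) + ["-dM", "-E", "-"]
    out = subprocess.run(cmd, input="", capture_output=True, text=True, check=True).stdout
    defines = []
    for line in out.splitlines():
        tokens = line.strip().split(" ", 2)
        if not tokens or tokens[0] != "#define":
            continue
        defines.append(tokens[1] if len(tokens) < 3 else "%s=%s" % (tokens[1], tokens[2]))
    return defines

# Example with a placeholder toolchain: dump_predefined_macros("gcc", "c", ["-m32"])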
@ -129,18 +162,27 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
return raw_line
|
||||
|
||||
def clean_up(self):
|
||||
pass
|
||||
for f in self._tmp_files:
|
||||
if os.path.isfile(f):
|
||||
os.remove(f)
|
||||
|
||||
def get_project_target_files(self):
|
||||
allowed_extensions = (".h", ".hpp", ".c", ".cc", ".cpp", ".ino")
|
||||
result = []
|
||||
@staticmethod
|
||||
def get_project_target_files(patterns):
|
||||
c_extension = (".c",)
|
||||
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
|
||||
header_extensions = (".h", ".hh", ".hpp", ".hxx")
|
||||
|
||||
result = {"c": [], "c++": [], "headers": []}
|
||||
|
||||
def _add_file(path):
|
||||
if not path.endswith(allowed_extensions):
|
||||
return
|
||||
result.append(os.path.realpath(path))
|
||||
if path.endswith(header_extensions):
|
||||
result["headers"].append(os.path.realpath(path))
|
||||
elif path.endswith(c_extension):
|
||||
result["c"].append(os.path.realpath(path))
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.realpath(path))
|
||||
|
||||
for pattern in self.options["patterns"]:
|
||||
for pattern in patterns:
|
||||
for item in glob.glob(pattern):
|
||||
if not os.path.isdir(item):
|
||||
_add_file(item)
|
||||
@ -150,27 +192,23 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
return result
|
||||
|
||||
def get_source_language(self):
|
||||
with fs.cd(get_project_dir()):
|
||||
for _, __, files in os.walk(self.config.get_optional_dir("src")):
|
||||
for name in files:
|
||||
if "." not in name:
|
||||
continue
|
||||
if os.path.splitext(name)[1].lower() in (".cpp", ".cxx", ".ino"):
|
||||
return "c++"
|
||||
return "c"
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
cmd = self.configure_command()
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
if cmd:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
click.echo("Error: Couldn't configure command")
|
||||
self._bad_input = True
|
||||
|
||||
self.clean_up()
|
||||
|
||||
|
@ -57,11 +57,28 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
if not self.is_flag_set("--checks", flags):
|
||||
cmd.append("--checks=*")
|
||||
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
|
||||
src_files = []
|
||||
for scope in project_files:
|
||||
src_files.extend(project_files[scope])
|
||||
|
||||
cmd.extend(flags)
|
||||
cmd.extend(self.get_project_target_files())
|
||||
cmd.extend(src_files)
|
||||
cmd.append("--")
|
||||
|
||||
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
|
||||
cmd.extend(["-I%s" % inc for inc in self.cpp_includes])
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
|
||||
)
|
||||
|
||||
includes = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get_optional_dir("packages").lower()
|
||||
):
|
||||
continue
|
||||
includes.append(inc)
|
||||
|
||||
cmd.append("--extra-arg=" + self._long_includes_hook(includes))
|
||||
|
||||
return cmd
|
||||
|
@ -12,10 +12,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import remove
|
||||
from os.path import isfile, join
|
||||
from tempfile import NamedTemporaryFile
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.commands.check.tools.base import CheckToolBase
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
@ -23,7 +24,6 @@ from platformio.managers.core import get_core_package_dir
|
||||
|
||||
class CppcheckCheckTool(CheckToolBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._tmp_files = []
|
||||
self.defect_fields = [
|
||||
"severity",
|
||||
"message",
|
||||
@ -74,10 +74,32 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
else:
|
||||
args["severity"] = DefectItem.SEVERITY_LOW
|
||||
|
||||
# Skip defects found in third-party software, but keep in mind that such defects
|
||||
# might break checking process so defects from project files are not reported
|
||||
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
|
||||
if (
|
||||
args.get("file", "")
|
||||
.lower()
|
||||
.startswith(self.config.get_optional_dir("packages").lower())
|
||||
):
|
||||
if args["id"] in breaking_defect_ids:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(
|
||||
"Error: Found a breaking defect '%s' in %s:%s\n"
|
||||
"Please note: check results might not be valid!\n"
|
||||
"Try adding --skip-packages"
|
||||
% (args.get("message"), args.get("file"), args.get("line"))
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
return None
|
||||
|
||||
return DefectItem(**args)
|
||||
|
||||
def configure_command(self):
|
||||
tool_path = join(get_core_package_dir("tool-cppcheck"), "cppcheck")
|
||||
def configure_command(
|
||||
self, language, src_files
|
||||
): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
|
||||
|
||||
cmd = [
|
||||
tool_path,
|
||||
@ -108,51 +130,112 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
cmd.append("--enable=%s" % ",".join(enabled_checks))
|
||||
|
||||
if not self.is_flag_set("--language", flags):
|
||||
if self.get_source_language() == "c++":
|
||||
cmd.append("--language=c++")
|
||||
cmd.append("--language=" + language)
|
||||
|
||||
if not self.is_flag_set("--std", flags):
|
||||
for f in self.cxx_flags + self.cc_flags:
|
||||
if "-std" in f:
|
||||
# Standards with GNU extensions are not allowed
|
||||
cmd.append("-" + f.replace("gnu", "c"))
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
# Standards with GNU extensions are not allowed
|
||||
cmd.append("-" + flag.replace("gnu", "c"))
|
||||
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
|
||||
)
|
||||
|
||||
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
|
||||
cmd.extend(flags)
|
||||
|
||||
cmd.append("--file-list=%s" % self._generate_src_file())
|
||||
cmd.extend(
|
||||
"--include=" + inc
|
||||
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
|
||||
)
|
||||
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
|
||||
cmd.append("--includes-file=%s" % self._generate_inc_file())
|
||||
|
||||
core_dir = self.config.get_optional_dir("packages")
|
||||
cmd.append("--suppress=*:%s*" % core_dir)
|
||||
cmd.append("--suppress=unmatchedSuppression:%s*" % core_dir)
|
||||
|
||||
return cmd
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
@staticmethod
|
||||
def get_forced_includes(build_flags, includes):
|
||||
def _extract_filepath(flag, include_options, build_flags):
|
||||
path = ""
|
||||
for option in include_options:
|
||||
if not flag.startswith(option):
|
||||
continue
|
||||
if flag.split(option)[1].strip():
|
||||
path = flag.split(option)[1].strip()
|
||||
elif build_flags.index(flag) + 1 < len(build_flags):
|
||||
path = build_flags[build_flags.index(flag) + 1]
|
||||
return path
|
||||
|
||||
def _generate_src_file(self):
|
||||
src_files = [
|
||||
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
|
||||
]
|
||||
def _search_include_dir(filepath, include_paths):
|
||||
for inc_path in include_paths:
|
||||
path = os.path.join(inc_path, filepath)
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
return ""
|
||||
|
||||
result = []
|
||||
include_options = ("-include", "-imacros")
|
||||
for f in build_flags:
|
||||
if f.startswith(include_options):
|
||||
filepath = _extract_filepath(f, include_options, build_flags)
|
||||
if not os.path.isabs(filepath):
|
||||
filepath = _search_include_dir(filepath, includes)
|
||||
if os.path.isfile(filepath):
|
||||
result.append(filepath)
|
||||
|
||||
return result
|
||||
|
||||
def _generate_src_file(self, src_files):
|
||||
return self._create_tmp_file("\n".join(src_files))
|
||||
|
||||
def _generate_inc_file(self):
|
||||
return self._create_tmp_file("\n".join(self.cpp_includes))
|
||||
result = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get_optional_dir("packages").lower()
|
||||
):
|
||||
continue
|
||||
result.append(inc)
|
||||
return self._create_tmp_file("\n".join(result))
|
||||
|
||||
def clean_up(self):
|
||||
for f in self._tmp_files:
|
||||
if isfile(f):
|
||||
remove(f)
|
||||
super(CppcheckCheckTool, self).clean_up()
|
||||
|
||||
# delete temporary dump files generated by addons
|
||||
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
|
||||
return
|
||||
for f in self.get_project_target_files():
|
||||
dump_file = f + ".dump"
|
||||
if isfile(dump_file):
|
||||
remove(dump_file)
|
||||
|
||||
for files in self.get_project_target_files(self.options["patterns"]).values():
|
||||
for f in files:
|
||||
dump_file = f + ".dump"
|
||||
if os.path.isfile(dump_file):
|
||||
os.remove(dump_file)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
|
||||
languages = ("c", "c++")
|
||||
if not any([project_files[t] for t in languages]):
|
||||
click.echo("Error: Nothing to check.")
|
||||
return True
|
||||
for language in languages:
|
||||
if not project_files[language]:
|
||||
continue
|
||||
cmd = self.configure_command(language, project_files[language])
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
|
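The reworked cppcheck check() above runs one tool invocation per language scope instead of a single global pass. A condensed sketch of that loop with hypothetical helper callables:

def check_all(project_files, configure_command, run):
    # project_files: {"c": [...], "c++": [...]}; run() executes the command
    bad_input = False
    languages = ("c", "c++")
    if not any(project_files.get(lang) for lang in languages):
        print("Error: Nothing to check.")
        return True
    for language in languages:
        files = project_files.get(language)
        if not files:
            continue
        cmd = configure_command(language, files)
        if not cmd:
            bad_input = True
            continue
        run(cmd)
    return bad_input

check_all(
    {"c": [], "c++": ["src/main.cpp"]},
    lambda lang, files: ["cppcheck", "--language=" + lang] + files,
    print,
)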
@ -140,9 +140,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
os.remove(self._tmp_output_file)
|
||||
|
||||
if not os.path.isfile(self._tmp_preprocessed_file):
|
||||
click.echo(
|
||||
"Error: Missing preprocessed file '%s'" % (self._tmp_preprocessed_file)
|
||||
)
|
||||
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
|
||||
return ""
|
||||
|
||||
cmd = [
|
||||
@ -175,6 +173,9 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
|
||||
|
||||
def _prepare_preprocessed_file(self, src_file):
|
||||
if os.path.isfile(self._tmp_preprocessed_file):
|
||||
os.remove(self._tmp_preprocessed_file)
|
||||
|
||||
flags = self.cxx_flags
|
||||
compiler = self.cxx_path
|
||||
if src_file.endswith(".c"):
|
||||
@ -186,40 +187,46 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
cmd.extend(["-D%s" % d for d in self.cpp_defines])
|
||||
cmd.append('@"%s"' % self._tmp_cmd_file)
|
||||
|
||||
# Explicitly specify C++ as the language used in .ino files
|
||||
if src_file.endswith(".ino"):
|
||||
cmd.insert(1, "-xc++")
|
||||
|
||||
result = proc.exec_command(" ".join(cmd), shell=True)
|
||||
if result["returncode"] != 0:
|
||||
if result["returncode"] != 0 or result["err"]:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
def clean_up(self):
|
||||
super(PvsStudioCheckTool, self).clean_up()
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
src_files = [
|
||||
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
|
||||
]
|
||||
|
||||
for src_file in src_files:
|
||||
self._prepare_preprocessed_file(src_file)
|
||||
cmd = self.configure_command(src_file)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.options["patterns"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
for src_file in files:
|
||||
self._prepare_preprocessed_file(src_file)
|
||||
cmd = self.configure_command(src_file)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if result["returncode"] != 0 or "License was not entered" in result["err"]:
|
||||
self._bad_input = True
|
||||
click.echo(result["err"])
|
||||
continue
|
||||
result = proc.exec_command(cmd)
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if result["returncode"] != 0 or "license" in result["err"].lower():
|
||||
self._bad_input = True
|
||||
click.echo(result["err"])
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
|
||||
self.clean_up()
|
||||
|
||||
|
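PvsStudioCheckTool.check() above preprocesses every C/C++ source with the project compiler and then analyzes the preprocessed output, skipping a file when the return code or a license message signals trouble. A rough sketch of that two-step flow; the compiler, analyzer command, and flags here are placeholders rather than the real invocation:

import subprocess

def analyze_file(src_file, compiler, flags, analyzer_cmd, preprocessed="tmp.i"):
    # 1. Preprocess (-E) so the analyzer sees fully expanded code
    subprocess.run([compiler, "-E", src_file, "-o", preprocessed] + flags, check=True)
    # 2. Run the analyzer on the preprocessed file (exact flags are tool-specific)
    result = subprocess.run(
        analyzer_cmd + [preprocessed], capture_output=True, text=True
    )
    if result.returncode != 0 or "license" in result.stderr.lower():
        raise RuntimeError(result.stderr)
    return result.stdout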
@ -148,7 +148,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
|
||||
inject_contrib_pysite()
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.debug.client import GDBClient, reactor
|
||||
from platformio.commands.debug.process.client import GDBClient, reactor
|
||||
|
||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
||||
client.spawn(configuration["gdb_path"], configuration["prog_path"])
|
||||
|
13
platformio/commands/debug/process/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
@ -30,8 +30,8 @@ from platformio import app, fs, proc, telemetry, util
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.debug.initcfgs import get_gdb_init_config
|
||||
from platformio.commands.debug.process import BaseProcess
|
||||
from platformio.commands.debug.server import DebugServer
|
||||
from platformio.commands.debug.process.base import BaseProcess
|
||||
from platformio.commands.debug.process.server import DebugServer
|
||||
from platformio.compat import hashlib_encode_data, is_bytes
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
@ -194,7 +194,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
|
||||
# go to init break automatically
|
||||
if self.INIT_COMPLETED_BANNER.encode() in data:
|
||||
telemetry.send_event(
|
||||
"Debug", "Started", telemetry.encode_run_environment(self.env_options)
|
||||
"Debug", "Started", telemetry.dump_run_environment(self.env_options)
|
||||
)
|
||||
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
|
||||
self._auto_continue_timer.start(0.1)
|
||||
@ -231,8 +231,11 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
|
||||
self._target_is_run = True
|
||||
|
||||
def _handle_error(self, data):
|
||||
self._errors_buffer += data
|
||||
if self.PIO_SRC_NAME.encode() not in data or b"Error in sourced" not in data:
|
||||
self._errors_buffer = (self._errors_buffer + data)[-8192:]  # keep the last 8 KB
|
||||
if not (
|
||||
self.PIO_SRC_NAME.encode() in self._errors_buffer
|
||||
and b"Error in sourced" in self._errors_buffer
|
||||
):
|
||||
return
|
||||
|
||||
last_errors = self._errors_buffer.decode()
|
||||
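The _handle_error() change above keeps only the last 8 KB of GDB error output, so the two marker substrings are still matched even when they arrive split across chunks. The same idea as a tiny standalone class:

class ErrorBuffer(object):
    MAX_SIZE = 8192  # 8 KB, matching the slice above

    def __init__(self, *markers):
        self.markers = markers
        self.data = b""

    def feed(self, chunk):
        # append, trim to the newest MAX_SIZE bytes, and report whether
        # every marker is currently visible in the window
        self.data = (self.data + chunk)[-self.MAX_SIZE:]
        return all(marker in self.data for marker in self.markers)

buf = ErrorBuffer(b"pio_src", b"Error in sourced")
print(buf.feed(b'&"Error in sourced commands file: pio_src"'))  # True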
@ -240,7 +243,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
|
||||
last_errors = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_errors, flags=re.M)
|
||||
|
||||
err = "%s -> %s" % (
|
||||
telemetry.encode_run_environment(self.env_options),
|
||||
telemetry.dump_run_environment(self.env_options),
|
||||
last_errors,
|
||||
)
|
||||
telemetry.send_exception("DebugInitError: %s" % err)
|
@ -22,7 +22,7 @@ from twisted.internet import reactor # pylint: disable=import-error
|
||||
from platformio import fs, util
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
|
||||
from platformio.commands.debug.process import BaseProcess
|
||||
from platformio.commands.debug.process.base import BaseProcess
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
@ -71,6 +71,7 @@ def cli(port, host, no_open, shutdown_timeout):
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.commands.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.commands.home.rpc.handlers.account import AccountRPC
|
||||
from platformio.commands.home.rpc.server import JSONRPCServerFactory
|
||||
from platformio.commands.home.web import WebRoot
|
||||
|
||||
@ -81,6 +82,7 @@ def cli(port, host, no_open, shutdown_timeout):
|
||||
factory.addHandler(OSRPC(), namespace="os")
|
||||
factory.addHandler(PIOCoreRPC(), namespace="core")
|
||||
factory.addHandler(ProjectRPC(), namespace="project")
|
||||
factory.addHandler(AccountRPC(), namespace="account")
|
||||
|
||||
contrib_dir = get_core_package_dir("contrib-piohome")
|
||||
if not isdir(contrib_dir):
|
||||
|
@ -12,7 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=keyword-arg-before-vararg, arguments-differ
|
||||
# pylint: disable=keyword-arg-before-vararg,arguments-differ,signature-differs
|
||||
|
||||
import os
|
||||
import socket
|
||||
|
29
platformio/commands/home/rpc/handlers/account.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import jsonrpc # pylint: disable=import-error
|
||||
|
||||
from platformio.commands.account.client import AccountClient
|
||||
|
||||
|
||||
class AccountRPC(object):
|
||||
@staticmethod
|
||||
def call_client(method, *args, **kwargs):
|
||||
try:
|
||||
client = AccountClient()
|
||||
return getattr(client, method)(*args, **kwargs)
|
||||
except Exception as e:  # pylint: disable=broad-except
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
code=4003, message="PIO Account Call Error", data=str(e)
|
||||
)
|
@ -96,7 +96,7 @@ class PIOCoreRPC(object):
|
||||
to_json = "--json-output" in args
|
||||
|
||||
try:
|
||||
if args and args[0] in ("account", "remote"):
|
||||
if args and args[0] == "remote":
|
||||
result = yield PIOCoreRPC._call_subprocess(args, options)
|
||||
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
|
||||
else:
|
||||
|
@ -29,6 +29,7 @@ from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.package.manifest.schema import ManifestSchema
|
||||
from platformio.proc import is_ci
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import InvalidProjectConfError
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
|
||||
try:
|
||||
@ -180,7 +181,10 @@ def lib_install( # pylint: disable=too-many-arguments
|
||||
if project_environments and env not in project_environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
lib_deps = config.get("env:" + env, "lib_deps", [])
|
||||
try:
|
||||
lib_deps = config.get("env:" + env, "lib_deps")
|
||||
except InvalidProjectConfError:
|
||||
lib_deps = []
|
||||
for library in libraries:
|
||||
if library in lib_deps:
|
||||
continue
|
||||
|
13
platformio/commands/remote/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
13
platformio/commands/remote/ac/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
91
platformio/commands/remote/ac/base.py
Normal file
@ -0,0 +1,91 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
|
||||
|
||||
class AsyncCommandBase(object):
|
||||
|
||||
MAX_BUFFER_SIZE = 1024 * 1024  # 1 MB
|
||||
|
||||
def __init__(self, options=None, on_end_callback=None):
|
||||
self.options = options or {}
|
||||
self.on_end_callback = on_end_callback
|
||||
self._buffer = b""
|
||||
self._return_code = None
|
||||
self._d = None
|
||||
self._paused = False
|
||||
|
||||
try:
|
||||
self.start()
|
||||
except Exception as e:
|
||||
raise pb.Error(str(e))
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return id(self)
|
||||
|
||||
def pause(self):
|
||||
self._paused = True
|
||||
self.stop()
|
||||
|
||||
def unpause(self):
|
||||
self._paused = False
|
||||
self.start()
|
||||
|
||||
def start(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def stop(self):
|
||||
self.transport.loseConnection() # pylint: disable=no-member
|
||||
|
||||
def _ac_ended(self):
|
||||
if self.on_end_callback:
|
||||
self.on_end_callback()
|
||||
if not self._d or self._d.called:
|
||||
self._d = None
|
||||
return
|
||||
if self._buffer:
|
||||
self._d.callback(self._buffer)
|
||||
else:
|
||||
self._d.callback(None)
|
||||
|
||||
def _ac_ondata(self, data):
|
||||
self._buffer += data
|
||||
if len(self._buffer) > self.MAX_BUFFER_SIZE:
|
||||
self._buffer = self._buffer[-1 * self.MAX_BUFFER_SIZE :]
|
||||
if self._paused:
|
||||
return
|
||||
if self._d and not self._d.called:
|
||||
self._d.callback(self._buffer)
|
||||
self._buffer = b""
|
||||
|
||||
def ac_read(self):
|
||||
if self._buffer:
|
||||
result = self._buffer
|
||||
self._buffer = b""
|
||||
return result
|
||||
if self._return_code is None:
|
||||
self._d = defer.Deferred()
|
||||
return self._d
|
||||
return None
|
||||
|
||||
def ac_write(self, data):
|
||||
self.transport.write(data) # pylint: disable=no-member
|
||||
return len(data)
|
||||
|
||||
def ac_close(self):
|
||||
self.stop()
|
||||
return self._return_code
|
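AsyncCommandBase.ac_read() above either returns buffered output immediately or hands back a Deferred that fires when the next chunk arrives. A trimmed-down sketch of that handshake (pause and exit-code handling omitted; class and method names are illustrative):

from twisted.internet import defer  # pylint: disable=import-error

class AsyncChannel(object):
    def __init__(self):
        self._buffer = b""
        self._d = None

    def on_data(self, data):
        self._buffer += data
        if self._d and not self._d.called:
            # a reader is waiting: deliver everything and reset the buffer
            self._d.callback(self._buffer)
            self._buffer = b""

    def read(self):
        if self._buffer:
            result, self._buffer = self._buffer, b""
            return result
        # nothing buffered yet: give the caller a Deferred to wait on
        self._d = defer.Deferred()
        return self._d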
42
platformio/commands/remote/ac/process.py
Normal file
@ -0,0 +1,42 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from twisted.internet import protocol, reactor # pylint: disable=import-error
|
||||
|
||||
from platformio.commands.remote.ac.base import AsyncCommandBase
|
||||
|
||||
|
||||
class ProcessAsyncCmd(protocol.ProcessProtocol, AsyncCommandBase):
|
||||
def start(self):
|
||||
env = dict(os.environ).copy()
|
||||
env.update({"PLATFORMIO_FORCE_ANSI": "true"})
|
||||
reactor.spawnProcess(
|
||||
self, self.options["executable"], self.options["args"], env
|
||||
)
|
||||
|
||||
def outReceived(self, data):
|
||||
self._ac_ondata(data)
|
||||
|
||||
def errReceived(self, data):
|
||||
self._ac_ondata(data)
|
||||
|
||||
def processExited(self, reason):
|
||||
self._return_code = reason.value.exitCode
|
||||
|
||||
def processEnded(self, reason):
|
||||
if self._return_code is None:
|
||||
self._return_code = reason.value.exitCode
|
||||
self._ac_ended()
|
66
platformio/commands/remote/ac/psync.py
Normal file
@ -0,0 +1,66 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import zlib
|
||||
from io import BytesIO
|
||||
|
||||
from platformio.commands.remote.ac.base import AsyncCommandBase
|
||||
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
|
||||
|
||||
|
||||
class ProjectSyncAsyncCmd(AsyncCommandBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.psync = None
|
||||
self._upstream = None
|
||||
super(ProjectSyncAsyncCmd, self).__init__(*args, **kwargs)
|
||||
|
||||
def start(self):
|
||||
project_dir = os.path.join(
|
||||
self.options["agent_working_dir"], "projects", self.options["id"]
|
||||
)
|
||||
self.psync = ProjectSync(project_dir)
|
||||
for name in self.options["items"]:
|
||||
self.psync.add_item(os.path.join(project_dir, name), name)
|
||||
|
||||
def stop(self):
|
||||
self.psync = None
|
||||
self._upstream = None
|
||||
self._return_code = PROJECT_SYNC_STAGE.COMPLETED.value
|
||||
|
||||
def ac_write(self, data):
|
||||
stage = PROJECT_SYNC_STAGE.lookupByValue(data.get("stage"))
|
||||
|
||||
if stage is PROJECT_SYNC_STAGE.DBINDEX:
|
||||
self.psync.rebuild_dbindex()
|
||||
return zlib.compress(json.dumps(self.psync.get_dbindex()).encode())
|
||||
|
||||
if stage is PROJECT_SYNC_STAGE.DELETE:
|
||||
return self.psync.delete_dbindex(
|
||||
json.loads(zlib.decompress(data["dbindex"]))
|
||||
)
|
||||
|
||||
if stage is PROJECT_SYNC_STAGE.UPLOAD:
|
||||
if not self._upstream:
|
||||
self._upstream = BytesIO()
|
||||
self._upstream.write(data["chunk"])
|
||||
if self._upstream.tell() == data["total"]:
|
||||
self.psync.decompress_items(self._upstream)
|
||||
self._upstream = None
|
||||
return PROJECT_SYNC_STAGE.EXTRACTED.value
|
||||
|
||||
return PROJECT_SYNC_STAGE.UPLOAD.value
|
||||
|
||||
return None
|
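ProjectSyncAsyncCmd.ac_write() above drives the sync as a small state machine: the client sends a stage tag and the agent answers with a payload or the next stage. A schematic version with illustrative stages and a hypothetical sync object:

from enum import Enum

class Stage(Enum):
    DBINDEX = 1
    DELETE = 2
    UPLOAD = 3
    EXTRACTED = 4

def handle_stage(stage, payload, sync):
    if stage is Stage.DBINDEX:
        return sync.rebuild_index()            # answer with the agent-side file index
    if stage is Stage.DELETE:
        return sync.delete(payload)            # drop files the client no longer has
    if stage is Stage.UPLOAD:
        finished = sync.append_chunk(payload)  # accumulate compressed chunks
        return Stage.EXTRACTED if finished else Stage.UPLOAD
    return None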
60
platformio/commands/remote/ac/serial.py
Normal file
@ -0,0 +1,60 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from time import sleep
|
||||
|
||||
from twisted.internet import protocol, reactor # pylint: disable=import-error
|
||||
from twisted.internet.serialport import SerialPort # pylint: disable=import-error
|
||||
|
||||
from platformio.commands.remote.ac.base import AsyncCommandBase
|
||||
|
||||
|
||||
class SerialPortAsyncCmd(protocol.Protocol, AsyncCommandBase):
|
||||
def start(self):
|
||||
SerialPort(
|
||||
self,
|
||||
reactor=reactor,
|
||||
**{
|
||||
"deviceNameOrPortNumber": self.options["port"],
|
||||
"baudrate": self.options["baud"],
|
||||
"parity": self.options["parity"],
|
||||
"rtscts": 1 if self.options["rtscts"] else 0,
|
||||
"xonxoff": 1 if self.options["xonxoff"] else 0,
|
||||
}
|
||||
)
|
||||
|
||||
def connectionMade(self):
|
||||
self.reset_device()
|
||||
if self.options.get("rts", None) is not None:
|
||||
self.transport.setRTS(self.options.get("rts"))
|
||||
if self.options.get("dtr", None) is not None:
|
||||
self.transport.setDTR(self.options.get("dtr"))
|
||||
|
||||
def reset_device(self):
|
||||
self.transport.flushInput()
|
||||
self.transport.setDTR(False)
|
||||
self.transport.setRTS(False)
|
||||
sleep(0.1)
|
||||
self.transport.setDTR(True)
|
||||
self.transport.setRTS(True)
|
||||
sleep(0.1)
|
||||
|
||||
def dataReceived(self, data):
|
||||
self._ac_ondata(data)
|
||||
|
||||
def connectionLost(self, reason): # pylint: disable=unused-argument
|
||||
if self._paused:
|
||||
return
|
||||
self._return_code = 0
|
||||
self._ac_ended()
|
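reset_device() above pulses DTR/RTS so a typical development board resets before monitoring starts. The equivalent toggle written against pyserial (a different API from the Twisted SerialPort used in this commit) would look roughly like this:

import time

import serial  # pyserial

def reset_board(port, baud=115200):
    with serial.Serial(port, baud) as ser:
        ser.reset_input_buffer()
        ser.dtr = False
        ser.rts = False
        time.sleep(0.1)
        ser.dtr = True
        ser.rts = True
        time.sleep(0.1)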
13
platformio/commands/remote/client/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
38
platformio/commands/remote/client/agent_list.py
Normal file
@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
|
||||
|
||||
class AgentListClient(RemoteClientBase):
|
||||
def agent_pool_ready(self):
|
||||
d = self.agentpool.callRemote("list", True)
|
||||
d.addCallback(self._cbResult)
|
||||
d.addErrback(self.cb_global_error)
|
||||
|
||||
def _cbResult(self, result):
|
||||
for item in result:
|
||||
click.secho(item["name"], fg="cyan")
|
||||
click.echo("-" * len(item["name"]))
|
||||
click.echo("ID: %s" % item["id"])
|
||||
click.echo(
|
||||
"Started: %s"
|
||||
% datetime.fromtimestamp(item["started"]).strftime("%Y-%m-%d %H:%M:%S")
|
||||
)
|
||||
click.echo("")
|
||||
self.disconnect()
|
222
platformio/commands/remote/client/agent_service.py
Normal file
@ -0,0 +1,222 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from os.path import getatime, getmtime, isdir, isfile, join
|
||||
|
||||
from twisted.logger import LogLevel # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
|
||||
from platformio import proc, util
|
||||
from platformio.commands.remote.ac.process import ProcessAsyncCmd
|
||||
from platformio.commands.remote.ac.psync import ProjectSyncAsyncCmd
|
||||
from platformio.commands.remote.ac.serial import SerialPortAsyncCmd
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
from platformio.project.helpers import get_project_core_dir
|
||||
|
||||
|
||||
class RemoteAgentService(RemoteClientBase):
|
||||
def __init__(self, name, share, working_dir=None):
|
||||
RemoteClientBase.__init__(self)
|
||||
self.log_level = LogLevel.info
|
||||
self.working_dir = working_dir or join(get_project_core_dir(), "remote")
|
||||
if not isdir(self.working_dir):
|
||||
os.makedirs(self.working_dir)
|
||||
if name:
|
||||
self.name = str(name)[:50]
|
||||
self.join_options.update(
|
||||
{"agent": True, "share": [s.lower().strip()[:50] for s in share]}
|
||||
)
|
||||
|
||||
self._acs = {}
|
||||
|
||||
def agent_pool_ready(self):
|
||||
pass
|
||||
|
||||
def cb_disconnected(self, reason):
|
||||
for ac in self._acs.values():
|
||||
ac.ac_close()
|
||||
RemoteClientBase.cb_disconnected(self, reason)
|
||||
|
||||
def remote_acread(self, ac_id):
|
||||
self.log.debug("Async Read: {id}", id=ac_id)
|
||||
if ac_id not in self._acs:
|
||||
raise pb.Error("Invalid Async Identifier")
|
||||
return self._acs[ac_id].ac_read()
|
||||
|
||||
def remote_acwrite(self, ac_id, data):
|
||||
self.log.debug("Async Write: {id}", id=ac_id)
|
||||
if ac_id not in self._acs:
|
||||
raise pb.Error("Invalid Async Identifier")
|
||||
return self._acs[ac_id].ac_write(data)
|
||||
|
||||
def remote_acclose(self, ac_id):
|
||||
self.log.debug("Async Close: {id}", id=ac_id)
|
||||
if ac_id not in self._acs:
|
||||
raise pb.Error("Invalid Async Identifier")
|
||||
return_code = self._acs[ac_id].ac_close()
|
||||
del self._acs[ac_id]
|
||||
return return_code
|
||||
|
||||
def remote_cmd(self, cmd, options):
|
||||
self.log.info("Remote command received: {cmd}", cmd=cmd)
|
||||
self.log.debug("Command options: {options!r}", options=options)
|
||||
callback = "_process_cmd_%s" % cmd.replace(".", "_")
|
||||
return getattr(self, callback)(options)
|
||||
|
||||
def _defer_async_cmd(self, ac, pass_agent_name=True):
|
||||
self._acs[ac.id] = ac
|
||||
if pass_agent_name:
|
||||
return (self.id, ac.id, self.name)
|
||||
return (self.id, ac.id)
|
||||
|
||||
def _process_cmd_device_list(self, _):
|
||||
return (self.name, util.get_serialports())
|
||||
|
||||
def _process_cmd_device_monitor(self, options):
|
||||
if not options["port"]:
|
||||
for item in util.get_serialports():
|
||||
if "VID:PID" in item["hwid"]:
|
||||
options["port"] = item["port"]
|
||||
break
|
||||
|
||||
# terminate opened monitors
|
||||
if options["port"]:
|
||||
for ac in list(self._acs.values()):
|
||||
if (
|
||||
isinstance(ac, SerialPortAsyncCmd)
|
||||
and ac.options["port"] == options["port"]
|
||||
):
|
||||
self.log.info(
|
||||
"Terminate previously opened monitor at {port}",
|
||||
port=options["port"],
|
||||
)
|
||||
ac.ac_close()
|
||||
del self._acs[ac.id]
|
||||
|
||||
if not options["port"]:
|
||||
raise pb.Error("Please specify serial port using `--port` option")
|
||||
self.log.info("Starting serial monitor at {port}", port=options["port"])
|
||||
|
||||
return self._defer_async_cmd(SerialPortAsyncCmd(options), pass_agent_name=False)
|
||||
|
||||
def _process_cmd_psync(self, options):
|
||||
for ac in list(self._acs.values()):
|
||||
if (
|
||||
isinstance(ac, ProjectSyncAsyncCmd)
|
||||
and ac.options["id"] == options["id"]
|
||||
):
|
||||
self.log.info("Terminate previous Project Sync process")
|
||||
ac.ac_close()
|
||||
del self._acs[ac.id]
|
||||
|
||||
options["agent_working_dir"] = self.working_dir
|
||||
return self._defer_async_cmd(
|
||||
ProjectSyncAsyncCmd(options), pass_agent_name=False
|
||||
)
|
||||
|
||||
def _process_cmd_run(self, options):
|
||||
return self._process_cmd_run_or_test("run", options)
|
||||
|
||||
def _process_cmd_test(self, options):
|
||||
return self._process_cmd_run_or_test("test", options)
|
||||
|
||||
def _process_cmd_run_or_test( # pylint: disable=too-many-locals,too-many-branches
|
||||
self, command, options
|
||||
):
|
||||
assert options and "project_id" in options
|
||||
project_dir = join(self.working_dir, "projects", options["project_id"])
|
||||
origin_pio_ini = join(project_dir, "platformio.ini")
|
||||
back_pio_ini = join(project_dir, "platformio.ini.bak")
|
||||
|
||||
# remove insecure project options
|
||||
try:
|
||||
conf = ProjectConfig(origin_pio_ini)
|
||||
if isfile(back_pio_ini):
|
||||
os.remove(back_pio_ini)
|
||||
os.rename(origin_pio_ini, back_pio_ini)
|
||||
# cleanup
|
||||
if conf.has_section("platformio"):
|
||||
for opt in conf.options("platformio"):
|
||||
if opt.endswith("_dir"):
|
||||
conf.remove_option("platformio", opt)
|
||||
else:
|
||||
conf.add_section("platformio")
|
||||
conf.set("platformio", "build_dir", ".pio/build")
|
||||
conf.save(origin_pio_ini)
|
||||
|
||||
# restore A/M times
|
||||
os.utime(origin_pio_ini, (getatime(back_pio_ini), getmtime(back_pio_ini)))
|
||||
except NotPlatformIOProjectError as e:
|
||||
raise pb.Error(str(e))
|
||||
|
||||
cmd_args = ["platformio", "--force", command, "-d", project_dir]
|
||||
for env in options.get("environment", []):
|
||||
cmd_args.extend(["-e", env])
|
||||
for target in options.get("target", []):
|
||||
cmd_args.extend(["-t", target])
|
||||
for ignore in options.get("ignore", []):
|
||||
cmd_args.extend(["-i", ignore])
|
||||
if options.get("upload_port", False):
|
||||
cmd_args.extend(["--upload-port", options.get("upload_port")])
|
||||
if options.get("test_port", False):
|
||||
cmd_args.extend(["--test-port", options.get("test_port")])
|
||||
if options.get("disable_auto_clean", False):
|
||||
cmd_args.append("--disable-auto-clean")
|
||||
if options.get("without_building", False):
|
||||
cmd_args.append("--without-building")
|
||||
if options.get("without_uploading", False):
|
||||
cmd_args.append("--without-uploading")
|
||||
if options.get("silent", False):
|
||||
cmd_args.append("-s")
|
||||
if options.get("verbose", False):
|
||||
cmd_args.append("-v")
|
||||
|
||||
paused_acs = []
|
||||
for ac in self._acs.values():
|
||||
if not isinstance(ac, SerialPortAsyncCmd):
|
||||
continue
|
||||
self.log.info("Pause active monitor at {port}", port=ac.options["port"])
|
||||
ac.pause()
|
||||
paused_acs.append(ac)
|
||||
|
||||
def _cb_on_end():
|
||||
if isfile(back_pio_ini):
|
||||
if isfile(origin_pio_ini):
|
||||
os.remove(origin_pio_ini)
|
||||
os.rename(back_pio_ini, origin_pio_ini)
|
||||
for ac in paused_acs:
|
||||
ac.unpause()
|
||||
self.log.info(
|
||||
"Unpause active monitor at {port}", port=ac.options["port"]
|
||||
)
|
||||
|
||||
return self._defer_async_cmd(
|
||||
ProcessAsyncCmd(
|
||||
{"executable": proc.where_is_program("platformio"), "args": cmd_args},
|
||||
on_end_callback=_cb_on_end,
|
||||
)
|
||||
)
|
||||
|
||||
def _process_cmd_update(self, options):
|
||||
cmd_args = ["platformio", "--force", "update"]
|
||||
if options.get("only_check"):
|
||||
cmd_args.append("--only-check")
|
||||
return self._defer_async_cmd(
|
||||
ProcessAsyncCmd(
|
||||
{"executable": proc.where_is_program("platformio"), "args": cmd_args}
|
||||
)
|
||||
)
|
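remote_cmd() above maps a dotted command name such as "device.monitor" to a handler method purely by naming convention. A minimal version of that dispatch; the handler and its return value are illustrative:

class Dispatcher(object):
    def remote_cmd(self, cmd, options):
        handler = getattr(self, "_process_cmd_%s" % cmd.replace(".", "_"), None)
        if handler is None:
            raise ValueError("Unknown command: %s" % cmd)
        return handler(options)

    def _process_cmd_device_list(self, _options):
        return ["/dev/ttyUSB0"]  # illustrative payload only

print(Dispatcher().remote_cmd("device.list", {}))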
65
platformio/commands/remote/client/async_base.py
Normal file
@ -0,0 +1,65 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
|
||||
|
||||
class AsyncClientBase(RemoteClientBase):
|
||||
def __init__(self, command, agents, options):
|
||||
RemoteClientBase.__init__(self)
|
||||
self.command = command
|
||||
self.agents = agents
|
||||
self.options = options
|
||||
|
||||
self._acs_total = 0
|
||||
self._acs_ended = 0
|
||||
|
||||
def agent_pool_ready(self):
|
||||
pass
|
||||
|
||||
def cb_async_result(self, result):
|
||||
if self._acs_total == 0:
|
||||
self._acs_total = len(result)
|
||||
for (success, value) in result:
|
||||
if not success:
|
||||
raise pb.Error(value)
|
||||
self.acread_data(*value)
|
||||
|
||||
def acread_data(self, agent_id, ac_id, agent_name=None):
|
||||
d = self.agentpool.callRemote("acread", agent_id, ac_id)
|
||||
d.addCallback(self.cb_acread_result, agent_id, ac_id, agent_name)
|
||||
d.addErrback(self.cb_global_error)
|
||||
|
||||
def cb_acread_result(self, result, agent_id, ac_id, agent_name):
|
||||
if result is None:
|
||||
self.acclose(agent_id, ac_id)
|
||||
else:
|
||||
if self._acs_total > 1 and agent_name:
|
||||
click.echo("[%s] " % agent_name, nl=False)
|
||||
click.echo(result, nl=False)
|
||||
self.acread_data(agent_id, ac_id, agent_name)
|
||||
|
||||
def acclose(self, agent_id, ac_id):
|
||||
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
|
||||
d.addCallback(self.cb_acclose_result)
|
||||
d.addErrback(self.cb_global_error)
|
||||
|
||||
def cb_acclose_result(self, exit_code):
|
||||
self._acs_ended += 1
|
||||
if self._acs_ended != self._acs_total:
|
||||
return
|
||||
self.disconnect(exit_code)
|
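AsyncClientBase above keeps calling acread until the agent returns None, which signals that the remote command has finished and the exit code can be fetched with acclose. The synchronous equivalent of that Deferred chain, with stand-in callables:

def pump(read_chunk, write_out, close):
    # keep pulling output until the remote side signals completion with None
    while True:
        chunk = read_chunk()
        if chunk is None:
            return close()
        write_out(chunk)

chunks = iter([b"Building...\n", b"SUCCESS\n", None])
pump(lambda: next(chunks), lambda c: print(c.decode(), end=""), lambda: 0)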
193
platformio/commands/remote/client/base.py
Normal file
@ -0,0 +1,193 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from twisted.internet import defer, endpoints, reactor # pylint: disable=import-error
|
||||
from twisted.logger import ILogObserver # pylint: disable=import-error
|
||||
from twisted.logger import Logger # pylint: disable=import-error
|
||||
from twisted.logger import LogLevel # pylint: disable=import-error
|
||||
from twisted.logger import formatEvent # pylint: disable=import-error
|
||||
from twisted.python import failure # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
from zope.interface import provider # pylint: disable=import-error
|
||||
|
||||
from platformio import __pioremote_endpoint__, __version__, app, exception, maintenance
|
||||
from platformio.commands.remote.factory.client import RemoteClientFactory
|
||||
from platformio.commands.remote.factory.ssl import SSLContextFactory
|
||||
|
||||
|
||||
class RemoteClientBase( # pylint: disable=too-many-instance-attributes
|
||||
pb.Referenceable
|
||||
):
|
||||
|
||||
PING_DELAY = 60
|
||||
PING_MAX_FAILURES = 3
|
||||
DEBUG = False
|
||||
|
||||
def __init__(self):
|
||||
self.log_level = LogLevel.warn
|
||||
self.log = Logger(namespace="remote", observer=self._log_observer)
|
||||
self.id = app.get_host_id()
|
||||
self.name = app.get_host_name()
|
||||
self.join_options = {"corever": __version__}
|
||||
self.perspective = None
|
||||
self.agentpool = None
|
||||
|
||||
self._ping_id = 0
|
||||
self._ping_caller = None
|
||||
self._ping_counter = 0
|
||||
self._reactor_stopped = False
|
||||
self._exit_code = 0
|
||||
|
||||
@provider(ILogObserver)
|
||||
def _log_observer(self, event):
|
||||
if not self.DEBUG and (
|
||||
event["log_namespace"] != self.log.namespace
|
||||
or self.log_level > event["log_level"]
|
||||
):
|
||||
return
|
||||
msg = formatEvent(event)
|
||||
click.echo(
|
||||
"%s [%s] %s"
|
||||
% (
|
||||
datetime.fromtimestamp(event["log_time"]).strftime("%Y-%m-%d %H:%M:%S"),
|
||||
event["log_level"].name,
|
||||
msg,
|
||||
)
|
||||
)
|
||||
|
||||
def connect(self):
|
||||
self.log.info("Name: {name}", name=self.name)
|
||||
self.log.info("Connecting to PIO Remote Cloud")
|
||||
|
||||
# pylint: disable=protected-access
|
||||
proto, options = endpoints._parse(__pioremote_endpoint__)
|
||||
proto = proto[0]
|
||||
|
||||
factory = RemoteClientFactory()
|
||||
factory.remote_client = self
|
||||
factory.sslContextFactory = None
|
||||
if proto == "ssl":
|
||||
factory.sslContextFactory = SSLContextFactory(options["host"])
|
||||
reactor.connectSSL(
|
||||
options["host"],
|
||||
int(options["port"]),
|
||||
factory,
|
||||
factory.sslContextFactory,
|
||||
)
|
||||
elif proto == "tcp":
|
||||
reactor.connectTCP(options["host"], int(options["port"]), factory)
|
||||
else:
|
||||
raise exception.PlatformioException("Unknown PIO Remote Cloud protocol")
|
||||
reactor.run()
|
||||
|
||||
if self._exit_code != 0:
|
||||
raise exception.ReturnErrorCode(self._exit_code)
|
||||
|
||||
def cb_client_authorization_failed(self, err):
|
||||
msg = "Bad account credentials"
|
||||
if err.check(pb.Error):
|
||||
msg = err.getErrorMessage()
|
||||
self.log.error(msg)
|
||||
self.disconnect(exit_code=1)
|
||||
|
||||
def cb_client_authorization_made(self, perspective):
|
||||
self.log.info("Successfully authorized")
|
||||
self.perspective = perspective
|
||||
d = perspective.callRemote("join", self.id, self.name, self.join_options)
|
||||
d.addCallback(self._cb_client_join_made)
|
||||
d.addErrback(self.cb_global_error)
|
||||
|
||||
def _cb_client_join_made(self, result):
|
||||
code = result[0]
|
||||
if code == 1:
|
||||
self.agentpool = result[1]
|
||||
self.agent_pool_ready()
|
||||
self.restart_ping()
|
||||
elif code == 2:
|
||||
self.remote_service(*result[1:])
|
||||
|
||||
def remote_service(self, command, options):
|
||||
if command == "disconnect":
|
||||
self.log.error(
|
||||
"PIO Remote Cloud disconnected: {msg}", msg=options.get("message")
|
||||
)
|
||||
self.disconnect()
|
||||
|
||||
def restart_ping(self, reset_counter=True):
|
||||
# stop previous ping callers
|
||||
self.stop_ping(reset_counter)
|
||||
self._ping_caller = reactor.callLater(self.PING_DELAY, self._do_ping)
|
||||
|
||||
def _do_ping(self):
|
||||
self._ping_counter += 1
|
||||
self._ping_id = int(time())
|
||||
d = self.perspective.callRemote("service", "ping", {"id": self._ping_id})
|
||||
d.addCallback(self._cb_pong)
|
||||
d.addErrback(self._cb_pong)
|
||||
|
||||
def stop_ping(self, reset_counter=True):
|
||||
if reset_counter:
|
||||
self._ping_counter = 0
|
||||
if not self._ping_caller or not self._ping_caller.active():
|
||||
return
|
||||
self._ping_caller.cancel()
|
||||
self._ping_caller = None
|
||||
|
||||
def _cb_pong(self, result):
|
||||
if not isinstance(result, failure.Failure) and self._ping_id == result:
|
||||
self.restart_ping()
|
||||
return
|
||||
if self._ping_counter >= self.PING_MAX_FAILURES:
|
||||
self.stop_ping()
|
||||
self.perspective.broker.transport.loseConnection()
|
||||
else:
|
||||
self.restart_ping(reset_counter=False)
|
||||
|
||||
def agent_pool_ready(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def disconnect(self, exit_code=None):
|
||||
self.stop_ping()
|
||||
if exit_code is not None:
|
||||
self._exit_code = exit_code
|
||||
if reactor.running and not self._reactor_stopped:
|
||||
self._reactor_stopped = True
|
||||
reactor.stop()
|
||||
|
||||
def cb_disconnected(self, _):
|
||||
self.stop_ping()
|
||||
self.perspective = None
|
||||
self.agentpool = None
|
||||
|
||||
def cb_global_error(self, err):
|
||||
if err.check(pb.PBConnectionLost, defer.CancelledError):
|
||||
return
|
||||
|
||||
msg = err.getErrorMessage()
|
||||
if err.check(pb.DeadReferenceError):
|
||||
msg = "Remote Client has been terminated"
|
||||
elif "PioAgentNotStartedError" in str(err.type):
|
||||
msg = (
|
||||
"Could not find active agents. Please start it before on "
|
||||
"a remote machine using `pio remote agent start` command.\n"
|
||||
"See http://docs.platformio.org/page/plus/pio-remote.html"
|
||||
)
|
||||
else:
|
||||
maintenance.on_platformio_exception(Exception(err.type))
|
||||
click.secho(msg, fg="red", err=True)
|
||||
self.disconnect(exit_code=1)
|
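RemoteClientBase above pings the cloud every PING_DELAY seconds and drops the connection after PING_MAX_FAILURES unanswered pings; a matching pong resets the counter. The counting logic reduced to plain callables (the real code schedules pings with reactor.callLater):

class Keepalive(object):
    PING_DELAY = 60
    PING_MAX_FAILURES = 3

    def __init__(self, send_ping, drop_connection):
        self.send_ping = send_ping
        self.drop_connection = drop_connection
        self.failures = 0

    def on_timer(self):
        # called every PING_DELAY seconds
        self.failures += 1
        if self.failures >= self.PING_MAX_FAILURES:
            self.drop_connection()
        else:
            self.send_ping()

    def on_pong(self):
        # any reply proves the link is alive again
        self.failures = 0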
54
platformio/commands/remote/client/device_list.py
Normal file
@ -0,0 +1,54 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
|
||||
|
||||
class DeviceListClient(RemoteClientBase):
|
||||
def __init__(self, agents, json_output):
|
||||
RemoteClientBase.__init__(self)
|
||||
self.agents = agents
|
||||
self.json_output = json_output
|
||||
|
||||
def agent_pool_ready(self):
|
||||
d = self.agentpool.callRemote("cmd", self.agents, "device.list")
|
||||
d.addCallback(self._cbResult)
|
||||
d.addErrback(self.cb_global_error)
|
||||
|
||||
def _cbResult(self, result):
|
||||
data = {}
|
||||
for (success, value) in result:
|
||||
if not success:
|
||||
click.secho(value, fg="red", err=True)
|
||||
continue
|
||||
(agent_name, devlist) = value
|
||||
data[agent_name] = devlist
|
||||
|
||||
if self.json_output:
|
||||
click.echo(json.dumps(data))
|
||||
else:
|
||||
for agent_name, devlist in data.items():
|
||||
click.echo("Agent %s" % click.style(agent_name, fg="cyan", bold=True))
|
||||
click.echo("=" * (6 + len(agent_name)))
|
||||
for item in devlist:
|
||||
click.secho(item["port"], fg="cyan")
|
||||
click.echo("-" * len(item["port"]))
|
||||
click.echo("Hardware ID: %s" % item["hwid"])
|
||||
click.echo("Description: %s" % item["description"])
|
||||
click.echo("")
|
||||
self.disconnect()
|
236
platformio/commands/remote/client/device_monitor.py
Normal file
@ -0,0 +1,236 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import click
|
||||
from twisted.internet import protocol, reactor, task # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
|
||||
|
||||
class SMBridgeProtocol(protocol.Protocol): # pylint: disable=no-init
|
||||
def connectionMade(self):
|
||||
self.factory.add_client(self)
|
||||
|
||||
def connectionLost(self, reason): # pylint: disable=unused-argument
|
||||
self.factory.remove_client(self)
|
||||
|
||||
def dataReceived(self, data):
|
||||
self.factory.send_to_server(data)
|
||||
|
||||
|
||||
class SMBridgeFactory(protocol.ServerFactory):
|
||||
def __init__(self, cdm):
|
||||
self.cdm = cdm
|
||||
self._clients = []
|
||||
|
||||
def buildProtocol(self, addr): # pylint: disable=unused-argument
|
||||
p = SMBridgeProtocol()
|
||||
p.factory = self # pylint: disable=attribute-defined-outside-init
|
||||
return p
|
||||
|
||||
def add_client(self, client):
|
||||
self.cdm.log.debug("SMBridge: Client connected")
|
||||
self._clients.append(client)
|
||||
self.cdm.acread_data()
|
||||
|
||||
def remove_client(self, client):
|
||||
self.cdm.log.debug("SMBridge: Client disconnected")
|
||||
self._clients.remove(client)
|
||||
if not self._clients:
|
||||
self.cdm.client_terminal_stopped()
|
||||
|
||||
def has_clients(self):
|
||||
return len(self._clients)
|
||||
|
||||
def send_to_clients(self, data):
|
||||
if not self._clients:
|
||||
return None
|
||||
for client in self._clients:
|
||||
client.transport.write(data)
|
||||
return len(data)
|
||||
|
||||
def send_to_server(self, data):
|
||||
self.cdm.acwrite_data(data)
|
||||
|
||||
|
||||
class DeviceMonitorClient(  # pylint: disable=too-many-instance-attributes
    RemoteClientBase
):

    MAX_BUFFER_SIZE = 1024 * 1024

    def __init__(self, agents, **kwargs):
        RemoteClientBase.__init__(self)
        self.agents = agents
        self.cmd_options = kwargs

        self._bridge_factory = SMBridgeFactory(self)
        self._agent_id = None
        self._ac_id = None
        self._d_acread = None
        self._d_acwrite = None
        self._acwrite_buffer = ""

    def agent_pool_ready(self):
        d = task.deferLater(
            reactor, 1, self.agentpool.callRemote, "cmd", self.agents, "device.list"
        )
        d.addCallback(self._cb_device_list)
        d.addErrback(self.cb_global_error)

    def _cb_device_list(self, result):
        devices = []
        hwid_devindexes = []
        for (success, value) in result:
            if not success:
                click.secho(value, fg="red", err=True)
                continue
            (agent_name, ports) = value
            for item in ports:
                if "VID:PID" in item["hwid"]:
                    hwid_devindexes.append(len(devices))
                devices.append((agent_name, item))

        if len(result) == 1 and self.cmd_options["port"]:
            if set(["*", "?", "[", "]"]) & set(self.cmd_options["port"]):
                for agent, item in devices:
                    if fnmatch(item["port"], self.cmd_options["port"]):
                        return self.start_remote_monitor(agent, item["port"])
            return self.start_remote_monitor(result[0][1][0], self.cmd_options["port"])

        device = None
        if len(hwid_devindexes) == 1:
            device = devices[hwid_devindexes[0]]
        else:
            click.echo("Available ports:")
            for i, device in enumerate(devices):
                click.echo(
                    "{index}. {host}{port} \t{description}".format(
                        index=i + 1,
                        host=device[0] + ":" if len(result) > 1 else "",
                        port=device[1]["port"],
                        description=device[1]["description"]
                        if device[1]["description"] != "n/a"
                        else "",
                    )
                )
            device_index = click.prompt(
                "Please choose a port (number in the list above)",
                type=click.Choice([str(i + 1) for i, _ in enumerate(devices)]),
            )
            device = devices[int(device_index) - 1]

        self.start_remote_monitor(device[0], device[1]["port"])

        return None

    def start_remote_monitor(self, agent, port):
        options = {"port": port}
        for key in ("baud", "parity", "rtscts", "xonxoff", "rts", "dtr"):
            options[key] = self.cmd_options[key]

        click.echo(
            "Starting Serial Monitor on {host}:{port}".format(
                host=agent, port=options["port"]
            )
        )
        d = self.agentpool.callRemote("cmd", [agent], "device.monitor", options)
        d.addCallback(self.cb_async_result)
        d.addErrback(self.cb_global_error)

    def cb_async_result(self, result):
        if len(result) != 1:
            raise pb.Error("Invalid response from Remote Cloud")
        success, value = result[0]
        if not success:
            raise pb.Error(value)

        reconnected = self._agent_id is not None
        self._agent_id, self._ac_id = value

        if reconnected:
            self.acread_data(force=True)
            self.acwrite_data("", force=True)
            return

        # start bridge
        port = reactor.listenTCP(0, self._bridge_factory)
        address = port.getHost()
        self.log.debug("Serial Bridge is started on {address!r}", address=address)
        if "sock" in self.cmd_options:
            with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
                fp.write("socket://localhost:%d" % address.port)

    def client_terminal_stopped(self):
        try:
            d = self.agentpool.callRemote("acclose", self._agent_id, self._ac_id)
            d.addCallback(lambda r: self.disconnect())
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def acread_data(self, force=False):
        # Keep at most one outstanding remote "acread" call and skip polling
        # while no local terminal clients are connected to the bridge.
        if force and self._d_acread:
            self._d_acread.cancel()
            self._d_acread = None

        if (
            self._d_acread and not self._d_acread.called
        ) or not self._bridge_factory.has_clients():
            return

        try:
            self._d_acread = self.agentpool.callRemote(
                "acread", self._agent_id, self._ac_id
            )
            self._d_acread.addCallback(self.cb_acread_result)
            self._d_acread.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_acread_result(self, result):
        if result is None:
            self.disconnect(exit_code=1)
        else:
            self._bridge_factory.send_to_clients(result)
            self.acread_data()

    def acwrite_data(self, data, force=False):
        # Outgoing terminal data is buffered so that only one remote "acwrite"
        # is in flight; the buffer is capped at MAX_BUFFER_SIZE and the oldest
        # bytes are dropped when it overflows.
        if force and self._d_acwrite:
            self._d_acwrite.cancel()
            self._d_acwrite = None

        self._acwrite_buffer += data
        if len(self._acwrite_buffer) > self.MAX_BUFFER_SIZE:
            self._acwrite_buffer = self._acwrite_buffer[-1 * self.MAX_BUFFER_SIZE :]
        if (self._d_acwrite and not self._d_acwrite.called) or not self._acwrite_buffer:
            return

        data = self._acwrite_buffer
        self._acwrite_buffer = ""
        try:
            d = self.agentpool.callRemote("acwrite", self._agent_id, self._ac_id, data)
            d.addCallback(self.cb_acwrite_result)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_acwrite_result(self, result):
        assert result > 0
        if self._acwrite_buffer:
            self.acwrite_data("")
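
When the client above starts the serial bridge, it writes a `socket://localhost:<port>` URL into the `sock` file so the local monitor can attach over TCP (pyserial understands `socket://` URLs). The following is a minimal sketch of such an attachment, not part of the patch; it assumes pyserial is installed and uses a made-up directory path.

# Illustrative sketch only: attach a local terminal to the TCP bridge
# advertised in the "sock" file written by DeviceMonitorClient above.
# Assumes pyserial is available; the sock_dir path is hypothetical.
import os

import serial  # pyserial

sock_dir = "/tmp/pio-remote-example"  # hypothetical directory passed via --sock
with open(os.path.join(sock_dir, "sock")) as fp:
    bridge_url = fp.read().strip()  # e.g. "socket://localhost:54321"

# serial_for_url() accepts the socket:// scheme and returns a stream that
# reads and writes the remote device through the local bridge.
link = serial.serial_for_url(bridge_url, timeout=1)
link.write(b"hello\n")
print(link.read(64))
link.close()
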
272
platformio/commands/remote/client/run_or_test.py
Normal file
@ -0,0 +1,272 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import hashlib
import json
import os
import zlib
from io import BytesIO

from twisted.spread import pb  # pylint: disable=import-error

from platformio import util
from platformio.commands.remote.client.async_base import AsyncClientBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
from platformio.compat import hashlib_encode_data
from platformio.project.config import ProjectConfig


class RunOrTestClient(AsyncClientBase):

    MAX_ARCHIVE_SIZE = 50 * 1024 * 1024  # 50Mb
    UPLOAD_CHUNK_SIZE = 256 * 1024  # 256Kb

    PSYNC_SRC_EXTS = [
        "c",
        "cpp",
        "S",
        "spp",
        "SPP",
        "sx",
        "s",
        "asm",
        "ASM",
        "h",
        "hpp",
        "ipp",
        "ino",
        "pde",
        "json",
        "properties",
    ]

    PSYNC_SKIP_DIRS = (".git", ".svn", ".hg", "example", "examples", "test", "tests")

    def __init__(self, *args, **kwargs):
        AsyncClientBase.__init__(self, *args, **kwargs)
        self.project_id = self.generate_project_id(self.options["project_dir"])
        self.psync = ProjectSync(self.options["project_dir"])

    def generate_project_id(self, path):
        h = hashlib.sha1(hashlib_encode_data(self.id))
        h.update(hashlib_encode_data(path))
        return "%s-%s" % (os.path.basename(path), h.hexdigest())

    def add_project_items(self, psync):
        with util.cd(self.options["project_dir"]):
            cfg = ProjectConfig.get_instance(
                os.path.join(self.options["project_dir"], "platformio.ini")
            )
            psync.add_item(cfg.path, "platformio.ini")
            psync.add_item(cfg.get_optional_dir("shared"), "shared")
            psync.add_item(cfg.get_optional_dir("boards"), "boards")

            if self.options["force_remote"]:
                self._add_project_source_items(cfg, psync)
            else:
                self._add_project_binary_items(cfg, psync)

            if self.command == "test":
                psync.add_item(cfg.get_optional_dir("test"), "test")

    def _add_project_source_items(self, cfg, psync):
        psync.add_item(cfg.get_optional_dir("lib"), "lib")
        psync.add_item(
            cfg.get_optional_dir("include"),
            "include",
            cb_filter=self._cb_tarfile_filter,
        )
        psync.add_item(
            cfg.get_optional_dir("src"), "src", cb_filter=self._cb_tarfile_filter
        )
        if set(["buildfs", "uploadfs", "uploadfsota"]) & set(
            self.options.get("target", [])
        ):
            psync.add_item(cfg.get_optional_dir("data"), "data")

    @staticmethod
    def _add_project_binary_items(cfg, psync):
        build_dir = cfg.get_optional_dir("build")
        for env_name in os.listdir(build_dir):
            env_dir = os.path.join(build_dir, env_name)
            if not os.path.isdir(env_dir):
                continue
            for fname in os.listdir(env_dir):
                bin_file = os.path.join(env_dir, fname)
                bin_exts = (".elf", ".bin", ".hex", ".eep", "program")
                if os.path.isfile(bin_file) and fname.endswith(bin_exts):
                    psync.add_item(
                        bin_file, os.path.join(".pio", "build", env_name, fname)
                    )

    def _cb_tarfile_filter(self, path):
        if (
            os.path.isdir(path)
            and os.path.basename(path).lower() in self.PSYNC_SKIP_DIRS
        ):
            return None
        if os.path.isfile(path) and not self.is_file_with_exts(
            path, self.PSYNC_SRC_EXTS
        ):
            return None
        return path

    @staticmethod
    def is_file_with_exts(path, exts):
        if path.endswith(tuple(".%s" % e for e in exts)):
            return True
        return False

    def agent_pool_ready(self):
        self.psync_init()

    def psync_init(self):
        self.add_project_items(self.psync)
        d = self.agentpool.callRemote(
            "cmd",
            self.agents,
            "psync",
            dict(id=self.project_id, items=[i[1] for i in self.psync.get_items()]),
        )
        d.addCallback(self.cb_psync_init_result)
        d.addErrback(self.cb_global_error)

        # build the local DB index while waiting for the result from the agent
        self.psync.rebuild_dbindex()

    def cb_psync_init_result(self, result):
        self._acs_total = len(result)
        for (success, value) in result:
            if not success:
                raise pb.Error(value)
            agent_id, ac_id = value
            try:
                d = self.agentpool.callRemote(
                    "acwrite",
                    agent_id,
                    ac_id,
                    dict(stage=PROJECT_SYNC_STAGE.DBINDEX.value),
                )
                d.addCallback(self.cb_psync_dbindex_result, agent_id, ac_id)
                d.addErrback(self.cb_global_error)
            except (AttributeError, pb.DeadReferenceError):
                self.disconnect(exit_code=1)

    def cb_psync_dbindex_result(self, result, agent_id, ac_id):
        # The agent answers with its own (zlib-compressed) index; the
        # difference between both indexes decides what is deleted remotely
        # and what has to be uploaded.
        result = set(json.loads(zlib.decompress(result)))
        dbindex = set(self.psync.get_dbindex())
        delete = list(result - dbindex)
        delta = list(dbindex - result)

        self.log.debug(
            "PSync: stats, total={total}, delete={delete}, delta={delta}",
            total=len(dbindex),
            delete=len(delete),
            delta=len(delta),
        )

        if not delete and not delta:
            return self.psync_finalize(agent_id, ac_id)
        if not delete:
            return self.psync_upload(agent_id, ac_id, delta)

        try:
            d = self.agentpool.callRemote(
                "acwrite",
                agent_id,
                ac_id,
                dict(
                    stage=PROJECT_SYNC_STAGE.DELETE.value,
                    dbindex=zlib.compress(json.dumps(delete).encode()),
                ),
            )
            d.addCallback(self.cb_psync_delete_result, agent_id, ac_id, delta)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

        return None

    def cb_psync_delete_result(self, result, agent_id, ac_id, dbindex):
        assert result
        self.psync_upload(agent_id, ac_id, dbindex)

    def psync_upload(self, agent_id, ac_id, dbindex):
        assert dbindex
        fileobj = BytesIO()
        compressed = self.psync.compress_items(fileobj, dbindex, self.MAX_ARCHIVE_SIZE)
        fileobj.seek(0)
        self.log.debug(
            "PSync: upload project, size={size}", size=len(fileobj.getvalue())
        )
        self.psync_upload_chunk(
            agent_id, ac_id, list(set(dbindex) - set(compressed)), fileobj
        )

    def psync_upload_chunk(self, agent_id, ac_id, dbindex, fileobj):
        offset = fileobj.tell()
        total = fileobj.seek(0, os.SEEK_END)
        # unwind
        fileobj.seek(offset)
        chunk = fileobj.read(self.UPLOAD_CHUNK_SIZE)
        assert chunk
        try:
            d = self.agentpool.callRemote(
                "acwrite",
                agent_id,
                ac_id,
                dict(
                    stage=PROJECT_SYNC_STAGE.UPLOAD.value,
                    chunk=chunk,
                    length=len(chunk),
                    total=total,
                ),
            )
            d.addCallback(
                self.cb_psync_upload_chunk_result, agent_id, ac_id, dbindex, fileobj
            )
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_psync_upload_chunk_result(  # pylint: disable=too-many-arguments
        self, result, agent_id, ac_id, dbindex, fileobj
    ):
        result = PROJECT_SYNC_STAGE.lookupByValue(result)
        self.log.debug("PSync: upload chunk result {r}", r=str(result))
        assert result & (PROJECT_SYNC_STAGE.UPLOAD | PROJECT_SYNC_STAGE.EXTRACTED)
        if result is PROJECT_SYNC_STAGE.EXTRACTED:
            if dbindex:
                self.psync_upload(agent_id, ac_id, dbindex)
            else:
                self.psync_finalize(agent_id, ac_id)
        else:
            self.psync_upload_chunk(agent_id, ac_id, dbindex, fileobj)

    def psync_finalize(self, agent_id, ac_id):
        try:
            d = self.agentpool.callRemote("acclose", agent_id, ac_id)
            d.addCallback(self.cb_psync_completed_result, agent_id)
            d.addErrback(self.cb_global_error)
        except (AttributeError, pb.DeadReferenceError):
            self.disconnect(exit_code=1)

    def cb_psync_completed_result(self, result, agent_id):
        assert PROJECT_SYNC_STAGE.lookupByValue(result)
        options = self.options.copy()
        del options["project_dir"]
        options["project_id"] = self.project_id
        d = self.agentpool.callRemote("cmd", [agent_id], self.command, options)
        d.addCallback(self.cb_async_result)
        d.addErrback(self.cb_global_error)
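
For reference, the chunking arithmetic used by psync_upload_chunk() above can be exercised on its own. The standalone sketch below mirrors it with a plain BytesIO; the 256 KB chunk size matches UPLOAD_CHUNK_SIZE, and everything else (the archive content, the print call) is illustrative only.

# Standalone sketch of the chunk walk performed by psync_upload_chunk():
# read the archive in fixed-size chunks, remembering the absolute offset
# and total size so the receiving agent can track progress.
import os
from io import BytesIO

UPLOAD_CHUNK_SIZE = 256 * 1024  # same constant as RunOrTestClient

archive = BytesIO(b"x" * 600 * 1024)  # stand-in for the compressed project
while True:
    offset = archive.tell()
    total = archive.seek(0, os.SEEK_END)
    archive.seek(offset)  # unwind, exactly as the client does
    chunk = archive.read(UPLOAD_CHUNK_SIZE)
    if not chunk:
        break
    print("sending %d bytes at offset %d of %d" % (len(chunk), offset, total))
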
22
platformio/commands/remote/client/update_core.py
Normal file
@ -0,0 +1,22 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.remote.client.async_base import AsyncClientBase


class UpdateCoreClient(AsyncClientBase):
    def agent_pool_ready(self):
        d = self.agentpool.callRemote("cmd", self.agents, self.command, self.options)
        d.addCallback(self.cb_async_result)
        d.addErrback(self.cb_global_error)
@ -12,30 +12,42 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, import-outside-toplevel
|
||||
# pylint: disable=inconsistent-return-statements
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import threading
|
||||
from tempfile import mkdtemp
|
||||
from time import sleep
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio import exception, fs, proc
|
||||
from platformio.commands.device import helpers as device_helpers
|
||||
from platformio.commands.device.command import device_monitor as cmd_device_monitor
|
||||
from platformio.managers.core import pioplus_call
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.commands.test.command import cli as cmd_test
|
||||
from platformio.compat import PY2
|
||||
from platformio.managers.core import inject_contrib_pysite
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
@click.group("remote", short_help="PIO Remote")
|
||||
@click.option("-a", "--agent", multiple=True)
|
||||
def cli(**kwargs):
|
||||
pass
|
||||
@click.pass_context
|
||||
def cli(ctx, agent):
|
||||
if PY2:
|
||||
raise exception.UserSideException(
    "PIO Remote requires Python 3.5 or above. \nPlease install the latest "
    "Python 3 and reinstall PlatformIO Core using the installation script:\n"
    "https://docs.platformio.org/page/core/installation.html"
)
|
||||
ctx.obj = agent
|
||||
inject_contrib_pysite()
|
||||
|
||||
|
||||
@cli.group("agent", short_help="Start new agent or list active")
|
||||
@cli.group("agent", short_help="Start a new agent or list active")
|
||||
def remote_agent():
|
||||
pass
|
||||
|
||||
@ -49,18 +61,17 @@ def remote_agent():
|
||||
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
|
||||
)
|
||||
def remote_agent_start(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
def remote_agent_start(name, share, working_dir):
|
||||
from platformio.commands.remote.client.agent_service import RemoteAgentService
|
||||
|
||||
|
||||
@remote_agent.command("reload", short_help="Reload agents")
|
||||
def remote_agent_reload():
|
||||
pioplus_call(sys.argv[1:])
|
||||
RemoteAgentService(name, share, working_dir).connect()
|
||||
|
||||
|
||||
@remote_agent.command("list", short_help="List active agents")
|
||||
def remote_agent_list():
|
||||
pioplus_call(sys.argv[1:])
|
||||
from platformio.commands.remote.client.agent_list import AgentListClient
|
||||
|
||||
AgentListClient().connect()
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
|
||||
@ -73,8 +84,11 @@ def remote_agent_list():
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
def remote_update(only_check, dry_run):
|
||||
pioplus_call(sys.argv[1:])
|
||||
@click.pass_obj
|
||||
def remote_update(agents, only_check, dry_run):
|
||||
from platformio.commands.remote.client.update_core import UpdateCoreClient
|
||||
|
||||
UpdateCoreClient("update", agents, dict(only_check=only_check or dry_run)).connect()
|
||||
|
||||
|
||||
@cli.command("run", short_help="Process project environments remotely")
|
||||
@ -93,8 +107,65 @@ def remote_update(only_check, dry_run):
|
||||
@click.option("-r", "--force-remote", is_flag=True)
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
def remote_run(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_run(
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
target,
|
||||
upload_port,
|
||||
project_dir,
|
||||
disable_auto_clean,
|
||||
force_remote,
|
||||
silent,
|
||||
verbose,
|
||||
):
|
||||
|
||||
from platformio.commands.remote.client.run_or_test import RunOrTestClient
|
||||
|
||||
cr = RunOrTestClient(
|
||||
"run",
|
||||
agents,
|
||||
dict(
|
||||
environment=environment,
|
||||
target=target,
|
||||
upload_port=upload_port,
|
||||
project_dir=project_dir,
|
||||
disable_auto_clean=disable_auto_clean,
|
||||
force_remote=force_remote,
|
||||
silent=silent,
|
||||
verbose=verbose,
|
||||
),
|
||||
)
|
||||
if force_remote:
|
||||
return cr.connect()
|
||||
|
||||
click.secho("Building project locally", bold=True)
|
||||
local_targets = []
|
||||
if "clean" in target:
|
||||
local_targets = ["clean"]
|
||||
elif set(["buildfs", "uploadfs", "uploadfsota"]) & set(target):
|
||||
local_targets = ["buildfs"]
|
||||
else:
|
||||
local_targets = ["checkprogsize", "buildprog"]
|
||||
ctx.invoke(
|
||||
cmd_run,
|
||||
environment=environment,
|
||||
target=local_targets,
|
||||
project_dir=project_dir,
|
||||
# disable_auto_clean=True,
|
||||
silent=silent,
|
||||
verbose=verbose,
|
||||
)
|
||||
|
||||
if any(["upload" in t for t in target] + ["program" in target]):
|
||||
click.secho("Uploading firmware remotely", bold=True)
|
||||
cr.options["target"] += ("nobuild",)
|
||||
cr.options["disable_auto_clean"] = True
|
||||
cr.connect()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("test", short_help="Remote Unit Testing")
|
||||
@ -114,8 +185,59 @@ def remote_run(**kwargs):
|
||||
@click.option("--without-building", is_flag=True)
|
||||
@click.option("--without-uploading", is_flag=True)
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def remote_test(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_test(
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
ignore,
|
||||
upload_port,
|
||||
test_port,
|
||||
project_dir,
|
||||
force_remote,
|
||||
without_building,
|
||||
without_uploading,
|
||||
verbose,
|
||||
):
|
||||
|
||||
from platformio.commands.remote.client.run_or_test import RunOrTestClient
|
||||
|
||||
cr = RunOrTestClient(
|
||||
"test",
|
||||
agents,
|
||||
dict(
|
||||
environment=environment,
|
||||
ignore=ignore,
|
||||
upload_port=upload_port,
|
||||
test_port=test_port,
|
||||
project_dir=project_dir,
|
||||
force_remote=force_remote,
|
||||
without_building=without_building,
|
||||
without_uploading=without_uploading,
|
||||
verbose=verbose,
|
||||
),
|
||||
)
|
||||
if force_remote:
|
||||
return cr.connect()
|
||||
|
||||
click.secho("Building project locally", bold=True)
|
||||
|
||||
ctx.invoke(
|
||||
cmd_test,
|
||||
environment=environment,
|
||||
ignore=ignore,
|
||||
project_dir=project_dir,
|
||||
without_uploading=True,
|
||||
without_testing=True,
|
||||
verbose=verbose,
|
||||
)
|
||||
|
||||
click.secho("Testing project remotely", bold=True)
|
||||
cr.options["without_building"] = True
|
||||
cr.connect()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.group("device", short_help="Monitor remote device or list existing")
|
||||
@ -125,8 +247,11 @@ def remote_device():
|
||||
|
||||
@remote_device.command("list", short_help="List remote devices")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def device_list(json_output):
|
||||
pioplus_call(sys.argv[1:])
|
||||
@click.pass_obj
|
||||
def device_list(agents, json_output):
|
||||
from platformio.commands.remote.client.device_list import DeviceListClient
|
||||
|
||||
DeviceListClient(agents, json_output).connect()
|
||||
|
||||
|
||||
@remote_device.command("monitor", short_help="Monitor remote device")
|
||||
@ -193,8 +318,20 @@ def device_list(json_output):
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment",
|
||||
)
|
||||
@click.option(
|
||||
"--sock",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def device_monitor(ctx, **kwargs):
|
||||
def device_monitor(ctx, agents, **kwargs):
|
||||
from platformio.commands.remote.client.device_monitor import DeviceMonitorClient
|
||||
|
||||
if kwargs["sock"]:
|
||||
return DeviceMonitorClient(agents, **kwargs).connect()
|
||||
|
||||
project_options = {}
|
||||
try:
|
||||
with fs.cd(kwargs["project_dir"]):
|
||||
@ -206,15 +343,12 @@ def device_monitor(ctx, **kwargs):
|
||||
kwargs["baud"] = kwargs["baud"] or 9600
|
||||
|
||||
def _tx_target(sock_dir):
|
||||
pioplus_argv = ["remote", "device", "monitor"]
|
||||
pioplus_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
|
||||
pioplus_argv.extend(["--sock", sock_dir])
|
||||
try:
|
||||
pioplus_call(pioplus_argv)
|
||||
except exception.ReturnErrorCode:
|
||||
pass
|
||||
subcmd_argv = ["remote", "device", "monitor"]
|
||||
subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
|
||||
subcmd_argv.extend(["--sock", sock_dir])
|
||||
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
|
||||
|
||||
sock_dir = mkdtemp(suffix="pioplus")
|
||||
sock_dir = mkdtemp(suffix="pio")
|
||||
sock_file = os.path.join(sock_dir, "sock")
|
||||
try:
|
||||
t = threading.Thread(target=_tx_target, args=(sock_dir,))
|
||||
@ -229,3 +363,5 @@ def device_monitor(ctx, **kwargs):
|
||||
t.join(2)
|
||||
finally:
|
||||
fs.rmtree(sock_dir)
|
||||
|
||||
return True
|
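
The hybrid flow above builds the project locally and only delegates the upload to the remote agent; the mapping from the requested targets to the local build targets is small enough to isolate. Below is a sketch of that selection logic, mirroring remote_run; the function name is illustrative.

# Sketch of the target selection used by "pio remote run" when the build
# happens locally and only the upload is delegated to the remote agent.
def pick_local_targets(requested):
    # "clean" wins outright; filesystem targets force a local "buildfs";
    # otherwise build the program and check its size locally.
    if "clean" in requested:
        return ["clean"]
    if {"buildfs", "uploadfs", "uploadfsota"} & set(requested):
        return ["buildfs"]
    return ["checkprogsize", "buildprog"]


assert pick_local_targets(["upload"]) == ["checkprogsize", "buildprog"]
assert pick_local_targets(["uploadfs"]) == ["buildfs"]
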
13
platformio/commands/remote/factory/__init__.py
Normal file
@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
73
platformio/commands/remote/factory/client.py
Normal file
@ -0,0 +1,73 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from twisted.cred import credentials # pylint: disable=import-error
|
||||
from twisted.internet import protocol, reactor # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
|
||||
from platformio.app import get_host_id
|
||||
from platformio.commands.account.client import AccountClient
|
||||
|
||||
|
||||
class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
|
||||
def clientConnectionMade(self, broker):
|
||||
if self.sslContextFactory and not self.sslContextFactory.certificate_verified:
|
||||
self.remote_client.log.error(
    "A remote cloud could not prove that its security certificate is "
    "from {host}. This may be caused by a misconfiguration or by an "
    "attacker intercepting your connection.",
    host=self.sslContextFactory.host,
)
|
||||
return self.remote_client.disconnect()
|
||||
pb.PBClientFactory.clientConnectionMade(self, broker)
|
||||
protocol.ReconnectingClientFactory.resetDelay(self)
|
||||
self.remote_client.log.info("Successfully connected")
|
||||
self.remote_client.log.info("Authenticating")
|
||||
|
||||
d = self.login(
|
||||
credentials.UsernamePassword(
|
||||
AccountClient().fetch_authentication_token().encode(),
|
||||
get_host_id().encode(),
|
||||
),
|
||||
client=self.remote_client,
|
||||
)
|
||||
d.addCallback(self.remote_client.cb_client_authorization_made)
|
||||
d.addErrback(self.remote_client.cb_client_authorization_failed)
|
||||
return d
|
||||
|
||||
def clientConnectionFailed(self, connector, reason):
|
||||
self.remote_client.log.warn(
|
||||
"Could not connect to PIO Remote Cloud. Reconnecting..."
|
||||
)
|
||||
self.remote_client.cb_disconnected(reason)
|
||||
protocol.ReconnectingClientFactory.clientConnectionFailed(
|
||||
self, connector, reason
|
||||
)
|
||||
|
||||
def clientConnectionLost( # pylint: disable=arguments-differ
|
||||
self, connector, unused_reason
|
||||
):
|
||||
if not reactor.running:
|
||||
self.remote_client.log.info("Successfully disconnected")
|
||||
return
|
||||
self.remote_client.log.warn(
    "Connection to PIO Remote Cloud was lost. Reconnecting"
)
|
||||
pb.PBClientFactory.clientConnectionLost(
|
||||
self, connector, unused_reason, reconnecting=1
|
||||
)
|
||||
self.remote_client.cb_disconnected(unused_reason)
|
||||
protocol.ReconnectingClientFactory.clientConnectionLost(
|
||||
self, connector, unused_reason
|
||||
)
|
41
platformio/commands/remote/factory/ssl.py
Normal file
@ -0,0 +1,41 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import certifi
from OpenSSL import SSL  # pylint: disable=import-error
from twisted.internet import ssl  # pylint: disable=import-error


class SSLContextFactory(ssl.ClientContextFactory):
    def __init__(self, host):
        self.host = host
        self.certificate_verified = False

    def getContext(self):
        ctx = super(SSLContextFactory, self).getContext()
        ctx.set_verify(
            SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.verifyHostname
        )
        ctx.load_verify_locations(certifi.where())
        return ctx

    def verifyHostname(  # pylint: disable=unused-argument,too-many-arguments
        self, connection, x509, errno, depth, status
    ):
        cn = x509.get_subject().commonName
        if cn.startswith("*"):
            cn = cn[1:]
        if self.host.endswith(cn):
            self.certificate_verified = True
        return status
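
The hostname check above accepts wildcard certificates by stripping the leading `*` from the common name and comparing suffixes. A tiny illustration of that comparison follows; the hostnames are made up.

# Suffix comparison as performed by SSLContextFactory.verifyHostname().
# The hostnames below are illustrative only.
def matches(host, common_name):
    if common_name.startswith("*"):
        common_name = common_name[1:]  # "*.example.org" -> ".example.org"
    return host.endswith(common_name)


assert matches("remote.example.org", "*.example.org")
assert not matches("remote.example.com", "*.example.org")
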
117
platformio/commands/remote/projectsync.py
Normal file
@ -0,0 +1,117 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import tarfile
|
||||
from binascii import crc32
|
||||
from os.path import getmtime, getsize, isdir, isfile, join
|
||||
|
||||
from twisted.python import constants # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import hashlib_encode_data
|
||||
|
||||
|
||||
class PROJECT_SYNC_STAGE(constants.Flags):
|
||||
INIT = constants.FlagConstant()
|
||||
DBINDEX = constants.FlagConstant()
|
||||
DELETE = constants.FlagConstant()
|
||||
UPLOAD = constants.FlagConstant()
|
||||
EXTRACTED = constants.FlagConstant()
|
||||
COMPLETED = constants.FlagConstant()
|
||||
|
||||
|
||||
class ProjectSync(object):
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
if not isdir(self.path):
|
||||
os.makedirs(self.path)
|
||||
self.items = []
|
||||
self._db = {}
|
||||
|
||||
def add_item(self, path, relpath, cb_filter=None):
|
||||
self.items.append((path, relpath, cb_filter))
|
||||
|
||||
def get_items(self):
|
||||
return self.items
|
||||
|
||||
def rebuild_dbindex(self):
|
||||
self._db = {}
|
||||
for (path, relpath, cb_filter) in self.items:
|
||||
if cb_filter and not cb_filter(path):
|
||||
continue
|
||||
self._insert_to_db(path, relpath)
|
||||
if not isdir(path):
|
||||
continue
|
||||
for (root, _, files) in os.walk(path, followlinks=True):
|
||||
for name in files:
|
||||
self._insert_to_db(
|
||||
join(root, name), join(relpath, root[len(path) + 1 :], name)
|
||||
)
|
||||
|
||||
def _insert_to_db(self, path, relpath):
|
||||
if not isfile(path):
|
||||
return
|
||||
index_hash = "%s-%s-%s" % (relpath, getmtime(path), getsize(path))
|
||||
index = crc32(hashlib_encode_data(index_hash))
|
||||
self._db[index] = (path, relpath)
|
||||
|
||||
def get_dbindex(self):
|
||||
return list(self._db.keys())
|
||||
|
||||
def delete_dbindex(self, dbindex):
|
||||
for index in dbindex:
|
||||
if index not in self._db:
|
||||
continue
|
||||
path = self._db[index][0]
|
||||
if isfile(path):
|
||||
os.remove(path)
|
||||
del self._db[index]
|
||||
self.delete_empty_folders()
|
||||
return True
|
||||
|
||||
def delete_empty_folders(self):
|
||||
deleted = False
|
||||
for item in self.items:
|
||||
if not isdir(item[0]):
|
||||
continue
|
||||
for root, dirs, files in os.walk(item[0]):
|
||||
if not dirs and not files and root != item[0]:
|
||||
deleted = True
|
||||
os.rmdir(root)
|
||||
if deleted:
|
||||
return self.delete_empty_folders()
|
||||
|
||||
return True
|
||||
|
||||
def compress_items(self, fileobj, dbindex, max_size):
|
||||
compressed = []
|
||||
total_size = 0
|
||||
tar_opts = dict(fileobj=fileobj, mode="w:gz", bufsize=0, dereference=True)
|
||||
with tarfile.open(**tar_opts) as tgz:
|
||||
for index in dbindex:
|
||||
compressed.append(index)
|
||||
if index not in self._db:
|
||||
continue
|
||||
path, relpath = self._db[index]
|
||||
tgz.add(path, relpath)
|
||||
total_size += getsize(path)
|
||||
if total_size > max_size:
|
||||
break
|
||||
return compressed
|
||||
|
||||
def decompress_items(self, fileobj):
|
||||
fileobj.seek(0)
|
||||
with tarfile.open(fileobj=fileobj, mode="r:gz") as tgz:
|
||||
tgz.extractall(self.path)
|
||||
return True
|
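
ProjectSync above keys every file by a CRC32 of its relative path, modification time and size, so a change in any of the three yields a new index entry and triggers a re-upload. A minimal reproduction of that key follows; the path and stat values are hypothetical.

# How ProjectSync._insert_to_db() derives a sync key for one file.
# The relpath and stat() values below are hypothetical;
# hashlib_encode_data just ensures the input is bytes.
from binascii import crc32

from platformio.compat import hashlib_encode_data

relpath = "src/main.cpp"          # hypothetical relative path
mtime, size = 1583932800.0, 1024  # hypothetical stat() values
index_hash = "%s-%s-%s" % (relpath, mtime, size)
print(crc32(hashlib_encode_data(index_hash)))  # integer key stored in the index
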
@ -12,7 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import exception, telemetry
|
||||
from platformio import exception
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
@ -62,8 +62,6 @@ class EnvironmentProcessor(object):
|
||||
build_vars = self.get_build_variables()
|
||||
build_targets = list(self.get_build_targets())
|
||||
|
||||
telemetry.send_run_environment(self.options, build_targets)
|
||||
|
||||
# skip monitor target, we call it above
|
||||
if "monitor" in build_targets:
|
||||
build_targets.remove("monitor")
|
||||
|
@ -29,6 +29,7 @@ TRANSPORT_OPTIONS = {
|
||||
"flush": "Serial.flush()",
|
||||
"begin": "Serial.begin($baudrate)",
|
||||
"end": "Serial.end()",
|
||||
"language": "cpp",
|
||||
},
|
||||
"mbed": {
|
||||
"include": "#include <mbed.h>",
|
||||
@ -37,6 +38,7 @@ TRANSPORT_OPTIONS = {
|
||||
"flush": "",
|
||||
"begin": "pc.baud($baudrate)",
|
||||
"end": "",
|
||||
"language": "cpp",
|
||||
},
|
||||
"espidf": {
|
||||
"include": "#include <stdio.h>",
|
||||
@ -46,6 +48,14 @@ TRANSPORT_OPTIONS = {
|
||||
"begin": "",
|
||||
"end": "",
|
||||
},
|
||||
"zephyr": {
|
||||
"include": "#include <sys/printk.h>",
|
||||
"object": "",
|
||||
"putchar": 'printk("%c", c)',
|
||||
"flush": "",
|
||||
"begin": "",
|
||||
"end": "",
|
||||
},
|
||||
"native": {
|
||||
"include": "#include <stdio.h>",
|
||||
"object": "",
|
||||
@ -61,6 +71,7 @@ TRANSPORT_OPTIONS = {
|
||||
"flush": "unittest_uart_flush()",
|
||||
"begin": "unittest_uart_begin()",
|
||||
"end": "unittest_uart_end()",
|
||||
"language": "cpp",
|
||||
},
|
||||
}
|
||||
|
||||
@ -80,7 +91,7 @@ class TestProcessorBase(object):
|
||||
self.env_name = envname
|
||||
self.env_options = options["project_config"].items(env=envname, as_dict=True)
|
||||
self._run_failed = False
|
||||
self._outputcpp_generated = False
|
||||
self._output_file_generated = False
|
||||
|
||||
def get_transport(self):
|
||||
transport = None
|
||||
@ -105,11 +116,11 @@ class TestProcessorBase(object):
|
||||
click.secho(text, bold=self.options.get("verbose"))
|
||||
|
||||
def build_or_upload(self, target):
|
||||
if not self._outputcpp_generated:
|
||||
self.generate_outputcpp(
|
||||
if not self._output_file_generated:
|
||||
self.generate_output_file(
|
||||
self.options["project_config"].get_optional_dir("test")
|
||||
)
|
||||
self._outputcpp_generated = True
|
||||
self._output_file_generated = True
|
||||
|
||||
if self.test_name != "*":
|
||||
self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_name
|
||||
@ -147,10 +158,10 @@ class TestProcessorBase(object):
|
||||
else:
|
||||
click.echo(line)
|
||||
|
||||
def generate_outputcpp(self, test_dir):
|
||||
def generate_output_file(self, test_dir):
|
||||
assert isdir(test_dir)
|
||||
|
||||
cpp_tpl = "\n".join(
|
||||
file_tpl = "\n".join(
|
||||
[
|
||||
"$include",
|
||||
"#include <output_export.h>",
|
||||
@ -194,10 +205,12 @@ class TestProcessorBase(object):
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
tpl = Template(cpp_tpl).substitute(TRANSPORT_OPTIONS[self.get_transport()])
|
||||
transport_options = TRANSPORT_OPTIONS[self.get_transport()]
|
||||
tpl = Template(file_tpl).substitute(transport_options)
|
||||
data = Template(tpl).substitute(baudrate=self.get_baudrate())
|
||||
|
||||
tmp_file = join(test_dir, "output_export.cpp")
|
||||
tmp_file = join(
|
||||
test_dir, "output_export." + transport_options.get("language", "c")
|
||||
)
|
||||
with open(tmp_file, "w") as fp:
|
||||
fp.write(data)
|
||||
|
||||
|
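
generate_output_file() above fills the transport template in two passes: first the transport-specific snippets from TRANSPORT_OPTIONS, then the baud rate. A condensed sketch of that substitution follows; the template body and option values are trimmed for illustration.

# Two-pass string.Template substitution as used by generate_output_file().
# The template body and option values are reduced for illustration.
from string import Template

file_tpl = "\n".join(
    ["$include", "void output_start(unsigned int baudrate) { $begin; }"]
)
transport_options = {
    "include": "#include <Arduino.h>",
    "begin": "Serial.begin($baudrate)",
}

tpl = Template(file_tpl).substitute(transport_options)  # pass 1: transport snippets
data = Template(tpl).substitute(baudrate=115200)        # pass 2: baud rate
print(data)
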
@ -19,7 +19,7 @@ from zipfile import ZipFile
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio import VERSION, __version__, app, exception, util
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.proc import exec_command, get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
@ -133,7 +133,7 @@ def get_develop_latest_version():
|
||||
r = requests.get(
|
||||
"https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/platformio/__init__.py",
|
||||
headers=util.get_request_defheaders(),
|
||||
headers={"User-Agent": app.get_user_agent()},
|
||||
)
|
||||
r.raise_for_status()
|
||||
for line in r.text.split("\n"):
|
||||
@ -153,7 +153,8 @@ def get_develop_latest_version():
|
||||
|
||||
def get_pypi_latest_version():
|
||||
r = requests.get(
|
||||
"https://pypi.org/pypi/platformio/json", headers=util.get_request_defheaders()
|
||||
"https://pypi.org/pypi/platformio/json",
|
||||
headers={"User-Agent": app.get_user_agent()},
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()["info"]["version"]
|
||||
|
@ -23,7 +23,7 @@ from time import mktime
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio import util
|
||||
from platformio import app, util
|
||||
from platformio.exception import (
|
||||
FDSHASumMismatch,
|
||||
FDSizeMismatch,
|
||||
@ -38,7 +38,7 @@ class FileDownloader(object):
|
||||
self._request = requests.get(
|
||||
url,
|
||||
stream=True,
|
||||
headers=util.get_request_defheaders(),
|
||||
headers={"User-Agent": app.get_user_agent()},
|
||||
verify=sys.version_info >= (2, 7, 9),
|
||||
)
|
||||
if self._request.status_code != 200:
|
||||
|
@ -92,7 +92,7 @@ class PlatformIOPackageException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class UnknownPackage(PlatformIOPackageException):
|
||||
class UnknownPackage(UserSideException):
|
||||
|
||||
MESSAGE = "Detected unknown package '{0}'"
|
||||
|
||||
@ -177,7 +177,7 @@ class NotGlobalLibDir(UserSideException):
|
||||
)
|
||||
|
||||
|
||||
class InvalidLibConfURL(PlatformioException):
|
||||
class InvalidLibConfURL(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid library config URL '{0}'"
|
||||
|
||||
@ -242,12 +242,12 @@ class BuildScriptNotFound(PlatformioException):
|
||||
MESSAGE = "Invalid path '{0}' to build script"
|
||||
|
||||
|
||||
class InvalidSettingName(PlatformioException):
|
||||
class InvalidSettingName(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid setting with the name '{0}'"
|
||||
|
||||
|
||||
class InvalidSettingValue(PlatformioException):
|
||||
class InvalidSettingValue(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
|
||||
|
||||
@ -257,7 +257,7 @@ class InvalidJSONFile(PlatformioException):
|
||||
MESSAGE = "Could not load broken JSON: {0}"
|
||||
|
||||
|
||||
class CIBuildEnvsEmpty(PlatformioException):
|
||||
class CIBuildEnvsEmpty(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Can't find PlatformIO build environments.\n"
|
||||
@ -295,7 +295,7 @@ class CygwinEnvDetected(PlatformioException):
|
||||
)
|
||||
|
||||
|
||||
class TestDirNotExists(PlatformioException):
|
||||
class TestDirNotExists(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"A test folder '{0}' does not exist.\nPlease create 'test' "
|
||||
|
@ -57,6 +57,20 @@ class ProjectGenerator(object):
|
||||
|
||||
return envname
|
||||
|
||||
@staticmethod
|
||||
def filter_includes(includes_map, ignore_scopes=None, to_unix_path=True):
|
||||
ignore_scopes = ignore_scopes or []
|
||||
result = []
|
||||
for scope, includes in includes_map.items():
|
||||
if scope in ignore_scopes:
|
||||
continue
|
||||
for include in includes:
|
||||
if to_unix_path:
|
||||
include = fs.to_unix_path(include)
|
||||
if include not in result:
|
||||
result.append(include)
|
||||
return result
|
||||
|
||||
def _load_tplvars(self):
|
||||
tpl_vars = {
|
||||
"config": self.config,
|
||||
@ -92,12 +106,13 @@ class ProjectGenerator(object):
|
||||
for key, value in tpl_vars.items():
|
||||
if key.endswith(("_path", "_dir")):
|
||||
tpl_vars[key] = fs.to_unix_path(value)
|
||||
for key in ("includes", "src_files", "libsource_dirs"):
|
||||
for key in ("src_files", "libsource_dirs"):
|
||||
if key not in tpl_vars:
|
||||
continue
|
||||
tpl_vars[key] = [fs.to_unix_path(inc) for inc in tpl_vars[key]]
|
||||
|
||||
tpl_vars["to_unix_path"] = fs.to_unix_path
|
||||
tpl_vars["filter_includes"] = self.filter_includes
|
||||
return tpl_vars
|
||||
|
||||
def get_src_files(self):
|
||||
@ -136,7 +151,7 @@ class ProjectGenerator(object):
|
||||
@staticmethod
|
||||
def _render_tpl(tpl_path, tpl_vars):
|
||||
with codecs.open(tpl_path, "r", encoding="utf8") as fp:
|
||||
return bottle.SimpleTemplate(fp.read()).render(**tpl_vars)
|
||||
return bottle.template(fp.read(), **tpl_vars)
|
||||
|
||||
@staticmethod
|
||||
def _merge_contents(dst_path, contents):
|
||||
|
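
The new filter_includes() helper added to ProjectGenerator above centralizes what every IDE template used to do inline: drop include paths from ignored scopes and normalize them to forward slashes. A small usage sketch with made-up data follows, assuming the ProjectGenerator import path shown below matches this release.

# Usage sketch for ProjectGenerator.filter_includes(); the includes_map
# content is made up and only demonstrates the scope-filtering behaviour.
from platformio.ide.projectgenerator import ProjectGenerator

includes_map = {
    "build": [r"C:\project\include", r"C:\project\src"],
    "toolchain": [r"C:\toolchain\arm-none-eabi\include"],
}

# Toolchain headers are skipped and Windows paths are converted to forward
# slashes, mirroring how the IDE templates consume the result.
print(ProjectGenerator.filter_includes(includes_map, ignore_scopes=["toolchain"]))
# expected: ['C:/project/include', 'C:/project/src']
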
@ -1,4 +1,4 @@
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
-I{{include}}
|
||||
% end
|
||||
% for define in defines:
|
||||
|
@ -4,6 +4,6 @@
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! to_unix_path(cc_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! to_unix_path(cxx_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(includes) }}",
|
||||
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
|
@ -5,10 +5,12 @@
|
||||
# please create `CMakeListsUser.txt` in the root of project.
|
||||
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
|
||||
|
||||
% from platformio.project.helpers import (load_project_ide_data)
|
||||
%
|
||||
% import os
|
||||
% import re
|
||||
%
|
||||
% from platformio.compat import WINDOWS
|
||||
% from platformio.project.helpers import (load_project_ide_data)
|
||||
%
|
||||
% def _normalize_path(path):
|
||||
% if project_dir in path:
|
||||
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
|
||||
@ -22,12 +24,30 @@
|
||||
% return path
|
||||
% end
|
||||
%
|
||||
% def _fix_lib_dirs(lib_dirs):
|
||||
% result = []
|
||||
% for lib_dir in lib_dirs:
|
||||
% if not os.path.isabs(lib_dir):
|
||||
% lib_dir = os.path.join(project_dir, lib_dir)
|
||||
% end
|
||||
% result.append(to_unix_path(os.path.normpath(lib_dir)))
|
||||
% end
|
||||
% return result
|
||||
% end
|
||||
%
|
||||
% def _escape(text):
|
||||
% return to_unix_path(text).replace('"', '\\"')
|
||||
% end
|
||||
%
|
||||
% def _get_lib_dirs(envname):
|
||||
% env_libdeps_dir = os.path.join(config.get_optional_dir("libdeps"), envname)
|
||||
% env_lib_extra_dirs = config.get("env:" + envname, "lib_extra_dirs", [])
|
||||
% return _fix_lib_dirs([env_libdeps_dir] + env_lib_extra_dirs)
|
||||
% end
|
||||
%
|
||||
% envs = config.envs()
|
||||
|
||||
|
||||
% if len(envs) > 1:
|
||||
set(CMAKE_CONFIGURATION_TYPES "{{ ";".join(envs) }};" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
|
||||
% else:
|
||||
@ -54,13 +74,19 @@ set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
|
||||
% end
|
||||
|
||||
if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
|
||||
%for define in defines:
|
||||
% for define in defines:
|
||||
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
|
||||
%end
|
||||
% end
|
||||
|
||||
%for include in includes:
|
||||
include_directories("{{ _normalize_path(to_unix_path(include)) }}")
|
||||
%end
|
||||
% for include in filter_includes(includes):
|
||||
include_directories("{{ _normalize_path(include) }}")
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
|
||||
% for dir in _get_lib_dirs(env_name):
|
||||
{{ _normalize_path(dir) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
endif()
|
||||
|
||||
% leftover_envs = list(set(envs) ^ set([env_name]))
|
||||
@ -76,9 +102,22 @@ if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
|
||||
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
|
||||
% end
|
||||
|
||||
% for include in data["includes"]:
|
||||
% for include in filter_includes(data["includes"]):
|
||||
include_directories("{{ _normalize_path(to_unix_path(include)) }}")
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
|
||||
% for dir in _get_lib_dirs(env):
|
||||
{{ _normalize_path(dir) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
endif()
|
||||
% end
|
||||
FILE(GLOB_RECURSE SRC_LIST "{{ _normalize_path(project_src_dir) }}/*.*" "{{ _normalize_path(project_lib_dir) }}/*.*" "{{ _normalize_path(project_libdeps_dir) }}/*.*")
|
||||
|
||||
FILE(GLOB_RECURSE SRC_LIST
|
||||
% for path in (project_src_dir, project_lib_dir):
|
||||
{{ _normalize_path(path) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
|
||||
list(APPEND SRC_LIST ${EXTRA_LIB_SOURCES})
|
||||
|
@ -52,7 +52,7 @@
|
||||
% for define in defines:
|
||||
<Add option="-D{{define}}"/>
|
||||
% end
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
<Add directory="{{include}}"/>
|
||||
% end
|
||||
</Compiler>
|
||||
|
@ -23,10 +23,8 @@
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.libs.1409095472" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs"/>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1624502120" name="Assembly" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.239157887" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% cleaned_includes = filter_includes(includes, ["toolchain"])
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
@ -47,10 +45,7 @@
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.54121539" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.1096940598" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
@ -71,10 +66,7 @@
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1310559623" name="GNU C" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.41298875" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
@ -121,10 +113,7 @@
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.libs.1855678035" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs"/>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.30528994" name="Assembly" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.794801023" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
@ -145,10 +134,7 @@
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1146422798" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.650084869" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
@ -169,10 +155,7 @@
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1637357529" name="GNU C" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.1246337321" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% for include in cleaned_includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
|
@ -13,7 +13,7 @@ clang
|
||||
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
|
||||
% end
|
||||
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
-I{{ include }}
|
||||
% end
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
-I{{include}}
|
||||
% end
|
||||
% for define in defines:
|
||||
|
@ -34,9 +34,10 @@
|
||||
<cleanCommand>{{platformio_path}} -f -c netbeans run --target clean</cleanCommand>
|
||||
<executablePath></executablePath>
|
||||
<cTool>
|
||||
% cleaned_includes = filter_includes(includes)
|
||||
<incDir>
|
||||
<pElem>src</pElem>
|
||||
% for include in includes:
|
||||
% for include in cleaned_includes:
|
||||
<pElem>{{include}}</pElem>
|
||||
% end
|
||||
</incDir>
|
||||
@ -49,7 +50,7 @@
|
||||
<ccTool>
|
||||
<incDir>
|
||||
<pElem>src</pElem>
|
||||
% for include in includes:
|
||||
% for include in cleaned_includes:
|
||||
<pElem>{{include}}</pElem>
|
||||
% end
|
||||
</incDir>
|
||||
|
@ -5,7 +5,7 @@ else {
|
||||
HOMEDIR += $$(HOME)
|
||||
}
|
||||
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
% if include.startswith(user_home_dir):
|
||||
INCLUDEPATH += "$${HOMEDIR}{{include.replace(user_home_dir, "")}}"
|
||||
% else:
|
||||
|
@ -13,7 +13,7 @@ clang
|
||||
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
|
||||
% end
|
||||
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
-I{{ include }}
|
||||
% end
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
% for include in includes:
|
||||
% for include in filter_includes(includes):
|
||||
-I"{{include}}"
|
||||
% end
|
||||
% for define in defines:
|
||||
|
@ -4,6 +4,6 @@
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(includes) }}",
|
||||
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
|
@ -42,13 +42,14 @@
|
||||
<NMakeBuildCommandLine>platformio -f -c visualstudio run</NMakeBuildCommandLine>
|
||||
<NMakeCleanCommandLine>platformio -f -c visualstudio run --target clean</NMakeCleanCommandLine>
|
||||
<NMakePreprocessorDefinitions>{{!";".join(defines)}}</NMakePreprocessorDefinitions>
|
||||
<NMakeIncludeSearchPath>{{";".join(["$(HOMEDRIVE)$(HOMEPATH)%s" % i.replace(user_home_dir, "") if i.startswith(user_home_dir) else i for i in includes])}}</NMakeIncludeSearchPath>
|
||||
% cleaned_includes = filter_includes(includes)
|
||||
<NMakeIncludeSearchPath>{{";".join(["$(HOMEDRIVE)$(HOMEPATH)%s" % i.replace(user_home_dir, "") if i.startswith(user_home_dir) else i for i in cleaned_includes])}}</NMakeIncludeSearchPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<NMakeBuildCommandLine>platformio run</NMakeBuildCommandLine>
|
||||
<NMakeCleanCommandLine>platformio run --target clean</NMakeCleanCommandLine>
|
||||
<NMakePreprocessorDefinitions>{{!";".join(defines)}}</NMakePreprocessorDefinitions>
|
||||
<NMakeIncludeSearchPath>{{";".join(["$(HOMEDRIVE)$(HOMEPATH)%s" % i.replace(user_home_dir, "") if i.startswith(user_home_dir) else i for i in includes])}}</NMakeIncludeSearchPath>
|
||||
<NMakeIncludeSearchPath>{{";".join(["$(HOMEDRIVE)$(HOMEPATH)%s" % i.replace(user_home_dir, "") if i.startswith(user_home_dir) else i for i in cleaned_includes])}}</NMakeIncludeSearchPath>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup>
|
||||
</ItemDefinitionGroup>
|
||||
|
@ -52,34 +52,34 @@
|
||||
%
|
||||
% def _find_forced_includes(flags, inc_paths):
|
||||
% result = []
|
||||
% include_args = ("-include", "-imacros")
|
||||
% for f in flags:
|
||||
% inc = ""
|
||||
% if f.startswith("-include") and f.split("-include")[1].strip():
|
||||
% inc = f.split("-include")[1].strip()
|
||||
% elif not f.startswith("-"):
|
||||
% inc = f
|
||||
% if not f.startswith(include_args):
|
||||
% continue
|
||||
% end
|
||||
% if inc:
|
||||
% result.append(_find_abs_path(inc, inc_paths))
|
||||
% for arg in include_args:
|
||||
% inc = ""
|
||||
% if f.startswith(arg) and f.split(arg)[1].strip():
|
||||
% inc = f.split(arg)[1].strip()
|
||||
% elif not f.startswith("-"):
|
||||
% inc = f
|
||||
% end
|
||||
% if inc:
|
||||
% result.append(_find_abs_path(inc, inc_paths))
|
||||
% end
|
||||
% end
|
||||
% end
|
||||
% return result
|
||||
% end
|
||||
%
|
||||
% cleaned_includes = []
|
||||
% for include in includes:
|
||||
% if "toolchain-" not in os.path.dirname(os.path.commonprefix(
|
||||
% [include, cc_path])) and os.path.isdir(include):
|
||||
% cleaned_includes.append(include)
|
||||
% end
|
||||
% end
|
||||
% cleaned_includes = filter_includes(includes, ["toolchain"])
|
||||
%
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
% cc_m_flags = split_args(cc_flags)
|
||||
% forced_includes = _find_forced_includes(
|
||||
% filter_args(cc_m_flags, ["-include"]), cleaned_includes)
|
||||
% filter_args(cc_m_flags, ["-include", "-imacros"]), cleaned_includes)
|
||||
%
|
||||
{
|
||||
"configurations": [
|
||||
@ -135,7 +135,7 @@
|
||||
"compilerArgs": [
|
||||
% for flag in [
|
||||
% '"%s"' % _escape(f) if _escape_required(f) else f
|
||||
% for f in filter_args(cc_m_flags, ["-m", "-i", "@"], ["-include"])
|
||||
% for f in filter_args(cc_m_flags, ["-m", "-i", "@"], ["-include", "-imacros"])
|
||||
% ]:
|
||||
"{{ flag }}",
|
||||
% end
|
||||
|
@ -16,28 +16,24 @@ import json
import os
import subprocess
import sys
from os.path import dirname, join

from platformio import __version__, exception, fs, util
from platformio.compat import PY2, WINDOWS
from platformio import exception, util
from platformio.compat import PY2
from platformio.managers.package import PackageManager
from platformio.proc import copy_pythonpath_to_osenv, get_pythonexe_path
from platformio.proc import get_pythonexe_path
from platformio.project.config import ProjectConfig

CORE_PACKAGES = {
"contrib-piohome": "~3.1.0",
"contrib-piohome": "~3.2.0",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-pioplus": "^2.6.1",
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if PY2 else "~3.30102.0",
"tool-cppcheck": "~1.189.0",
"tool-clangtidy": "^1.80000.0",
"tool-pvs-studio": "~7.5.0",
"tool-cppcheck": "~1.190.0",
"tool-clangtidy": "~1.100000.0",
"tool-pvs-studio": "~7.7.0",
}

PIOPLUS_AUTO_UPDATES_MAX = 100

# pylint: disable=arguments-differ
# pylint: disable=arguments-differ,signature-differs


class CorePackageManager(PackageManager):
@ -185,47 +181,3 @@ def get_contrib_pysite_deps():
)
result[0] = twisted_wheel
return result


def pioplus_call(args, **kwargs):
if WINDOWS and sys.version_info < (2, 7, 6):
raise exception.PlatformioException(
"PlatformIO Remote v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7.6, please upgrade Python.\n"
"Python 3 is not yet supported.\n" % (__version__, sys.version)
)

pioplus_path = join(get_core_package_dir("tool-pioplus"), "pioplus")
pythonexe_path = get_pythonexe_path()
os.environ["PYTHONEXEPATH"] = pythonexe_path
os.environ["PYTHONPYSITEDIR"] = get_core_package_dir("contrib-pysite")
os.environ["PIOCOREPYSITEDIR"] = dirname(fs.get_source_dir() or "")
if dirname(pythonexe_path) not in os.environ["PATH"].split(os.pathsep):
os.environ["PATH"] = (os.pathsep).join(
[dirname(pythonexe_path), os.environ["PATH"]]
)
copy_pythonpath_to_osenv()
code = subprocess.call([pioplus_path] + args, **kwargs)

# handle remote update request
if code == 13:
count_attr = "_update_count"
try:
count_value = getattr(pioplus_call, count_attr)
except AttributeError:
count_value = 0
setattr(pioplus_call, count_attr, 1)
count_value += 1
setattr(pioplus_call, count_attr, count_value)
if count_value < PIOPLUS_AUTO_UPDATES_MAX:
update_core_packages()
return pioplus_call(args, **kwargs)

# handle reload request
elif code == 14:
return pioplus_call(args, **kwargs)

if code != 0:
raise exception.ReturnErrorCode(1)

return True
@ -84,6 +84,7 @@ class LibraryManager(BasePkgManager):
for v in versions:
semver_new = self.parse_semver_version(v["name"])
if semver_spec:
# pylint: disable=unsupported-membership-test
if not semver_new or semver_new not in semver_spec:
continue
if not item or self.parse_semver_version(item["name"]) < semver_new:
@ -46,14 +46,14 @@ class PackageRepoIterator(object):
return self

def __next__(self):
return self.next()
return self.next()  # pylint: disable=not-callable

@staticmethod
@util.memoized(expire="60s")
def load_manifest(url):
r = None
try:
r = requests.get(url, headers=util.get_request_defheaders())
r = requests.get(url, headers={"User-Agent": app.get_user_agent()})
r.raise_for_status()
return r.json()
except:  # pylint: disable=bare-except
@ -370,6 +370,21 @@ class PlatformPackagesMixin(object):
return None
return self.pm.load_manifest(pkg_dir).get("version")

def dump_used_packages(self):
result = []
for name, options in self.packages.items():
if options.get("optional"):
continue
pkg_dir = self.get_package_dir(name)
if not pkg_dir:
continue
manifest = self.pm.load_manifest(pkg_dir)
item = {"name": manifest["name"], "version": manifest["version"]}
if manifest.get("__src_url"):
item["src_url"] = manifest.get("__src_url")
result.append(item)
return result


class PlatformRunMixin(object):

@ -398,6 +413,8 @@ class PlatformRunMixin(object):
self.configure_default_packages(options, targets)
self.install_packages(silent=True)

self._report_non_sensitive_data(options, targets)

self.silent = silent
self.verbose = verbose or app.get_setting("force_verbose")

@ -416,6 +433,17 @@ class PlatformRunMixin(object):

return result

def _report_non_sensitive_data(self, options, targets):
topts = options.copy()
topts["platform_packages"] = [
dict(name=item["name"], version=item["version"])
for item in self.dump_used_packages()
]
topts["platform"] = {"name": self.name, "version": self.version}
if self.src_version:
topts["platform"]["src_version"] = self.src_version
telemetry.send_run_environment(topts, targets)

def _run_scons(self, variables, targets, jobs):
args = [
proc.get_pythonexe_path(),
@ -531,14 +559,20 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
self.silent = False
self.verbose = False

self._BOARDS_CACHE = {}
self._manifest = fs.load_json(manifest_path)
self._BOARDS_CACHE = {}
self._custom_packages = None

self.config = ProjectConfig.get_instance()
self.pm = PackageManager(
self.config.get_optional_dir("packages"), self.package_repositories
)

self._src_manifest = None
src_manifest_path = self.pm.get_src_manifest_path(self.get_dir())
if src_manifest_path:
self._src_manifest = fs.load_json(src_manifest_path)

# if self.engines and "platformio" in self.engines:
# if self.PIO_VERSION not in semantic_version.SimpleSpec(
# self.engines['platformio']):
@ -561,6 +595,14 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
def version(self):
return self._manifest["version"]

@property
def src_version(self):
return self._src_manifest.get("version") if self._src_manifest else None

@property
def src_url(self):
return self._src_manifest.get("url") if self._src_manifest else None

@property
def homepage(self):
return self._manifest.get("homepage")
@ -41,5 +41,6 @@ class ManifestValidationError(ManifestException):
def __str__(self):
return (
"Invalid manifest fields: %s. \nPlease check specification -> "
"htts://docs.platformio.org/page/librarymanager/config.html" % self.messages
"https://docs.platformio.org/page/librarymanager/config.html"
% self.messages
)
@ -20,6 +20,7 @@ from threading import Thread

from platformio import exception
from platformio.compat import (
PY2,
WINDOWS,
get_filesystem_encoding,
get_locale_encoding,
@ -30,7 +31,10 @@ from platformio.compat import (
class AsyncPipeBase(object):
def __init__(self):
self._fd_read, self._fd_write = os.pipe()
self._pipe_reader = os.fdopen(self._fd_read)
if PY2:
self._pipe_reader = os.fdopen(self._fd_read)
else:
self._pipe_reader = os.fdopen(self._fd_read, encoding="utf-8")
self._buffer = ""
self._thread = Thread(target=self.run)
self._thread.start()
@ -566,6 +566,13 @@ ProjectOptions = OrderedDict(
type=click.Choice(["low", "medium", "high"]),
default=["low", "medium", "high"],
),
ConfigEnvOption(
group="check",
name="check_skip_packages",
description="Skip checking includes from packages directory",
type=click.BOOL,
default=False,
),
# Test
ConfigEnvOption(
group="test",
@ -13,8 +13,9 @@
# limitations under the License.

import atexit
import hashlib
import json
import os
import platform
import re
import sys
import threading
@ -27,8 +28,9 @@ import requests

from platformio import __version__, app, exception, util
from platformio.commands import PlatformioCLI
from platformio.compat import string_types
from platformio.compat import hashlib_encode_data, string_types
from platformio.proc import is_ci, is_container
from platformio.project.helpers import is_platformio_project

try:
import queue
@ -93,23 +95,17 @@ class MeasurementProtocol(TelemetryBase):

def _prefill_appinfo(self):
self["av"] = __version__

# gather dependent packages
dpdata = []
dpdata.append("PlatformIO/%s" % __version__)
if app.get_session_var("caller_id"):
dpdata.append("Caller/%s" % app.get_session_var("caller_id"))
if os.getenv("PLATFORMIO_IDE"):
dpdata.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
self["an"] = " ".join(dpdata)
self["an"] = app.get_user_agent()

def _prefill_sysargs(self):
args = []
for arg in sys.argv[1:]:
arg = str(arg).lower()
if "@" in arg or os.path.exists(arg):
arg = str(arg)
if arg == "account":  # ignore account cmd which can contain username
return
if any(("@" in arg, "/" in arg, "\\" in arg)):
arg = "***"
args.append(arg)
args.append(arg.lower())
self["cd3"] = " ".join(args)

def _prefill_custom_data(self):
@ -127,7 +123,6 @@ class MeasurementProtocol(TelemetryBase):

caller_id = str(app.get_session_var("caller_id"))
self["cd1"] = util.get_systype()
self["cd2"] = "Python/%s %s" % (platform.python_version(), platform.platform())
self["cd4"] = (
1 if (not util.is_ci() and (caller_id or not is_container())) else 0
)
@ -269,7 +264,7 @@ class MPDataPusher(object):
r = self._http_session.post(
"https://ssl.google-analytics.com/collect",
data=data,
headers=util.get_request_defheaders(),
headers={"User-Agent": app.get_user_agent()},
timeout=1,
)
r.raise_for_status()
@ -299,10 +294,6 @@ def on_exception(e):
isinstance(e, cls)
for cls in (IOError, exception.ReturnErrorCode, exception.UserSideException,)
]
try:
skip_conditions.append("[API] Account: " in str(e))
except UnicodeEncodeError as ue:
e = ue
if any(skip_conditions):
return
is_fatal = any(
@ -320,7 +311,15 @@ def on_exception(e):

def measure_ci():
event = {"category": "CI", "action": "NoName", "label": None}
known_cis = ("TRAVIS", "APPVEYOR", "GITLAB_CI", "CIRCLECI", "SHIPPABLE", "DRONE")
known_cis = (
"GITHUB_ACTIONS",
"TRAVIS",
"APPVEYOR",
"GITLAB_CI",
"CIRCLECI",
"SHIPPABLE",
"DRONE",
)
for name in known_cis:
if os.getenv(name, "false").lower() == "true":
event["action"] = name
@ -328,26 +327,29 @@ def measure_ci():
send_event(**event)


def encode_run_environment(options):
non_sensative_keys = [
def dump_run_environment(options):
non_sensitive_data = [
"platform",
"platform_packages",
"framework",
"board",
"upload_protocol",
"check_tool",
"debug_tool",
"monitor_filters",
]
safe_options = [
"%s=%s" % (k, v) for k, v in sorted(options.items()) if k in non_sensative_keys
]
return "&".join(safe_options)
safe_options = {k: v for k, v in options.items() if k in non_sensitive_data}
if is_platformio_project(os.getcwd()):
phash = hashlib.sha1(hashlib_encode_data(app.get_cid()))
safe_options["pid"] = phash.hexdigest()
return json.dumps(safe_options, sort_keys=True, ensure_ascii=False)


def send_run_environment(options, targets):
send_event(
"Env",
" ".join([t.title() for t in targets or ["run"]]),
encode_run_environment(options),
dump_run_environment(options),
)
@ -270,11 +270,6 @@ def get_mdns_services():
return items


def get_request_defheaders():
data = (__version__, int(is_ci()), requests.utils.default_user_agent())
return {"User-Agent": "PlatformIO/%s CI/%d %s" % data}


@memoized(expire="60s")
def _api_request_session():
return requests.Session()
@ -284,18 +279,19 @@ def _api_request_session():
def _get_api_result(
url, params=None, data=None, auth=None  # pylint: disable=too-many-branches
):
from platformio.app import get_setting  # pylint: disable=import-outside-toplevel
# pylint: disable=import-outside-toplevel
from platformio.app import get_user_agent, get_setting

result = {}
r = None
verify_ssl = sys.version_info >= (2, 7, 9)

headers = get_request_defheaders()
if not url.startswith("http"):
url = __apiurl__ + url
if not get_setting("strict_ssl"):
url = url.replace("https://", "http://")

headers = {"User-Agent": get_user_agent()}
try:
if data:
r = _api_request_session().post(
@ -369,6 +365,7 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
PING_REMOTE_HOSTS = [
"140.82.118.3",  # Github.com
"35.231.145.151",  # Gitlab.com
"88.198.170.159",  # platformio.org
"github.com",
"platformio.org",
]
@ -53,7 +53,7 @@ def is_compat_platform_and_framework(platform, framework):
return framework in (p.frameworks or {}).keys()


def campaign_url(url, source="platformio", medium="docs"):
def campaign_url(url, source="platformio.org", medium="docs"):
data = urlparse(url)
query = data.query
if query:
@ -429,8 +429,8 @@ Boards
.. note::
* You can list pre-configured boards by :ref:`cmd_boards` command or
`PlatformIO Boards Explorer <https://platformio.org/boards>`_
* For more detailed ``board`` information please scroll tables below by
horizontal.
* For more detailed ``board`` information please scroll the tables below by
horizontally.
""")

for vendor, boards in sorted(vendors.items()):
@ -533,7 +533,7 @@ Boards
.. note::
* You can list pre-configured boards by :ref:`cmd_boards` command or
`PlatformIO Boards Explorer <https://platformio.org/boards>`_
* For more detailed ``board`` information please scroll tables below by horizontal.
* For more detailed ``board`` information please scroll the tables below by horizontally.
""")
for vendor, boards in sorted(vendors.items()):
lines.append(str(vendor))
@ -906,7 +906,7 @@ def update_project_examples():
platform_examples_dir = join(p.get_dir(), "examples")
examples_md_lines = []
if isdir(platform_examples_dir):
for item in os.listdir(platform_examples_dir):
for item in sorted(os.listdir(platform_examples_dir)):
example_dir = join(platform_examples_dir, item)
if not isdir(example_dir) or not os.listdir(example_dir):
continue
@ -15,26 +15,29 @@
import json
import subprocess
import sys
from platformio import util

import click


def main():
@click.command()
@click.option("--desktop", is_flag=True, default=False)
@click.option(
"--ignore",
envvar="PIO_INSTALL_DEVPLATFORMS_IGNORE",
help="Ignore names split by comma",
)
def main(desktop, ignore):
platforms = json.loads(
subprocess.check_output(
["platformio", "platform", "search", "--json-output"]).decode())
["platformio", "platform", "search", "--json-output"]
).decode()
)
ignore = [n.strip() for n in (ignore or "").split(",") if n.strip()]
for platform in platforms:
if platform['forDesktop']:
skip = [not desktop and platform["forDesktop"], platform["name"] in ignore]
if any(skip):
continue
# RISC-V GAP does not support Windows 86
if (util.get_systype() == "windows_x86"
and platform['name'] == "riscv_gap"):
continue
# unknown issue on Linux
if ("linux" in util.get_systype()
and platform['name'] == "aceinna_imu"):
continue
subprocess.check_call(
["platformio", "platform", "install", platform['name']])
subprocess.check_call(["platformio", "platform", "install", platform["name"]])


if __name__ == "__main__":
580
tests/commands/test_account.py
Normal file
@ -0,0 +1,580 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
import time

import pytest

from platformio.commands.account.command import cli as cmd_account


@pytest.fixture(scope="session")
def credentials():
return {
"login": os.environ["PLATFORMIO_TEST_ACCOUNT_LOGIN"],
"password": os.environ["PLATFORMIO_TEST_ACCOUNT_PASSWORD"],
}


def test_account_register_with_already_exists_username(
clirunner, credentials, isolated_pio_home
):
username = credentials["login"]
email = "test@test.com"
if "@" in credentials["login"]:
username = "Testusername"
email = credentials["login"]
result = clirunner.invoke(
cmd_account,
[
"register",
"-u",
username,
"-e",
email,
"-p",
credentials["password"],
"--firstname",
"First",
"--lastname",
"Last",
],
)
assert result.exit_code > 0
assert result.exception
assert "User with same username already exists" in str(
result.exception
) or "User with same email already exists" in str(result.exception)


@pytest.mark.skip_ci
def test_account_login_with_invalid_creds(clirunner, credentials, isolated_pio_home):
result = clirunner.invoke(cmd_account, ["login", "-u", "123", "-p", "123"])
assert result.exit_code > 0
assert result.exception
assert "Invalid user credentials" in str(result.exception)


def test_account_login(clirunner, credentials, validate_cliresult, isolated_pio_home):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)
assert "Successfully logged in!" in result.output

with open(str(isolated_pio_home.join("appstate.json"))) as fp:
appstate = json.load(fp)
assert appstate.get("account")
assert appstate.get("account").get("email")
assert appstate.get("account").get("username")
assert appstate.get("account").get("auth")
assert appstate.get("account").get("auth").get("access_token")
assert appstate.get("account").get("auth").get("access_token_expire")
assert appstate.get("account").get("auth").get("refresh_token")

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
assert result.exit_code > 0
assert result.exception
assert "You are already authenticated with" in str(result.exception)
finally:
clirunner.invoke(cmd_account, ["logout"])


def test_account_logout(clirunner, credentials, validate_cliresult, isolated_pio_home):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(cmd_account, ["logout"])
validate_cliresult(result)
assert "Successfully logged out" in result.output

result = clirunner.invoke(cmd_account, ["logout"])
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)
finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_password_change_with_invalid_old_password(
clirunner, credentials, validate_cliresult
):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
["password", "--old-password", "test", "--new-password", "test"],
)
assert result.exit_code > 0
assert result.exception
assert "Invalid user password" in str(result.exception)

finally:
clirunner.invoke(cmd_account, ["logout"])


def test_account_password_change_with_invalid_new_password_format(
clirunner, credentials, validate_cliresult
):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
[
"password",
"--old-password",
credentials["password"],
"--new-password",
"test",
],
)
assert result.exit_code > 0
assert result.exception
assert (
"Invalid password format. Password must contain at"
" least 8 characters including a number and a lowercase letter"
in str(result.exception)
)

finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_password_change(
clirunner, credentials, validate_cliresult, isolated_pio_home
):
try:
result = clirunner.invoke(
cmd_account,
[
"password",
"--old-password",
credentials["password"],
"--new-password",
"Testpassword123",
],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
[
"password",
"--old-password",
credentials["password"],
"--new-password",
"Testpassword123",
],
)
validate_cliresult(result)
assert "Password successfully changed!" in result.output

result = clirunner.invoke(cmd_account, ["logout"])
validate_cliresult(result)

result = clirunner.invoke(
cmd_account, ["login", "-u", credentials["login"], "-p", "Testpassword123"],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
[
"password",
"--old-password",
"Testpassword123",
"--new-password",
credentials["password"],
],
)
validate_cliresult(result)
assert "Password successfully changed!" in result.output

finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_token_with_invalid_password(
clirunner, credentials, validate_cliresult
):
try:
result = clirunner.invoke(
cmd_account, ["token", "--password", credentials["password"],],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(cmd_account, ["token", "--password", "test",],)
assert result.exit_code > 0
assert result.exception
assert "Invalid user password" in str(result.exception)

finally:
clirunner.invoke(cmd_account, ["logout"])


def test_account_token(clirunner, credentials, validate_cliresult, isolated_pio_home):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account, ["token", "--password", credentials["password"],],
)
validate_cliresult(result)
assert "Personal Authentication Token:" in result.output
token = result.output.strip().split(": ")[-1]

result = clirunner.invoke(
cmd_account,
["token", "--password", credentials["password"], "--json-output"],
)
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result
assert json_result.get("status") == "success"
assert json_result.get("result") == token
token = json_result.get("result")

clirunner.invoke(cmd_account, ["logout"])

result = clirunner.invoke(
cmd_account, ["token", "--password", credentials["password"],],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

os.environ["PLATFORMIO_AUTH_TOKEN"] = token

result = clirunner.invoke(
cmd_account,
["token", "--password", credentials["password"], "--json-output"],
)
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result
assert json_result.get("status") == "success"
assert json_result.get("result") == token

os.environ.pop("PLATFORMIO_AUTH_TOKEN")

finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_token_with_refreshing(
clirunner, credentials, validate_cliresult, isolated_pio_home
):
try:
result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
["token", "--password", credentials["password"], "--json-output"],
)
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result
assert json_result.get("status") == "success"
assert json_result.get("result")
token = json_result.get("result")

result = clirunner.invoke(
cmd_account,
[
"token",
"--password",
credentials["password"],
"--json-output",
"--regenerate",
],
)
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result
assert json_result.get("status") == "success"
assert json_result.get("result")
assert token != json_result.get("result")
finally:
clirunner.invoke(cmd_account, ["logout"])


def test_account_summary(clirunner, credentials, validate_cliresult, isolated_pio_home):
try:
result = clirunner.invoke(cmd_account, ["show"],)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(cmd_account, ["show"])
validate_cliresult(result)
assert credentials["login"] in result.output
assert "Community" in result.output
assert "100 Concurrent Remote Agents" in result.output

result = clirunner.invoke(cmd_account, ["show", "--json-output"])
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result.get("user_id")
assert json_result.get("profile")
assert json_result.get("profile").get("username")
assert json_result.get("profile").get("email")
assert credentials["login"] == json_result.get("profile").get(
"username"
) or credentials["login"] == json_result.get("profile").get("email")
assert json_result.get("profile").get("firstname")
assert json_result.get("profile").get("lastname")
assert json_result.get("packages")
assert json_result.get("packages")[0].get("name")
assert json_result.get("packages")[0].get("path")
assert json_result.get("subscriptions") is not None

result = clirunner.invoke(cmd_account, ["show", "--json-output", "--offline"])
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert not json_result.get("user_id")
assert json_result.get("profile")
assert json_result.get("profile").get("username")
assert json_result.get("profile").get("email")
assert not json_result.get("packages")
assert not json_result.get("subscriptions")
finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_profile_update_with_invalid_password(
clirunner, credentials, validate_cliresult
):
try:
result = clirunner.invoke(
cmd_account, ["update", "--current-password", credentials["password"]],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

firstname = "First " + str(int(time.time() * 1000))

result = clirunner.invoke(
cmd_account,
["update", "--current-password", "test", "--firstname", firstname],
)
assert result.exit_code > 0
assert result.exception
assert "Invalid user password" in str(result.exception)
finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_profile_update_only_firstname_and_lastname(
clirunner, credentials, validate_cliresult, isolated_pio_home
):
try:
result = clirunner.invoke(
cmd_account, ["update", "--current-password", credentials["password"]],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

firstname = "First " + str(int(time.time() * 1000))
lastname = "Last" + str(int(time.time() * 1000))

result = clirunner.invoke(
cmd_account,
[
"update",
"--current-password",
credentials["password"],
"--firstname",
firstname,
"--lastname",
lastname,
],
)
validate_cliresult(result)
assert "Profile successfully updated!" in result.output

result = clirunner.invoke(cmd_account, ["show", "--json-output"])
validate_cliresult(result)
json_result = json.loads(result.output.strip())
assert json_result.get("profile").get("firstname") == firstname
assert json_result.get("profile").get("lastname") == lastname

finally:
clirunner.invoke(cmd_account, ["logout"])


@pytest.mark.skip_ci
def test_account_profile_update(
clirunner, credentials, validate_cliresult, isolated_pio_home
):
try:
result = clirunner.invoke(
cmd_account, ["update", "--current-password", credentials["password"]],
)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account,
["login", "-u", credentials["login"], "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(cmd_account, ["show", "--json-output"])
validate_cliresult(result)
json_result = json.loads(result.output.strip())

firstname = "First " + str(int(time.time() * 1000))
lastname = "Last" + str(int(time.time() * 1000))

old_username = json_result.get("profile").get("username")
new_username = "username" + str(int(time.time() * 1000))[-5:]

result = clirunner.invoke(
cmd_account,
[
"update",
"--current-password",
credentials["password"],
"--firstname",
firstname,
"--lastname",
lastname,
"--username",
new_username,
],
)
validate_cliresult(result)
assert "Profile successfully updated!" in result.output
assert "Please re-login." in result.output

result = clirunner.invoke(cmd_account, ["show"],)
assert result.exit_code > 0
assert result.exception
assert "You are not authenticated! Please login to PIO Account" in str(
result.exception
)

result = clirunner.invoke(
cmd_account, ["login", "-u", new_username, "-p", credentials["password"]],
)
validate_cliresult(result)

result = clirunner.invoke(
cmd_account,
[
"update",
"--current-password",
credentials["password"],
"--username",
old_username,
],
)
validate_cliresult(result)
assert "Profile successfully updated!" in result.output
assert "Please re-login." in result.output

result = clirunner.invoke(
cmd_account, ["login", "-u", old_username, "-p", credentials["password"]],
)
validate_cliresult(result)
finally:
clirunner.invoke(cmd_account, ["logout"])
@ -13,10 +13,12 @@
# limitations under the License.

import json
import sys
from os.path import isfile, join

import pytest

from platformio import fs
from platformio.commands.check.command import cli as cmd_check

DEFAULT_CONFIG = """
@ -383,3 +385,73 @@ check_tool = pvs-studio
assert errors != 0
assert warnings != 0
assert style == 0


def test_check_embedded_platform_all_tools(clirunner, tmpdir):
config = """
[env:test]
platform = ststm32
board = nucleo_f401re
framework = %s
check_tool = %s
"""
# tmpdir.join("platformio.ini").write(config)
tmpdir.mkdir("src").join("main.c").write(
"""// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++, C#, and Java: http://www.viva64.com
#include <stdlib.h>

void unused_function(int val){
int unusedVar = 0;
int* iP = &unusedVar;
*iP++;
}

int main() {
}
"""
)

frameworks = ["arduino", "mbed", "stm32cube"]
if sys.version_info[0] == 3:
# Zephyr only supports Python 3
frameworks.append("zephyr")

for framework in frameworks:
for tool in ("cppcheck", "clangtidy", "pvs-studio"):
tmpdir.join("platformio.ini").write(config % (framework, tool))

result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])

defects = sum(count_defects(result.output))

assert result.exit_code == 0 and defects > 0, "Failed %s with %s" % (
framework,
tool,
)


def test_check_skip_includes_from_packages(clirunner, tmpdir):
config = """
[env:test]
platform = nordicnrf52
board = nrf52_dk
framework = arduino
"""

tmpdir.join("platformio.ini").write(config)
tmpdir.mkdir("src").join("main.c").write(TEST_CODE)

result = clirunner.invoke(
cmd_check, ["--project-dir", str(tmpdir), "--skip-packages", "-v"]
)

output = result.output

project_path = fs.to_unix_path(str(tmpdir))
for l in output.split("\n"):
if not l.startswith("Includes:"):
continue
for inc in l.split(" "):
if inc.startswith("-I") and project_path not in inc:
pytest.fail("Detected an include path from packages: " + inc)
@ -156,5 +156,5 @@ def test_init_custom_framework(clirunner, validate_cliresult):
def test_init_incorrect_board(clirunner):
result = clirunner.invoke(cmd_init, ["-b", "missed_board"])
assert result.exit_code == 2
assert 'Error: Invalid value for "-b" / "--board' in result.output
assert "Error: Invalid value for" in result.output
assert isinstance(result.exception, SystemExit)
@ -63,7 +63,7 @@ def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(
cli_platform.platform_install,
[
"https://github.com/platformio/" "platform-espressif8266.git#feature/stage",
"https://github.com/platformio/" "platform-espressif8266.git",
"--skip-default-package",
],
)
@ -20,6 +20,10 @@ from click.testing import CliRunner
from platformio import util


def pytest_configure(config):
config.addinivalue_line("markers", "skip_ci: mark a test that will not run in CI")


@pytest.fixture(scope="session")
def validate_cliresult():
def decorator(result):
@ -36,14 +36,6 @@ def pytest_generate_tests(metafunc):
# dev/platforms
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
ignore_conds = [
not p.is_embedded(),
p.name == "ststm8",
# issue with "version `CXXABI_1.3.9' not found (required by sdcc)"
"linux" in util.get_systype() and p.name == "intel_mcs51",
]
if any(ignore_conds):
continue
examples_dir = join(p.get_dir(), "examples")
assert isdir(examples_dir)
examples_dirs.append(examples_dir)
@ -70,7 +62,6 @@ def pytest_generate_tests(metafunc):
metafunc.parametrize("pioproject_dir", sorted(project_dirs))


@pytest.mark.examples
def test_run(pioproject_dir):
with util.cd(pioproject_dir):
config = ProjectConfig()
@ -121,11 +121,19 @@ def test_defaults(config):
assert config.get_optional_dir("core") == os.path.join(
os.path.expanduser("~"), ".platformio"
)
assert config.get("strict_ldf", "lib_deps", ["Empty"]) == ["Empty"]
assert config.get("env:extra_2", "lib_compat_mode") == "soft"
assert config.get("env:extra_2", "build_type") == "release"
assert config.get("env:extra_2", "build_type", None) is None
assert config.get("env:extra_2", "lib_archive", "no") is False

config.expand_interpolations = False
with pytest.raises(
InvalidProjectConfError, match="No option 'lib_deps' in section: 'strict_ldf'"
):
assert config.get("strict_ldf", "lib_deps", ["Empty"]) == ["Empty"]
config.expand_interpolations = True


def test_sections(config):
with pytest.raises(ConfigParser.NoSectionError):
31
tox.ini
@ -13,7 +13,7 @@
# limitations under the License.

[tox]
envlist = py27, py35, py36, py37, docs
envlist = py27,py37

[testenv]
passenv = *
@ -25,13 +25,23 @@ deps =
pytest
pytest-xdist
jsondiff
commands =
{envpython} --version

[testenv:lint]
commands =
{envpython} --version
pylint --rcfile=./.pylintrc ./platformio
{envpython} -c "print('travis_fold:start:install_devplatforms')"

[testenv:testcore]
commands =
{envpython} --version
py.test -v --basetemp="{envtmpdir}" -k-skip_ci tests --ignore tests/test_examples.py

[testenv:testexamples]
commands =
{envpython} scripts/install_devplatforms.py
{envpython} -c "print('travis_fold:end:install_devplatforms')"
py.test -v --basetemp="{envtmpdir}" tests
py.test -v --basetemp="{envtmpdir}" tests/test_examples.py

[testenv:docs]
deps =
@ -48,16 +58,3 @@ deps =
sphinx_rtd_theme
commands =
sphinx-build -W -b linkcheck docs docs/_build/html

[testenv:skipexamples]
commands =
py.test -v --basetemp="{envtmpdir}" tests --ignore tests/test_examples.py

; [testenv:coverage]
; basepython = python2
; passenv = *
; deps =
;     pytest
;     pytest-cov
; commands =
;     py.test --cov=platformio --cov-report term --cov-report xml --ignore=tests/test_examples.py --ignore=tests/test_pkgmanifest.py -v tests