mirror of
https://github.com/platformio/platformio-core.git
synced 2025-12-23 07:12:31 +01:00
Compare commits
139 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d2ae333bb8 | ||
|
|
18b18f1c3d | ||
|
|
edf724d20d | ||
|
|
622a190a61 | ||
|
|
5b4a78ba20 | ||
|
|
44b85f6e4b | ||
|
|
7f1f760645 | ||
|
|
54d8c96c30 | ||
|
|
c6ab7827e7 | ||
|
|
ae26079e2e | ||
|
|
3e993156f2 | ||
|
|
3b2fafd789 | ||
|
|
72ebaddcb8 | ||
|
|
5a9950cc19 | ||
|
|
cf29d7e400 | ||
|
|
244dba3614 | ||
|
|
21886517e1 | ||
|
|
3996236729 | ||
|
|
560cb3ac82 | ||
|
|
81c7e23ae9 | ||
|
|
0b8bd6d4fc | ||
|
|
7c271c8207 | ||
|
|
58947d91a6 | ||
|
|
20096be990 | ||
|
|
7c8508b651 | ||
|
|
b56d0fdd9b | ||
|
|
d0cc06f766 | ||
|
|
d8d2b215d1 | ||
|
|
c478d383b4 | ||
|
|
e01cd1c037 | ||
|
|
e63019c469 | ||
|
|
90a325a1b2 | ||
|
|
698594525f | ||
|
|
fd540148f3 | ||
|
|
078a024931 | ||
|
|
f8193b2419 | ||
|
|
808ba603c5 | ||
|
|
61d70fa688 | ||
|
|
493a33e754 | ||
|
|
bd75c3e559 | ||
|
|
cb9e72a879 | ||
|
|
9d2fd4982f | ||
|
|
eed9a0e376 | ||
|
|
d77dbb2cca | ||
|
|
7810946484 | ||
|
|
e2906e3be5 | ||
|
|
0a8b66ee95 | ||
|
|
8ff270c5f7 | ||
|
|
4012a86cac | ||
|
|
dd4fff3a79 | ||
|
|
0ed99b7687 | ||
|
|
2c389ae11e | ||
|
|
15ff8f9d2a | ||
|
|
bd4d3b914b | ||
|
|
59b02120b6 | ||
|
|
92655c30c1 | ||
|
|
484567f242 | ||
|
|
ef6e70a38b | ||
|
|
e695e30a9b | ||
|
|
65e67b64bd | ||
|
|
ddbe339541 | ||
|
|
b2c0e6a8c2 | ||
|
|
f9384ded27 | ||
|
|
4488f25ce0 | ||
|
|
52b22b5784 | ||
|
|
5a356140d6 | ||
|
|
e79de0108c | ||
|
|
985f31877c | ||
|
|
11a71b7fbb | ||
|
|
7f26c11c9d | ||
|
|
9b93fcd947 | ||
|
|
733ca5174b | ||
|
|
bd897d780b | ||
|
|
429065d2b9 | ||
|
|
b90734f1e2 | ||
|
|
db97a7d9d3 | ||
|
|
6ff67aeadf | ||
|
|
dd7d282d17 | ||
|
|
4e637ae58a | ||
|
|
1ec2e55322 | ||
|
|
556eb3f8c1 | ||
|
|
76b49ebc95 | ||
|
|
e82443a302 | ||
|
|
5de86a6416 | ||
|
|
3f3c8cabb8 | ||
|
|
cd59aa9afb | ||
|
|
34e12e575b | ||
|
|
4c8c261ab4 | ||
|
|
099bb3b9ff | ||
|
|
c623a6aacc | ||
|
|
ce7356794d | ||
|
|
523494f9cf | ||
|
|
0edc867d45 | ||
|
|
ce4c45a075 | ||
|
|
e29941e3eb | ||
|
|
86ce3595f6 | ||
|
|
6e958b8415 | ||
|
|
d485703768 | ||
|
|
109e2107d1 | ||
|
|
3469905365 | ||
|
|
75b3846f8f | ||
|
|
a9ec38208c | ||
|
|
c38b9a4144 | ||
|
|
b6128aeaa1 | ||
|
|
881782be05 | ||
|
|
0c05930501 | ||
|
|
b96f2a19b5 | ||
|
|
c1906714ee | ||
|
|
32181d1bd2 | ||
|
|
7dfb413d87 | ||
|
|
7934a96ad1 | ||
|
|
abddbf9c7d | ||
|
|
77e66241f7 | ||
|
|
4b3f2e19a4 | ||
|
|
b29c6485a8 | ||
|
|
f4dba7a68c | ||
|
|
2817408db3 | ||
|
|
9ff3c758eb | ||
|
|
3dcc189740 | ||
|
|
4a12d1954e | ||
|
|
e4d645110a | ||
|
|
01a32067d5 | ||
|
|
fc5ce4739c | ||
|
|
ae7b8f9ecf | ||
|
|
0f5d2d6821 | ||
|
|
48eca22a00 | ||
|
|
5e164493a8 | ||
|
|
ead99208f2 | ||
|
|
4f5ad05792 | ||
|
|
bc52e72605 | ||
|
|
038674835a | ||
|
|
00f21c17ca | ||
|
|
818a1508a0 | ||
|
|
2d9480a6a7 | ||
|
|
0bec4e25c8 | ||
|
|
950a540df4 | ||
|
|
2e66c5f807 | ||
|
|
7033c2616b | ||
|
|
8d4cde4534 |
2
.github/workflows/core.yml
vendored
2
.github/workflows/core.yml
vendored
@@ -8,7 +8,7 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
python-version: [2.7, 3.7, 3.8]
|
||||
python-version: [3.6, 3.7, 3.8, 3.9]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
13
.github/workflows/examples.yml
vendored
13
.github/workflows/examples.yml
vendored
@@ -8,14 +8,14 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-16.04, windows-latest, macos-latest]
|
||||
python-version: [2.7, 3.7]
|
||||
python-version: [3.7]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: "recursive"
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v1
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
@@ -26,7 +26,8 @@ jobs:
|
||||
- name: Run on Linux
|
||||
if: startsWith(matrix.os, 'ubuntu')
|
||||
env:
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51"
|
||||
run: |
|
||||
# ChipKIT issue: install 32-bit support for GCC PIC32
|
||||
sudo apt-get install libc6-i386
|
||||
@@ -40,7 +41,8 @@ jobs:
|
||||
- name: Run on macOS
|
||||
if: startsWith(matrix.os, 'macos')
|
||||
env:
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,microchippic32,gd32v,nuclei,lattice_ice40"
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,lattice_ice40,gd32v"
|
||||
run: |
|
||||
df -h
|
||||
tox -e testexamples
|
||||
@@ -50,7 +52,8 @@ jobs:
|
||||
env:
|
||||
PLATFORMIO_CORE_DIR: C:/pio
|
||||
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,riscv_gap"
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
|
||||
run: |
|
||||
tox -e testexamples
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
[settings]
|
||||
line_length=88
|
||||
known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope
|
||||
@@ -14,7 +14,6 @@ disable=
|
||||
too-few-public-methods,
|
||||
useless-object-inheritance,
|
||||
useless-import-alias,
|
||||
fixme,
|
||||
bad-option-value,
|
||||
|
||||
; PY2 Compat
|
||||
|
||||
76
HISTORY.rst
76
HISTORY.rst
@@ -8,6 +8,79 @@ PlatformIO Core 5
|
||||
|
||||
**A professional collaborative platform for embedded development**
|
||||
|
||||
5.1.1 (2021-03-17)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Fixed a "The command line is too long" issue with a linking process on Windows (`issue #3827 <https://github.com/platformio/platformio-core/issues/3827>`_)
|
||||
* Fixed an issue with `device monitor <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html>`__ when the "send_on_enter" filter didn't send EOL chars (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
|
||||
* Fixed an issue with silent mode when unwanted data is printed to stdout (`issue #3837 <https://github.com/platformio/platformio-core/issues/3837>`_)
|
||||
* Fixed an issue when code inspection fails with "Bad JSON" (`issue #3790 <https://github.com/platformio/platformio-core/issues/3790>`_)
|
||||
* Fixed an issue with overriding user-specified debugging configuration information in VSCode (`issue #3824 <https://github.com/platformio/platformio-core/issues/3824>`_)
|
||||
|
||||
5.1.0 (2021-01-28)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* **PlatformIO Home**
|
||||
|
||||
- Boosted `PlatformIO Home <https://docs.platformio.org/page/home/index.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
|
||||
- Added a new ``--session-id`` option to `pio home <https://docs.platformio.org/page/core/userguide/cmd_home.html>`__ command that helps to keep PlatformIO Home isolated from other instances and protect from 3rd party access (`issue #3397 <https://github.com/platformio/platformio-core/issues/3397>`_)
|
||||
|
||||
* **Build System**
|
||||
|
||||
- Upgraded build engine to the SCons 4.1 (`release notes <https://scons.org/scons-410-is-available.html>`_)
|
||||
- Refactored a workaround for a maximum command line character limitation (`issue #3792 <https://github.com/platformio/platformio-core/issues/3792>`_)
|
||||
- Fixed an issue with Python 3.8+ on Windows when a network drive is used (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
|
||||
|
||||
* **Package Management**
|
||||
|
||||
- New options for `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command:
|
||||
|
||||
+ ``--dry-run`` option to show data that will be removed
|
||||
+ ``--core-packages`` option to remove unnecessary core packages
|
||||
+ ``--platform-packages`` option to remove unnecessary development platform packages (`issue #923 <https://github.com/platformio/platformio-core/issues/923>`_)
|
||||
|
||||
- Added new `check_prune_system_threshold <https://docs.platformio.org/page/core/userguide/cmd_settings.html#check-prune-system-threshold>`__ setting
|
||||
- Disabled automatic removal of unnecessary development platform packages (`issue #3708 <https://github.com/platformio/platformio-core/issues/3708>`_, `issue #3770 <https://github.com/platformio/platformio-core/issues/3770>`_)
|
||||
- Fixed an issue when unnecessary packages were removed in ``update --dry-run`` mode (`issue #3809 <https://github.com/platformio/platformio-core/issues/3809>`_)
|
||||
- Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source (`issue #3816 <https://github.com/platformio/platformio-core/issues/3816>`_)
|
||||
|
||||
* **Debugging**
|
||||
|
||||
- Configure a custom debug adapter speed using a new `debug_speed <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-speed>`__ option (`issue #3799 <https://github.com/platformio/platformio-core/issues/3799>`_)
|
||||
- Handle debugging server's "ready_pattern" in "stderr" output
|
||||
|
||||
* **Miscellaneous**
|
||||
|
||||
- Improved listing of `multicast DNS services <https://docs.platformio.org/page/core/userguide/device/cmd_list.html>`_
|
||||
- Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" when using J-Link for firmware uploading on Linux (`issue #3804 <https://github.com/platformio/platformio-core/issues/3804>`_)
|
||||
- Fixed an issue with a compiler driver for ".ccls" language server (`issue #3808 <https://github.com/platformio/platformio-core/issues/3808>`_)
|
||||
- Fixed an issue when `pio device monitor --eol <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-eol>`__ and "send_on_enter" filter do not work properly (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
|
||||
|
||||
5.0.4 (2020-12-30)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added "Core" suffix when showing PlatformIO Core version using ``pio --version`` command
|
||||
- Improved ".ccls" configuration file for Emacs, Vim, and Sublime Text integrations
|
||||
- Updated analysis tools:
|
||||
|
||||
* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.3 with improved C++ parser and several new MISRA rules
|
||||
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.11 with new diagnostics and updated mass suppression mechanism
|
||||
|
||||
- Show a warning message about deprecated support for Python 2 and Python 3.5
|
||||
- Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension
|
||||
- Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS (`issue #3740 <https://github.com/platformio/platformio-core/issues/3740>`_)
|
||||
- Fixed an issue with package publishing on Windows when Unix permissions are not preserved (`issue #3776 <https://github.com/platformio/platformio-core/issues/3776>`_)
|
||||
|
||||
5.0.3 (2020-11-12)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added an error selector for `Sublime Text <https://docs.platformio.org/page/integration/ide/sublimetext.html>`__ build runner (`issue #3733 <https://github.com/platformio/platformio-core/issues/3733>`_)
|
||||
- Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode
|
||||
- Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used
|
||||
- Print ignored test suites and environments in the test summary report only in verbose mode (`issue #3726 <https://github.com/platformio/platformio-core/issues/3726>`_)
|
||||
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
|
||||
- Fixed an issue when `pio package pack <https://docs.platformio.org/page/core/userguide/package/cmd_pack.html>`__ ignores some folders (`issue #3730 <https://github.com/platformio/platformio-core/issues/3730>`_)
|
||||
|
||||
5.0.2 (2020-10-30)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
@@ -118,7 +191,8 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/
|
||||
- Display system-wide information using a new `pio system info <https://docs.platformio.org/page/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
|
||||
- Remove unused data using a new `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
|
||||
- Show ignored project environments only in the verbose mode (`issue #3641 <https://github.com/platformio/platformio-core/issues/3641>`_)
|
||||
- Do not escape compiler arguments in VSCode template on Windows.
|
||||
- Do not escape compiler arguments in VSCode template on Windows
|
||||
- Drop support for Python 2 and 3.5.
|
||||
|
||||
.. _release_notes_4:
|
||||
|
||||
|
||||
4
Makefile
4
Makefile
@@ -3,8 +3,8 @@ lint:
|
||||
pylint -j 6 --rcfile=./.pylintrc ./tests
|
||||
|
||||
isort:
|
||||
isort -rc ./platformio
|
||||
isort -rc ./tests
|
||||
isort ./platformio
|
||||
isort ./tests
|
||||
|
||||
format:
|
||||
black --target-version py27 ./platformio
|
||||
|
||||
2
docs
2
docs
Submodule docs updated: deae09a880...3293903cac
2
examples
2
examples
Submodule examples updated: 84855946ea...a0631a8b07
@@ -14,7 +14,7 @@
|
||||
|
||||
import sys
|
||||
|
||||
VERSION = (5, 0, 2)
|
||||
VERSION = (5, 1, 1)
|
||||
__version__ = ".".join([str(s) for s in VERSION])
|
||||
|
||||
__title__ = "platformio"
|
||||
@@ -31,11 +31,11 @@ __description__ = (
|
||||
)
|
||||
__url__ = "https://platformio.org"
|
||||
|
||||
__author__ = "PlatformIO"
|
||||
__email__ = "contact@platformio.org"
|
||||
__author__ = "PlatformIO Labs"
|
||||
__email__ = "contact@piolabs.com"
|
||||
|
||||
__license__ = "Apache Software License"
|
||||
__copyright__ = "Copyright 2014-present PlatformIO"
|
||||
__copyright__ = "Copyright 2014-present PlatformIO Labs"
|
||||
|
||||
__accounts_api__ = "https://api.accounts.platformio.org"
|
||||
__registry_api__ = [
|
||||
@@ -47,13 +47,13 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
__core_packages__ = {
|
||||
"contrib-piohome": "~3.3.1",
|
||||
"contrib-piohome": "~3.3.4",
|
||||
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
|
||||
"tool-unity": "~1.20500.0",
|
||||
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
|
||||
"tool-cppcheck": "~1.210.0",
|
||||
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40100.2",
|
||||
"tool-cppcheck": "~1.230.0",
|
||||
"tool-clangtidy": "~1.100000.0",
|
||||
"tool-pvs-studio": "~7.9.0",
|
||||
"tool-pvs-studio": "~7.11.0",
|
||||
}
|
||||
|
||||
__check_internet_hosts__ = [
|
||||
|
||||
@@ -33,7 +33,7 @@ except: # pylint: disable=bare-except
|
||||
@click.command(
|
||||
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
|
||||
)
|
||||
@click.version_option(__version__, prog_name="PlatformIO")
|
||||
@click.version_option(__version__, prog_name="PlatformIO Core")
|
||||
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
|
||||
@click.option("--caller", "-c", help="Caller ID (service)")
|
||||
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
|
||||
|
||||
@@ -55,6 +55,10 @@ DEFAULT_SETTINGS = {
|
||||
"description": "Check for the platform updates interval (days)",
|
||||
"value": 7,
|
||||
},
|
||||
"check_prune_system_threshold": {
|
||||
"description": "Check for pruning unnecessary data threshold (megabytes)",
|
||||
"value": 1024,
|
||||
},
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for HTTP API requests",
|
||||
"value": True,
|
||||
@@ -255,6 +259,8 @@ def get_cid():
|
||||
uid = None
|
||||
if os.getenv("C9_UID"):
|
||||
uid = os.getenv("C9_UID")
|
||||
elif os.getenv("GITPOD_GIT_USER_NAME"):
|
||||
uid = os.getenv("GITPOD_GIT_USER_NAME")
|
||||
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
|
||||
try:
|
||||
uid = json.loads(
|
||||
|
||||
@@ -81,12 +81,19 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
IDE_EXTRA_DATA={},
|
||||
)
|
||||
|
||||
# Declare command verbose messages
|
||||
command_strings = dict(
|
||||
ARCOM="Archiving",
|
||||
LINKCOM="Linking",
|
||||
RANLIBCOM="Indexing",
|
||||
ASCOM="Compiling",
|
||||
ASPPCOM="Compiling",
|
||||
CCCOM="Compiling",
|
||||
CXXCOM="Compiling",
|
||||
)
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
|
||||
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
|
||||
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
|
||||
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
|
||||
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
|
||||
for name, value in command_strings.items():
|
||||
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ from platformio.proc import where_is_program
|
||||
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
|
||||
# which is the name that most clang tools search for by default.
|
||||
|
||||
# TODO: Is there a better way to do this than this global? Right now this exists so that the
|
||||
# Is there a better way to do this than this global? Right now this exists so that the
|
||||
# emitter we add can record all of the things it emits, so that the scanner for the top level
|
||||
# compilation database can access the complete list, and also so that the writer has easy
|
||||
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
|
||||
@@ -104,7 +104,7 @@ def makeEmitCompilationDbEntry(comstr):
|
||||
__COMPILATIONDB_ENV=env,
|
||||
)
|
||||
|
||||
# TODO: Technically, these next two lines should not be required: it should be fine to
|
||||
# Technically, these next two lines should not be required: it should be fine to
|
||||
# cache the entries. However, they don't seem to update properly. Since they are quick
|
||||
# to re-generate disable caching and sidestep this problem.
|
||||
env.AlwaysBuild(entry)
|
||||
|
||||
@@ -17,7 +17,8 @@ from __future__ import absolute_import
|
||||
import os
|
||||
from glob import glob
|
||||
|
||||
from SCons.Defaults import processDefines # pylint: disable=import-error
|
||||
import SCons.Defaults # pylint: disable=import-error
|
||||
import SCons.Subst # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
@@ -58,8 +59,16 @@ def _dump_includes(env):
|
||||
for g in toolchain_incglobs:
|
||||
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
|
||||
|
||||
# include Unity framework if there are tests in project
|
||||
includes["unity"] = []
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
auto_install_unity = False
|
||||
test_dir = env.GetProjectConfig().get_optional_dir("test")
|
||||
if os.path.isdir(test_dir) and os.listdir(test_dir) != ["README"]:
|
||||
auto_install_unity = True
|
||||
unity_dir = get_core_package_dir(
|
||||
"tool-unity",
|
||||
auto_install=auto_install_unity,
|
||||
)
|
||||
if unity_dir:
|
||||
includes["unity"].append(unity_dir)
|
||||
|
||||
@@ -92,7 +101,7 @@ def _get_gcc_defines(env):
|
||||
def _dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace("\\", ""))
|
||||
@@ -141,25 +150,17 @@ def _get_svd_path(env):
|
||||
return None
|
||||
|
||||
|
||||
def _escape_build_flag(flags):
|
||||
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
|
||||
def _subst_cmd(env, cmd):
|
||||
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
|
||||
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
|
||||
|
||||
|
||||
def DumpIDEData(env, globalenv):
|
||||
""" env here is `projenv`"""
|
||||
|
||||
env["__escape_build_flag"] = _escape_build_flag
|
||||
|
||||
LINTCCOM = (
|
||||
"${__escape_build_flag(CFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
|
||||
)
|
||||
LINTCXXCOM = (
|
||||
"${__escape_build_flag(CXXFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
|
||||
)
|
||||
|
||||
data = {
|
||||
"env_name": env["PIOENV"],
|
||||
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
|
||||
"libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()],
|
||||
"defines": _dump_defines(env),
|
||||
"includes": _dump_includes(env),
|
||||
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
@@ -181,7 +182,7 @@ def DumpIDEData(env, globalenv):
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
for item in SCons.Defaults.processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
@@ -189,7 +190,13 @@ def DumpIDEData(env, globalenv):
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
|
||||
# export C/C++ build flags
|
||||
data.update(
|
||||
{
|
||||
"cc_flags": _subst_cmd(env_, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _subst_cmd(env_, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
}
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@@ -14,15 +14,30 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from hashlib import md5
|
||||
from os import makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
|
||||
from SCons.Platform import TempFileMunge # pylint: disable=import-error
|
||||
from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import WINDOWS, hashlib_encode_data
|
||||
|
||||
# Windows CLI has limit with command length to 8192
|
||||
# Leave 2000 chars for flags and other options
|
||||
MAX_LINE_LENGTH = 6000 if WINDOWS else 128072
|
||||
# There are the next limits depending on a platform:
|
||||
# - Windows = 8192
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8192 if WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
|
||||
|
||||
|
||||
def tempfile_arg_esc_func(arg):
|
||||
arg = quote_spaces(arg)
|
||||
if not WINDOWS:
|
||||
return arg
|
||||
# GCC requires double Windows slashes, let's use UNIX separator
|
||||
return WINPATHSEP_RE.sub(r"/\1", arg)
|
||||
|
||||
|
||||
def long_sources_hook(env, sources):
|
||||
@@ -41,30 +56,14 @@ def long_sources_hook(env, sources):
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def long_incflags_hook(env, incflags):
|
||||
_incflags = env.subst(incflags).replace("\\", "/")
|
||||
if len(_incflags) < MAX_LINE_LENGTH:
|
||||
return incflags
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _incflags.split(" -I"):
|
||||
line = line.strip()
|
||||
if not line.startswith("-I"):
|
||||
line = "-I" + line
|
||||
data.append('-I"%s"' % line[2:])
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(
|
||||
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
tmp_file = os.path.join(
|
||||
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
|
||||
)
|
||||
if isfile(tmp_file):
|
||||
if os.path.isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, "w") as fp:
|
||||
fp.write(data)
|
||||
@@ -76,17 +75,21 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
env.Replace(_long_incflags_hook=long_incflags_hook)
|
||||
coms = {}
|
||||
for key in ("ARCOM", "LINKCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
for key in ("_CCCOMCOM", "ASPPCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
|
||||
)
|
||||
env.Replace(**coms)
|
||||
kwargs = dict(
|
||||
_long_sources_hook=long_sources_hook,
|
||||
TEMPFILE=TempFileMunge,
|
||||
MAXLINELENGTH=MAX_LINE_LENGTH,
|
||||
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
|
||||
TEMPFILESUFFIX=".tmp",
|
||||
TEMPFILEDIR="$BUILD_DIR",
|
||||
)
|
||||
|
||||
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
|
||||
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
|
||||
|
||||
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
env.Replace(**kwargs)
|
||||
|
||||
return env
|
||||
|
||||
@@ -52,6 +52,7 @@ def BoardConfig(env, board=None):
|
||||
except (AssertionError, UnknownBoard) as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
env.Exit(1)
|
||||
return None
|
||||
|
||||
|
||||
def GetFrameworkScript(env, framework):
|
||||
|
||||
@@ -167,6 +167,29 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
if os.path.isfile(f):
|
||||
os.remove(f)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return cmd_result["returncode"] == 0
|
||||
|
||||
def execute_check_cmd(self, cmd):
|
||||
result = proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
if not self.is_check_successful(result):
|
||||
click.echo(
|
||||
"\nError: Failed to execute check command! Exited with code %d."
|
||||
% result["returncode"]
|
||||
)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(patterns):
|
||||
c_extension = (".c",)
|
||||
@@ -200,11 +223,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
|
||||
@@ -49,6 +49,12 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
|
||||
return DefectItem(severity, category, message, file_, line, column, defect_id)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Note: Clang-Tidy returns 1 for not critical compilation errors,
|
||||
# so 0 and 1 are only acceptable values
|
||||
return cmd_result["returncode"] < 2
|
||||
|
||||
def configure_command(self):
|
||||
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")
|
||||
|
||||
|
||||
@@ -96,20 +96,19 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
return DefectItem(**args)
|
||||
|
||||
def configure_command(
|
||||
self, language, src_files
|
||||
): # pylint: disable=arguments-differ
|
||||
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
|
||||
|
||||
cmd = [
|
||||
tool_path,
|
||||
"--addon-python=%s" % proc.get_pythonexe_path(),
|
||||
"--error-exitcode=1",
|
||||
"--error-exitcode=3",
|
||||
"--verbose" if self.options.get("verbose") else "--quiet",
|
||||
]
|
||||
|
||||
@@ -157,8 +156,8 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
"--include=" + inc
|
||||
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
|
||||
)
|
||||
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
|
||||
cmd.append("--includes-file=%s" % self._generate_inc_file())
|
||||
cmd.append('"%s"' % src_file)
|
||||
|
||||
return cmd
|
||||
|
||||
@@ -220,29 +219,32 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if os.path.isfile(dump_file):
|
||||
os.remove(dump_file)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Cppcheck is configured to return '3' if a defect is found
|
||||
return cmd_result["returncode"] in (0, 3)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
|
||||
languages = ("c", "c++")
|
||||
if not any([project_files[t] for t in languages]):
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
src_files_scope = ("c", "c++")
|
||||
if not any(project_files[t] for t in src_files_scope):
|
||||
click.echo("Error: Nothing to check.")
|
||||
return True
|
||||
for language in languages:
|
||||
if not project_files[language]:
|
||||
continue
|
||||
cmd = self.configure_command(language, project_files[language])
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
for scope, files in project_files.items():
|
||||
if scope not in src_files_scope:
|
||||
continue
|
||||
for src_file in files:
|
||||
cmd = self.configure_command(scope, src_file)
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
self.clean_up()
|
||||
|
||||
|
||||
@@ -52,6 +52,11 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
)
|
||||
)
|
||||
|
||||
def tool_output_filter(self, line):
|
||||
if "license was not entered" in line.lower():
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
def _process_defects(self, defects):
|
||||
for defect in defects:
|
||||
if not isinstance(defect, DefectItem):
|
||||
@@ -182,7 +187,13 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
flags = self.cc_flags
|
||||
compiler = self.cc_path
|
||||
|
||||
cmd = [compiler, src_file, "-E", "-o", self._tmp_preprocessed_file]
|
||||
cmd = [
|
||||
compiler,
|
||||
'"%s"' % src_file,
|
||||
"-E",
|
||||
"-o",
|
||||
'"%s"' % self._tmp_preprocessed_file,
|
||||
]
|
||||
cmd.extend([f for f in flags if f])
|
||||
cmd.extend(["-D%s" % d for d in self.cpp_defines])
|
||||
cmd.append('@"%s"' % self._tmp_cmd_file)
|
||||
@@ -203,6 +214,12 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return (
|
||||
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
@@ -219,11 +236,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if result["returncode"] != 0 or "license" in result["err"].lower():
|
||||
self._bad_input = True
|
||||
click.echo(result["err"])
|
||||
result = self.execute_check_cmd(cmd)
|
||||
if result["returncode"] != 0:
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
|
||||
@@ -176,6 +176,7 @@ def configure_initial_debug_options(platform, env_options):
|
||||
tool_name,
|
||||
tool_settings,
|
||||
),
|
||||
speed=env_options.get("debug_speed", tool_settings.get("speed")),
|
||||
server=server_options,
|
||||
)
|
||||
return result
|
||||
@@ -191,7 +192,7 @@ def configure_esp32_load_cmds(debug_options, configuration):
|
||||
debug_options["load_cmds"] != ["load"],
|
||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
||||
not flash_images,
|
||||
not all([isfile(item["path"]) for item in flash_images]),
|
||||
not all(isfile(item["path"]) for item in flash_images),
|
||||
]
|
||||
if any(ignore_conds):
|
||||
return debug_options["load_cmds"]
|
||||
|
||||
@@ -124,16 +124,25 @@ class DebugServer(BaseProcess):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _wait_until_ready(self):
|
||||
timeout = 10
|
||||
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
|
||||
timeout = 60 if ready_pattern else 10
|
||||
elapsed = 0
|
||||
delay = 0.5
|
||||
auto_ready_delay = 0.5
|
||||
while not self._ready and not self._process_ended and elapsed < timeout:
|
||||
yield self.async_sleep(delay)
|
||||
if not self.debug_options.get("server", {}).get("ready_pattern"):
|
||||
if not ready_pattern:
|
||||
self._ready = self._last_activity < (time.time() - auto_ready_delay)
|
||||
elapsed += delay
|
||||
|
||||
def _check_ready_by_pattern(self, data):
|
||||
if self._ready:
|
||||
return self._ready
|
||||
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
|
||||
if ready_pattern:
|
||||
self._ready = ready_pattern.encode() in data
|
||||
return self._ready
|
||||
|
||||
@staticmethod
|
||||
def async_sleep(secs):
|
||||
d = defer.Deferred()
|
||||
@@ -147,11 +156,11 @@ class DebugServer(BaseProcess):
|
||||
super(DebugServer, self).outReceived(
|
||||
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
|
||||
)
|
||||
if self._ready:
|
||||
return
|
||||
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
|
||||
if ready_pattern:
|
||||
self._ready = ready_pattern.encode() in data
|
||||
self._check_ready_by_pattern(data)
|
||||
|
||||
def errReceived(self, data):
|
||||
super(DebugServer, self).errReceived(data)
|
||||
self._check_ready_by_pattern(data)
|
||||
|
||||
def processEnded(self, reason):
|
||||
self._process_ended = True
|
||||
|
||||
@@ -179,7 +179,9 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
for name in os.listdir(filters_dir):
|
||||
if not name.endswith(".py"):
|
||||
continue
|
||||
device_helpers.load_monitor_filter(os.path.join(filters_dir, name))
|
||||
device_helpers.load_monitor_filter(
|
||||
os.path.join(filters_dir, name), options=kwargs
|
||||
)
|
||||
|
||||
project_options = {}
|
||||
try:
|
||||
@@ -193,9 +195,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
if "platform" in project_options:
|
||||
with fs.cd(kwargs["project_dir"]):
|
||||
platform = PlatformFactory.new(project_options["platform"])
|
||||
device_helpers.register_platform_filters(
|
||||
platform, kwargs["project_dir"], kwargs["environment"]
|
||||
)
|
||||
device_helpers.register_platform_filters(platform, options=kwargs)
|
||||
|
||||
if not kwargs["port"]:
|
||||
ports = util.get_serial_ports(filter_hwid=True)
|
||||
|
||||
@@ -18,12 +18,13 @@ from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
class DeviceMonitorFilter(miniterm.Transform):
|
||||
def __init__(self, project_dir=None, environment=None):
|
||||
def __init__(self, options=None):
|
||||
""" Called by PlatformIO to pass context """
|
||||
miniterm.Transform.__init__(self)
|
||||
|
||||
self.project_dir = project_dir
|
||||
self.environment = environment
|
||||
self.options = options or {}
|
||||
self.project_dir = self.options.get("project_dir")
|
||||
self.environment = self.options.get("environment")
|
||||
|
||||
self.config = ProjectConfig.get_instance()
|
||||
if not self.environment:
|
||||
|
||||
@@ -22,10 +22,17 @@ class SendOnEnter(DeviceMonitorFilter):
|
||||
super(SendOnEnter, self).__init__(*args, **kwargs)
|
||||
self._buffer = ""
|
||||
|
||||
if self.options.get("eol") == "CR":
|
||||
self._eol = "\r"
|
||||
elif self.options.get("eol") == "LF":
|
||||
self._eol = "\n"
|
||||
else:
|
||||
self._eol = "\r\n"
|
||||
|
||||
def tx(self, text):
|
||||
self._buffer += text
|
||||
if self._buffer.endswith("\r\n"):
|
||||
text = self._buffer[:-2]
|
||||
if self._buffer.endswith(self._eol):
|
||||
text = self._buffer
|
||||
self._buffer = ""
|
||||
return text
|
||||
return ""
|
||||
|
||||
@@ -76,7 +76,7 @@ def get_board_hwids(project_dir, platform, board):
|
||||
return platform.board_config(board).get("build.hwids", [])
|
||||
|
||||
|
||||
def load_monitor_filter(path, project_dir=None, environment=None):
|
||||
def load_monitor_filter(path, options=None):
|
||||
name = os.path.basename(path)
|
||||
name = name[: name.find(".")]
|
||||
module = load_python_module("platformio.commands.device.filters.%s" % name, path)
|
||||
@@ -87,12 +87,12 @@ def load_monitor_filter(path, project_dir=None, environment=None):
|
||||
or cls == DeviceMonitorFilter
|
||||
):
|
||||
continue
|
||||
obj = cls(project_dir, environment)
|
||||
obj = cls(options)
|
||||
miniterm.TRANSFORMATIONS[obj.NAME] = obj
|
||||
return True
|
||||
|
||||
|
||||
def register_platform_filters(platform, project_dir, environment):
|
||||
def register_platform_filters(platform, options=None):
|
||||
monitor_dir = os.path.join(platform.get_dir(), "monitor")
|
||||
if not os.path.isdir(monitor_dir):
|
||||
return
|
||||
@@ -103,4 +103,4 @@ def register_platform_filters(platform, project_dir, environment):
|
||||
path = os.path.join(monitor_dir, name)
|
||||
if not os.path.isfile(path):
|
||||
continue
|
||||
load_monitor_filter(path, project_dir, environment)
|
||||
load_monitor_filter(path, options)
|
||||
|
||||
@@ -12,20 +12,15 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-locals,too-many-statements
|
||||
|
||||
import mimetypes
|
||||
import socket
|
||||
from os.path import isdir
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.package.manager.core import get_core_package_dir, inject_contrib_pysite
|
||||
from platformio.commands.home.helpers import is_port_used
|
||||
from platformio.compat import ensure_python3
|
||||
|
||||
|
||||
@click.command("home", short_help="UI to manage PlatformIO")
|
||||
@click.command("home", short_help="GUI to manage PlatformIO")
|
||||
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
|
||||
@click.option(
|
||||
"--host",
|
||||
@@ -45,61 +40,30 @@ from platformio.package.manager.core import get_core_package_dir, inject_contrib
|
||||
"are connected. Default is 0 which means never auto shutdown"
|
||||
),
|
||||
)
|
||||
def cli(port, host, no_open, shutdown_timeout):
|
||||
# pylint: disable=import-error, import-outside-toplevel
|
||||
|
||||
# import contrib modules
|
||||
inject_contrib_pysite()
|
||||
|
||||
from autobahn.twisted.resource import WebSocketResource
|
||||
from twisted.internet import reactor
|
||||
from twisted.web import server
|
||||
from twisted.internet.error import CannotListenError
|
||||
|
||||
from platformio.commands.home.rpc.handlers.app import AppRPC
|
||||
from platformio.commands.home.rpc.handlers.ide import IDERPC
|
||||
from platformio.commands.home.rpc.handlers.misc import MiscRPC
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.commands.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.commands.home.rpc.handlers.account import AccountRPC
|
||||
from platformio.commands.home.rpc.server import JSONRPCServerFactory
|
||||
from platformio.commands.home.web import WebRoot
|
||||
|
||||
factory = JSONRPCServerFactory(shutdown_timeout)
|
||||
factory.addHandler(AppRPC(), namespace="app")
|
||||
factory.addHandler(IDERPC(), namespace="ide")
|
||||
factory.addHandler(MiscRPC(), namespace="misc")
|
||||
factory.addHandler(OSRPC(), namespace="os")
|
||||
factory.addHandler(PIOCoreRPC(), namespace="core")
|
||||
factory.addHandler(ProjectRPC(), namespace="project")
|
||||
factory.addHandler(AccountRPC(), namespace="account")
|
||||
|
||||
contrib_dir = get_core_package_dir("contrib-piohome")
|
||||
if not isdir(contrib_dir):
|
||||
raise exception.PlatformioException("Invalid path to PIO Home Contrib")
|
||||
@click.option(
|
||||
"--session-id",
|
||||
help=(
|
||||
"A unique session identifier to keep PIO Home isolated from other instances "
|
||||
"and protect from 3rd party access"
|
||||
),
|
||||
)
|
||||
def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
ensure_python3()
|
||||
|
||||
# Ensure PIO Home mimetypes are known
|
||||
mimetypes.add_type("text/html", ".html")
|
||||
mimetypes.add_type("text/css", ".css")
|
||||
mimetypes.add_type("application/javascript", ".js")
|
||||
|
||||
root = WebRoot(contrib_dir)
|
||||
root.putChild(b"wsrpc", WebSocketResource(factory))
|
||||
site = server.Site(root)
|
||||
|
||||
# hook for `platformio-node-helpers`
|
||||
if host == "__do_not_start__":
|
||||
return
|
||||
|
||||
already_started = is_port_used(host, port)
|
||||
home_url = "http://%s:%d" % (host, port)
|
||||
if not no_open:
|
||||
if already_started:
|
||||
click.launch(home_url)
|
||||
else:
|
||||
reactor.callLater(1, lambda: click.launch(home_url))
|
||||
|
||||
home_url = "http://%s:%d%s" % (
|
||||
host,
|
||||
port,
|
||||
("/session/%s/" % session_id) if session_id else "/",
|
||||
)
|
||||
click.echo(
|
||||
"\n".join(
|
||||
[
|
||||
@@ -108,45 +72,28 @@ def cli(port, host, no_open, shutdown_timeout):
|
||||
" /\\-_--\\ PlatformIO Home",
|
||||
"/ \\_-__\\",
|
||||
"|[]| [] | %s" % home_url,
|
||||
"|__|____|______________%s" % ("_" * len(host)),
|
||||
"|__|____|__%s" % ("_" * len(home_url)),
|
||||
]
|
||||
)
|
||||
)
|
||||
click.echo("")
|
||||
click.echo("Open PlatformIO Home in your browser by this URL => %s" % home_url)
|
||||
|
||||
try:
|
||||
reactor.listenTCP(port, site, interface=host)
|
||||
except CannotListenError as e:
|
||||
click.secho(str(e), fg="red", err=True)
|
||||
already_started = True
|
||||
|
||||
if already_started:
|
||||
if is_port_used(host, port):
|
||||
click.secho(
|
||||
"PlatformIO Home server is already started in another process.", fg="yellow"
|
||||
)
|
||||
if not no_open:
|
||||
click.launch(home_url)
|
||||
return
|
||||
|
||||
click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.home.run import run_server
|
||||
|
||||
reactor.run()
|
||||
|
||||
|
||||
def is_port_used(host, port):
|
||||
socket.setdefaulttimeout(1)
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if WINDOWS:
|
||||
try:
|
||||
s.bind((host, port))
|
||||
s.close()
|
||||
return False
|
||||
except (OSError, socket.error):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
s.connect((host, port))
|
||||
s.close()
|
||||
except socket.error:
|
||||
return False
|
||||
|
||||
return True
|
||||
run_server(
|
||||
host=host,
|
||||
port=port,
|
||||
no_open=no_open,
|
||||
shutdown_timeout=shutdown_timeout,
|
||||
home_url=home_url,
|
||||
)
|
||||
|
||||
@@ -12,36 +12,27 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=keyword-arg-before-vararg,arguments-differ,signature-differs
|
||||
import socket
|
||||
|
||||
import requests
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
from twisted.internet import threads # pylint: disable=import-error
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class AsyncSession(requests.Session):
|
||||
def __init__(self, n=None, *args, **kwargs):
|
||||
if n:
|
||||
pool = reactor.getThreadPool()
|
||||
pool.adjustPoolsize(0, n)
|
||||
|
||||
super(AsyncSession, self).__init__(*args, **kwargs)
|
||||
|
||||
def request(self, *args, **kwargs):
|
||||
async def request( # pylint: disable=signature-differs,invalid-overridden-method
|
||||
self, *args, **kwargs
|
||||
):
|
||||
func = super(AsyncSession, self).request
|
||||
return threads.deferToThread(func, *args, **kwargs)
|
||||
|
||||
def wrap(self, *args, **kwargs): # pylint: disable=no-self-use
|
||||
return defer.ensureDeferred(*args, **kwargs)
|
||||
return await run_in_threadpool(func, *args, **kwargs)
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def requests_session():
|
||||
return AsyncSession(n=5)
|
||||
return AsyncSession()
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
@@ -49,3 +40,23 @@ def get_core_fullpath():
|
||||
return where_is_program(
|
||||
"platformio" + (".exe" if "windows" in util.get_systype() else "")
|
||||
)
|
||||
|
||||
|
||||
def is_port_used(host, port):
|
||||
socket.setdefaulttimeout(1)
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if WINDOWS:
|
||||
try:
|
||||
s.bind((host, port))
|
||||
s.close()
|
||||
return False
|
||||
except (OSError, socket.error):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
s.connect((host, port))
|
||||
s.close()
|
||||
except socket.error:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -12,18 +12,18 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import jsonrpc # pylint: disable=import-error
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio.clients.account import AccountClient
|
||||
|
||||
|
||||
class AccountRPC(object):
|
||||
class AccountRPC:
|
||||
@staticmethod
|
||||
def call_client(method, *args, **kwargs):
|
||||
try:
|
||||
client = AccountClient()
|
||||
return getattr(client, method)(*args, **kwargs)
|
||||
except Exception as e: # pylint: disable=bare-except
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4003, message="PIO Account Call Error", data=str(e)
|
||||
)
|
||||
|
||||
@@ -20,7 +20,7 @@ from platformio import __version__, app, fs, util
|
||||
from platformio.project.helpers import get_project_core_dir, is_platformio_project
|
||||
|
||||
|
||||
class AppRPC(object):
|
||||
class AppRPC:
|
||||
|
||||
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
|
||||
|
||||
|
||||
@@ -14,29 +14,30 @@
|
||||
|
||||
import time
|
||||
|
||||
import jsonrpc # pylint: disable=import-error
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio.compat import get_running_loop
|
||||
|
||||
|
||||
class IDERPC(object):
|
||||
class IDERPC:
|
||||
def __init__(self):
|
||||
self._queue = {}
|
||||
|
||||
def send_command(self, sid, command, params):
|
||||
if not self._queue.get(sid):
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4005, message="PIO Home IDE agent is not started"
|
||||
)
|
||||
while self._queue[sid]:
|
||||
self._queue[sid].pop().callback(
|
||||
self._queue[sid].pop().set_result(
|
||||
{"id": time.time(), "method": command, "params": params}
|
||||
)
|
||||
|
||||
def listen_commands(self, sid=0):
|
||||
async def listen_commands(self, sid=0):
|
||||
if sid not in self._queue:
|
||||
self._queue[sid] = []
|
||||
self._queue[sid].append(defer.Deferred())
|
||||
return self._queue[sid][-1]
|
||||
self._queue[sid].append(get_running_loop().create_future())
|
||||
return await self._queue[sid][-1]
|
||||
|
||||
def open_project(self, sid, project_dir):
|
||||
return self.send_command(sid, "open_project", project_dir)
|
||||
|
||||
@@ -15,14 +15,13 @@
|
||||
import json
|
||||
import time
|
||||
|
||||
from twisted.internet import defer, reactor # pylint: disable=import-error
|
||||
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.compat import create_task
|
||||
|
||||
|
||||
class MiscRPC(object):
|
||||
def load_latest_tweets(self, data_url):
|
||||
class MiscRPC:
|
||||
async def load_latest_tweets(self, data_url):
|
||||
cache_key = ContentCache.key_from_args(data_url, "tweets")
|
||||
cache_valid = "180d"
|
||||
with ContentCache() as cc:
|
||||
@@ -31,22 +30,20 @@ class MiscRPC(object):
|
||||
cache_data = json.loads(cache_data)
|
||||
# automatically update cache in background every 12 hours
|
||||
if cache_data["time"] < (time.time() - (3600 * 12)):
|
||||
reactor.callLater(
|
||||
5, self._preload_latest_tweets, data_url, cache_key, cache_valid
|
||||
create_task(
|
||||
self._preload_latest_tweets(data_url, cache_key, cache_valid)
|
||||
)
|
||||
return cache_data["result"]
|
||||
|
||||
result = self._preload_latest_tweets(data_url, cache_key, cache_valid)
|
||||
return result
|
||||
return await self._preload_latest_tweets(data_url, cache_key, cache_valid)
|
||||
|
||||
@staticmethod
|
||||
@defer.inlineCallbacks
|
||||
def _preload_latest_tweets(data_url, cache_key, cache_valid):
|
||||
result = json.loads((yield OSRPC.fetch_content(data_url)))
|
||||
async def _preload_latest_tweets(data_url, cache_key, cache_valid):
|
||||
result = json.loads((await OSRPC.fetch_content(data_url)))
|
||||
with ContentCache() as cc:
|
||||
cc.set(
|
||||
cache_key,
|
||||
json.dumps({"time": int(time.time()), "result": result}),
|
||||
cache_valid,
|
||||
)
|
||||
defer.returnValue(result)
|
||||
return result
|
||||
|
||||
@@ -14,25 +14,23 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
from functools import cmp_to_key
|
||||
|
||||
import click
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
|
||||
from platformio import __default_requests_timeout__, fs, util
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.clients.http import ensure_internet_on
|
||||
from platformio.commands.home import helpers
|
||||
from platformio.compat import PY2, get_filesystem_encoding, glob_recursive
|
||||
|
||||
|
||||
class OSRPC(object):
|
||||
class OSRPC:
|
||||
@staticmethod
|
||||
@defer.inlineCallbacks
|
||||
def fetch_content(uri, data=None, headers=None, cache_valid=None):
|
||||
async def fetch_content(uri, data=None, headers=None, cache_valid=None):
|
||||
if not headers:
|
||||
headers = {
|
||||
"User-Agent": (
|
||||
@@ -46,18 +44,18 @@ class OSRPC(object):
|
||||
if cache_key:
|
||||
result = cc.get(cache_key)
|
||||
if result is not None:
|
||||
defer.returnValue(result)
|
||||
return result
|
||||
|
||||
# check internet before and resolve issue with 60 seconds timeout
|
||||
ensure_internet_on(raise_exception=True)
|
||||
|
||||
session = helpers.requests_session()
|
||||
if data:
|
||||
r = yield session.post(
|
||||
r = await session.post(
|
||||
uri, data=data, headers=headers, timeout=__default_requests_timeout__
|
||||
)
|
||||
else:
|
||||
r = yield session.get(
|
||||
r = await session.get(
|
||||
uri, headers=headers, timeout=__default_requests_timeout__
|
||||
)
|
||||
|
||||
@@ -66,11 +64,11 @@ class OSRPC(object):
|
||||
if cache_valid:
|
||||
with ContentCache() as cc:
|
||||
cc.set(cache_key, result, cache_valid)
|
||||
defer.returnValue(result)
|
||||
return result
|
||||
|
||||
def request_content(self, uri, data=None, headers=None, cache_valid=None):
|
||||
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
|
||||
if uri.startswith("http"):
|
||||
return self.fetch_content(uri, data, headers, cache_valid)
|
||||
return await self.fetch_content(uri, data, headers, cache_valid)
|
||||
if os.path.isfile(uri):
|
||||
with io.open(uri, encoding="utf-8") as fp:
|
||||
return fp.read()
|
||||
@@ -82,13 +80,11 @@ class OSRPC(object):
|
||||
|
||||
@staticmethod
|
||||
def reveal_file(path):
|
||||
return click.launch(
|
||||
path.encode(get_filesystem_encoding()) if PY2 else path, locate=True
|
||||
)
|
||||
return click.launch(path, locate=True)
|
||||
|
||||
@staticmethod
|
||||
def open_file(path):
|
||||
return click.launch(path.encode(get_filesystem_encoding()) if PY2 else path)
|
||||
return click.launch(path)
|
||||
|
||||
@staticmethod
|
||||
def is_file(path):
|
||||
@@ -121,7 +117,9 @@ class OSRPC(object):
|
||||
result = set()
|
||||
for pathname in pathnames:
|
||||
result |= set(
|
||||
glob_recursive(os.path.join(root, pathname) if root else pathname)
|
||||
glob.glob(
|
||||
os.path.join(root, pathname) if root else pathname, recursive=True
|
||||
)
|
||||
)
|
||||
return list(result)
|
||||
|
||||
|
||||
@@ -17,23 +17,15 @@ from __future__ import absolute_import
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from io import BytesIO, StringIO
|
||||
from io import StringIO
|
||||
|
||||
import click
|
||||
import jsonrpc # pylint: disable=import-error
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.internet import threads # pylint: disable=import-error
|
||||
from twisted.internet import utils # pylint: disable=import-error
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import __main__, __version__, fs
|
||||
from platformio import __main__, __version__, fs, proc
|
||||
from platformio.commands.home import helpers
|
||||
from platformio.compat import (
|
||||
PY2,
|
||||
get_filesystem_encoding,
|
||||
get_locale_encoding,
|
||||
is_bytes,
|
||||
string_types,
|
||||
)
|
||||
from platformio.compat import get_locale_encoding, is_bytes
|
||||
|
||||
try:
|
||||
from thread import get_ident as thread_get_ident
|
||||
@@ -52,13 +44,11 @@ class MultiThreadingStdStream(object):
|
||||
|
||||
def _ensure_thread_buffer(self, thread_id):
|
||||
if thread_id not in self._buffers:
|
||||
self._buffers[thread_id] = BytesIO() if PY2 else StringIO()
|
||||
self._buffers[thread_id] = StringIO()
|
||||
|
||||
def write(self, value):
|
||||
thread_id = thread_get_ident()
|
||||
self._ensure_thread_buffer(thread_id)
|
||||
if PY2 and isinstance(value, unicode): # pylint: disable=undefined-variable
|
||||
value = value.encode()
|
||||
return self._buffers[thread_id].write(
|
||||
value.decode() if is_bytes(value) else value
|
||||
)
|
||||
@@ -74,7 +64,7 @@ class MultiThreadingStdStream(object):
|
||||
return result
|
||||
|
||||
|
||||
class PIOCoreRPC(object):
|
||||
class PIOCoreRPC:
|
||||
@staticmethod
|
||||
def version():
|
||||
return __version__
|
||||
@@ -89,16 +79,9 @@ class PIOCoreRPC(object):
|
||||
sys.stderr = PIOCoreRPC.thread_stderr
|
||||
|
||||
@staticmethod
|
||||
def call(args, options=None):
|
||||
return defer.maybeDeferred(PIOCoreRPC._call_generator, args, options)
|
||||
|
||||
@staticmethod
|
||||
@defer.inlineCallbacks
|
||||
def _call_generator(args, options=None):
|
||||
async def call(args, options=None):
|
||||
for i, arg in enumerate(args):
|
||||
if isinstance(arg, string_types):
|
||||
args[i] = arg.encode(get_filesystem_encoding()) if PY2 else arg
|
||||
else:
|
||||
if not isinstance(arg, str):
|
||||
args[i] = str(arg)
|
||||
|
||||
options = options or {}
|
||||
@@ -106,27 +89,34 @@ class PIOCoreRPC(object):
|
||||
|
||||
try:
|
||||
if options.get("force_subprocess"):
|
||||
result = yield PIOCoreRPC._call_subprocess(args, options)
|
||||
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
|
||||
else:
|
||||
result = yield PIOCoreRPC._call_inline(args, options)
|
||||
try:
|
||||
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
|
||||
except ValueError:
|
||||
# fall-back to subprocess method
|
||||
result = yield PIOCoreRPC._call_subprocess(args, options)
|
||||
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
|
||||
result = await PIOCoreRPC._call_subprocess(args, options)
|
||||
return PIOCoreRPC._process_result(result, to_json)
|
||||
result = await PIOCoreRPC._call_inline(args, options)
|
||||
try:
|
||||
return PIOCoreRPC._process_result(result, to_json)
|
||||
except ValueError:
|
||||
# fall-back to subprocess method
|
||||
result = await PIOCoreRPC._call_subprocess(args, options)
|
||||
return PIOCoreRPC._process_result(result, to_json)
|
||||
except Exception as e:  # pylint: disable=broad-except
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4003, message="PIO Core Call Error", data=str(e)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _call_inline(args, options):
|
||||
PIOCoreRPC.setup_multithreading_std_streams()
|
||||
cwd = options.get("cwd") or os.getcwd()
|
||||
async def _call_subprocess(args, options):
|
||||
result = await run_in_threadpool(
|
||||
proc.exec_command,
|
||||
[helpers.get_core_fullpath()] + args,
|
||||
cwd=options.get("cwd") or os.getcwd(),
|
||||
)
|
||||
return (result["out"], result["err"], result["returncode"])
|
||||
|
||||
def _thread_task():
|
||||
@staticmethod
|
||||
async def _call_inline(args, options):
|
||||
PIOCoreRPC.setup_multithreading_std_streams()
|
||||
|
||||
def _thread_safe_call(args, cwd):
|
||||
with fs.cd(cwd):
|
||||
exit_code = __main__.main(["-c"] + args)
|
||||
return (
|
||||
@@ -135,16 +125,8 @@ class PIOCoreRPC(object):
|
||||
exit_code,
|
||||
)
|
||||
|
||||
return threads.deferToThread(_thread_task)
|
||||
|
||||
@staticmethod
|
||||
def _call_subprocess(args, options):
|
||||
cwd = (options or {}).get("cwd") or os.getcwd()
|
||||
return utils.getProcessOutputAndValue(
|
||||
helpers.get_core_fullpath(),
|
||||
args,
|
||||
path=cwd,
|
||||
env={k: v for k, v in os.environ.items() if "%" not in k},
|
||||
return await run_in_threadpool(
|
||||
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -18,12 +18,11 @@ import os
|
||||
import shutil
|
||||
import time
|
||||
|
||||
import jsonrpc # pylint: disable=import-error
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio.commands.home.rpc.handlers.app import AppRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.compat import PY2, get_filesystem_encoding
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
@@ -32,7 +31,7 @@ from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
from platformio.project.options import get_config_options_schema
|
||||
|
||||
|
||||
class ProjectRPC(object):
|
||||
class ProjectRPC:
|
||||
@staticmethod
|
||||
def config_call(init_kwargs, method, *args):
|
||||
assert isinstance(init_kwargs, dict)
|
||||
@@ -185,7 +184,7 @@ class ProjectRPC(object):
|
||||
)
|
||||
return sorted(result, key=lambda data: data["platform"]["title"])
|
||||
|
||||
def init(self, board, framework, project_dir):
|
||||
async def init(self, board, framework, project_dir):
|
||||
assert project_dir
|
||||
state = AppRPC.load_state()
|
||||
if not os.path.isdir(project_dir):
|
||||
@@ -198,14 +197,13 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(self._generate_project_main, project_dir, framework)
|
||||
return d
|
||||
return self._generate_project_main(project_dir, framework)
|
||||
|
||||
@staticmethod
|
||||
def _generate_project_main(_, project_dir, framework):
|
||||
def _generate_project_main(project_dir, framework):
|
||||
main_content = None
|
||||
if framework == "arduino":
|
||||
main_content = "\n".join(
|
||||
@@ -252,27 +250,23 @@ class ProjectRPC(object):
|
||||
fp.write(main_content.strip())
|
||||
return project_dir
|
||||
|
||||
def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
|
||||
async def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
|
||||
board = str(board)
|
||||
if arduino_project_dir and PY2:
|
||||
arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
|
||||
# don't import PIO Project
|
||||
if is_platformio_project(arduino_project_dir):
|
||||
return arduino_project_dir
|
||||
|
||||
is_arduino_project = any(
|
||||
[
|
||||
os.path.isfile(
|
||||
os.path.join(
|
||||
arduino_project_dir,
|
||||
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
|
||||
)
|
||||
os.path.isfile(
|
||||
os.path.join(
|
||||
arduino_project_dir,
|
||||
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
|
||||
)
|
||||
for ext in ("ino", "pde")
|
||||
]
|
||||
)
|
||||
for ext in ("ino", "pde")
|
||||
)
|
||||
if not is_arduino_project:
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
|
||||
)
|
||||
|
||||
@@ -293,14 +287,9 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
|
||||
return d
|
||||
|
||||
@staticmethod
|
||||
def _finalize_arduino_import(_, project_dir, arduino_project_dir):
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig()
|
||||
src_dir = config.get_optional_dir("src")
|
||||
@@ -310,9 +299,9 @@ class ProjectRPC(object):
|
||||
return project_dir
|
||||
|
||||
@staticmethod
|
||||
def import_pio(project_dir):
|
||||
async def import_pio(project_dir):
|
||||
if not project_dir or not is_platformio_project(project_dir):
|
||||
raise jsonrpc.exceptions.JSONRPCDispatchException(
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4001, message="Not an PlatformIO project: %s" % project_dir
|
||||
)
|
||||
new_project_dir = os.path.join(
|
||||
@@ -328,8 +317,7 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": new_project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(lambda _: new_project_dir)
|
||||
return d
|
||||
return new_project_dir
|
||||
|
||||
@@ -12,90 +12,86 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=import-error
|
||||
|
||||
import click
|
||||
import jsonrpc
|
||||
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
|
||||
from jsonrpc.exceptions import JSONRPCDispatchException
|
||||
from twisted.internet import defer, reactor
|
||||
from ajsonrpc.dispatcher import Dispatcher
|
||||
from ajsonrpc.manager import AsyncJSONRPCResponseManager
|
||||
from starlette.endpoints import WebSocketEndpoint
|
||||
|
||||
from platformio.compat import PY2, dump_json_to_unicode, is_bytes
|
||||
from platformio.compat import create_task, get_running_loop
|
||||
from platformio.proc import force_exit
|
||||
|
||||
|
||||
class JSONRPCServerProtocol(WebSocketServerProtocol):
|
||||
def onOpen(self):
|
||||
self.factory.connection_nums += 1
|
||||
if self.factory.shutdown_timer:
|
||||
self.factory.shutdown_timer.cancel()
|
||||
self.factory.shutdown_timer = None
|
||||
class JSONRPCServerFactoryBase:
|
||||
|
||||
def onClose(self, wasClean, code, reason): # pylint: disable=unused-argument
|
||||
self.factory.connection_nums -= 1
|
||||
if self.factory.connection_nums == 0:
|
||||
self.factory.shutdownByTimeout()
|
||||
|
||||
def onMessage(self, payload, isBinary): # pylint: disable=unused-argument
|
||||
# click.echo("> %s" % payload)
|
||||
response = jsonrpc.JSONRPCResponseManager.handle(
|
||||
payload, self.factory.dispatcher
|
||||
).data
|
||||
# if error
|
||||
if "result" not in response:
|
||||
self.sendJSONResponse(response)
|
||||
return None
|
||||
|
||||
d = defer.maybeDeferred(lambda: response["result"])
|
||||
d.addCallback(self._callback, response)
|
||||
d.addErrback(self._errback, response)
|
||||
|
||||
return None
|
||||
|
||||
def _callback(self, result, response):
|
||||
response["result"] = result
|
||||
self.sendJSONResponse(response)
|
||||
|
||||
def _errback(self, failure, response):
|
||||
if isinstance(failure.value, JSONRPCDispatchException):
|
||||
e = failure.value
|
||||
else:
|
||||
e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
|
||||
del response["result"]
|
||||
response["error"] = e.error._data # pylint: disable=protected-access
|
||||
self.sendJSONResponse(response)
|
||||
|
||||
def sendJSONResponse(self, response):
|
||||
# click.echo("< %s" % response)
|
||||
if "error" in response:
|
||||
click.secho("Error: %s" % response["error"], fg="red", err=True)
|
||||
response = dump_json_to_unicode(response)
|
||||
if not PY2 and not is_bytes(response):
|
||||
response = response.encode("utf-8")
|
||||
self.sendMessage(response)
|
||||
|
||||
|
||||
class JSONRPCServerFactory(WebSocketServerFactory):
|
||||
|
||||
protocol = JSONRPCServerProtocol
|
||||
connection_nums = 0
|
||||
shutdown_timer = 0
|
||||
shutdown_timer = None
|
||||
|
||||
def __init__(self, shutdown_timeout=0):
|
||||
super(JSONRPCServerFactory, self).__init__()
|
||||
self.shutdown_timeout = shutdown_timeout
|
||||
self.dispatcher = jsonrpc.Dispatcher()
|
||||
self.manager = AsyncJSONRPCResponseManager(
|
||||
Dispatcher(), is_server_error_verbose=True
|
||||
)
|
||||
|
||||
def shutdownByTimeout(self):
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
def addObjectHandler(self, handler, namespace):
|
||||
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
|
||||
|
||||
def on_client_connect(self):
|
||||
self.connection_nums += 1
|
||||
if self.shutdown_timer:
|
||||
self.shutdown_timer.cancel()
|
||||
self.shutdown_timer = None
|
||||
|
||||
def on_client_disconnect(self):
|
||||
self.connection_nums -= 1
|
||||
if self.connection_nums < 1:
|
||||
self.connection_nums = 0
|
||||
|
||||
if self.connection_nums == 0:
|
||||
self.shutdown_by_timeout()
|
||||
|
||||
async def on_shutdown(self):
|
||||
pass
|
||||
|
||||
def shutdown_by_timeout(self):
|
||||
if self.shutdown_timeout < 1:
|
||||
return
|
||||
|
||||
def _auto_shutdown_server():
|
||||
click.echo("Automatically shutdown server on timeout")
|
||||
reactor.stop()
|
||||
force_exit()
|
||||
|
||||
self.shutdown_timer = reactor.callLater(
|
||||
self.shutdown_timer = get_running_loop().call_later(
|
||||
self.shutdown_timeout, _auto_shutdown_server
|
||||
)
|
||||
|
||||
def addHandler(self, handler, namespace):
|
||||
self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
|
||||
|
||||
class WebSocketJSONRPCServerFactory(JSONRPCServerFactoryBase):
|
||||
def __call__(self, *args, **kwargs):
|
||||
ws = WebSocketJSONRPCServer(*args, **kwargs)
|
||||
ws.factory = self
|
||||
return ws
|
||||
|
||||
|
||||
class WebSocketJSONRPCServer(WebSocketEndpoint):
|
||||
encoding = "text"
|
||||
factory: WebSocketJSONRPCServerFactory = None
|
||||
|
||||
async def on_connect(self, websocket):
|
||||
await websocket.accept()
|
||||
self.factory.on_client_connect() # pylint: disable=no-member
|
||||
|
||||
async def on_receive(self, websocket, data):
|
||||
create_task(self._handle_rpc(websocket, data))
|
||||
|
||||
async def on_disconnect(self, websocket, close_code):
|
||||
self.factory.on_client_disconnect() # pylint: disable=no-member
|
||||
|
||||
async def _handle_rpc(self, websocket, data):
|
||||
# pylint: disable=no-member
|
||||
response = await self.factory.manager.get_response_for_payload(data)
|
||||
if response.error:
|
||||
click.secho("Error: %s" % response.error.data, fg="red", err=True)
|
||||
await websocket.send_text(self.factory.manager.serialize(response.body))
|
||||
|
||||
platformio/commands/home/run.py (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import click
|
||||
import uvicorn
|
||||
from starlette.applications import Starlette
|
||||
from starlette.middleware import Middleware
|
||||
from starlette.responses import PlainTextResponse
|
||||
from starlette.routing import Mount, Route, WebSocketRoute
|
||||
from starlette.staticfiles import StaticFiles
|
||||
from starlette.status import HTTP_403_FORBIDDEN
|
||||
|
||||
from platformio.commands.home.rpc.handlers.account import AccountRPC
|
||||
from platformio.commands.home.rpc.handlers.app import AppRPC
|
||||
from platformio.commands.home.rpc.handlers.ide import IDERPC
|
||||
from platformio.commands.home.rpc.handlers.misc import MiscRPC
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.commands.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
|
||||
from platformio.compat import get_running_loop
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import force_exit
|
||||
|
||||
|
||||
class ShutdownMiddleware:
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
|
||||
async def __call__(self, scope, receive, send):
|
||||
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", {}):
|
||||
await shutdown_server()
|
||||
await self.app(scope, receive, send)
|
||||
|
||||
|
||||
async def shutdown_server(_=None):
|
||||
get_running_loop().call_later(0.5, force_exit)
|
||||
return PlainTextResponse("Server has been shut down!")
|
||||
|
||||
|
||||
async def protected_page(_):
|
||||
return PlainTextResponse(
|
||||
"Protected PlatformIO Home session", status_code=HTTP_403_FORBIDDEN
|
||||
)
|
||||
|
||||
|
||||
def run_server(host, port, no_open, shutdown_timeout, home_url):
|
||||
contrib_dir = get_core_package_dir("contrib-piohome")
|
||||
if not os.path.isdir(contrib_dir):
|
||||
raise PlatformioException("Invalid path to PIO Home Contrib")
|
||||
|
||||
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
|
||||
ws_rpc_factory.addObjectHandler(AccountRPC(), namespace="account")
|
||||
ws_rpc_factory.addObjectHandler(AppRPC(), namespace="app")
|
||||
ws_rpc_factory.addObjectHandler(IDERPC(), namespace="ide")
|
||||
ws_rpc_factory.addObjectHandler(MiscRPC(), namespace="misc")
|
||||
ws_rpc_factory.addObjectHandler(OSRPC(), namespace="os")
|
||||
ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace="core")
|
||||
ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace="project")
|
||||
|
||||
path = urlparse(home_url).path
|
||||
routes = [
|
||||
WebSocketRoute(path + "wsrpc", ws_rpc_factory, name="wsrpc"),
|
||||
Route(path + "__shutdown__", shutdown_server, methods=["POST"]),
|
||||
Mount(path, StaticFiles(directory=contrib_dir, html=True), name="static"),
|
||||
]
|
||||
if path != "/":
|
||||
routes.append(Route("/", protected_page))
|
||||
|
||||
uvicorn.run(
|
||||
Starlette(
|
||||
middleware=[Middleware(ShutdownMiddleware)],
|
||||
routes=routes,
|
||||
on_startup=[
|
||||
lambda: click.echo(
|
||||
"PIO Home has been started. Press Ctrl+C to shutdown."
|
||||
),
|
||||
lambda: None if no_open else click.launch(home_url),
|
||||
],
|
||||
),
|
||||
host=host,
|
||||
port=port,
|
||||
log_level="warning",
|
||||
)
|
||||
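For orientation, a minimal usage sketch of the new uvicorn/Starlette-based home server introduced above; the host, port and URL values below are illustrative assumptions rather than values taken from this diff.

# Hypothetical usage sketch (not part of the diff): start PIO Home through the
# new run_server() entry point, matching the signature added above.
from platformio.commands.home.run import run_server

run_server(
    host="127.0.0.1",
    port=8008,                # assumed example port
    no_open=True,             # do not auto-open the browser
    shutdown_timeout=0,       # keep serving until Ctrl+C
    home_url="http://127.0.0.1:8008/",
)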
@@ -1,28 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
from twisted.web import static # pylint: disable=import-error
|
||||
|
||||
|
||||
class WebRoot(static.File):
|
||||
def render_GET(self, request):
|
||||
if request.args.get(b"__shutdown__", False):
|
||||
reactor.stop()
|
||||
return "Server has been stopped"
|
||||
|
||||
request.setHeader("cache-control", "no-cache, no-store, must-revalidate")
|
||||
request.setHeader("pragma", "no-cache")
|
||||
request.setHeader("expires", "0")
|
||||
return static.File.render_GET(self, request)
|
||||
@@ -23,6 +23,7 @@ from platformio.clients.registry import RegistryClient
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.meta import PackageSpec, PackageType
|
||||
from platformio.package.pack import PackagePacker
|
||||
from platformio.package.unpack import FileUnpacker, TARArchiver
|
||||
|
||||
|
||||
def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
@@ -81,6 +82,17 @@ def package_pack(package, output):
|
||||
)
|
||||
def package_publish(package, owner, released_at, private, notify):
|
||||
assert ensure_python3()
|
||||
|
||||
# publish .tar.gz instantly without repacking
|
||||
if not os.path.isdir(package) and isinstance(
|
||||
FileUnpacker.new_archiver(package), TARArchiver
|
||||
):
|
||||
response = RegistryClient().publish_package(
|
||||
package, owner, released_at, private, notify
|
||||
)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
return
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member
|
||||
with fs.cd(tmp_dir):
|
||||
p = PackagePacker(package)
|
||||
|
||||
@@ -149,15 +149,19 @@ def project_init(
|
||||
):
|
||||
if not silent:
|
||||
if project_dir == os.getcwd():
|
||||
click.secho("\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho("will be used for the project.", fg="yellow")
|
||||
click.secho("\nThe current working directory ", fg="yellow", nl=False)
|
||||
try:
|
||||
click.secho(project_dir, fg="cyan", nl=False)
|
||||
except UnicodeEncodeError:
|
||||
click.secho(json.dumps(project_dir), fg="cyan", nl=False)
|
||||
click.secho(" will be used for the project.", fg="yellow")
|
||||
click.echo("")
|
||||
|
||||
click.echo(
|
||||
"The next files/directories have been created in %s"
|
||||
% click.style(project_dir, fg="cyan")
|
||||
)
|
||||
click.echo("The next files/directories have been created in ", nl=False)
|
||||
try:
|
||||
click.secho(project_dir, fg="cyan")
|
||||
except UnicodeEncodeError:
|
||||
click.secho(json.dumps(project_dir), fg="cyan")
|
||||
click.echo(
|
||||
"%s - Put project header files here" % click.style("include", fg="cyan")
|
||||
)
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -27,11 +26,15 @@ from platformio.commands.system.completion import (
|
||||
install_completion_code,
|
||||
uninstall_completion_code,
|
||||
)
|
||||
from platformio.commands.system.prune import (
|
||||
prune_cached_data,
|
||||
prune_core_packages,
|
||||
prune_platform_packages,
|
||||
)
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
@click.group("system", short_help="Miscellaneous system commands")
|
||||
@@ -99,22 +102,49 @@ def system_info(json_output):
|
||||
|
||||
@cli.command("prune", short_help="Remove unused data")
|
||||
@click.option("--force", "-f", is_flag=True, help="Do not prompt for confirmation")
|
||||
def system_prune(force):
|
||||
click.secho("WARNING! This will remove:", fg="yellow")
|
||||
click.echo(" - cached API requests")
|
||||
click.echo(" - cached package downloads")
|
||||
click.echo(" - temporary data")
|
||||
if not force:
|
||||
click.confirm("Do you want to continue?", abort=True)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not prune, only show data that will be removed"
|
||||
)
|
||||
@click.option("--cache", is_flag=True, help="Prune only cached data")
|
||||
@click.option(
|
||||
"--core-packages", is_flag=True, help="Prune only unnecessary core packages"
|
||||
)
|
||||
@click.option(
|
||||
"--platform-packages",
|
||||
is_flag=True,
|
||||
help="Prune only unnecessary development platform packages",
|
||||
)
|
||||
def system_prune(force, dry_run, cache, core_packages, platform_packages):
|
||||
if dry_run:
|
||||
click.secho(
|
||||
"Dry run mode (do not prune, only show data that will be removed)",
|
||||
fg="yellow",
|
||||
)
|
||||
click.echo()
|
||||
|
||||
reclaimed_total = 0
|
||||
cache_dir = get_project_cache_dir()
|
||||
if os.path.isdir(cache_dir):
|
||||
reclaimed_total += fs.calculate_folder_size(cache_dir)
|
||||
fs.rmtree(cache_dir)
|
||||
reclaimed_cache = 0
|
||||
reclaimed_core_packages = 0
|
||||
reclaimed_platform_packages = 0
|
||||
prune_all = not any([cache, core_packages, platform_packages])
|
||||
|
||||
if cache or prune_all:
|
||||
reclaimed_cache = prune_cached_data(force, dry_run)
|
||||
click.echo()
|
||||
|
||||
if core_packages or prune_all:
|
||||
reclaimed_core_packages = prune_core_packages(force, dry_run)
|
||||
click.echo()
|
||||
|
||||
if platform_packages or prune_all:
|
||||
reclaimed_platform_packages = prune_platform_packages(force, dry_run)
|
||||
click.echo()
|
||||
|
||||
click.secho(
|
||||
"Total reclaimed space: %s" % fs.humanize_file_size(reclaimed_total), fg="green"
|
||||
"Total reclaimed space: %s"
|
||||
% fs.humanize_file_size(
|
||||
reclaimed_cache + reclaimed_core_packages + reclaimed_platform_packages
|
||||
),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
|
||||
platformio/commands/system/prune.py (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from operator import itemgetter
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.manager.core import remove_unnecessary_core_packages
|
||||
from platformio.package.manager.platform import remove_unnecessary_platform_packages
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
def prune_cached_data(force=False, dry_run=False, silent=False):
|
||||
reclaimed_space = 0
|
||||
if not silent:
|
||||
click.secho("Prune cached data:", bold=True)
|
||||
click.echo(" - cached API requests")
|
||||
click.echo(" - cached package downloads")
|
||||
click.echo(" - temporary data")
|
||||
cache_dir = get_project_cache_dir()
|
||||
if os.path.isdir(cache_dir):
|
||||
reclaimed_space += fs.calculate_folder_size(cache_dir)
|
||||
if not dry_run:
|
||||
if not force:
|
||||
click.confirm("Do you want to continue?", abort=True)
|
||||
fs.rmtree(cache_dir)
|
||||
if not silent:
|
||||
click.secho("Space on disk: %s" % fs.humanize_file_size(reclaimed_space))
|
||||
return reclaimed_space
|
||||
|
||||
|
||||
def prune_core_packages(force=False, dry_run=False, silent=False):
|
||||
if not silent:
|
||||
click.secho("Prune unnecessary core packages:", bold=True)
|
||||
return _prune_packages(force, dry_run, silent, remove_unnecessary_core_packages)
|
||||
|
||||
|
||||
def prune_platform_packages(force=False, dry_run=False, silent=False):
|
||||
if not silent:
|
||||
click.secho("Prune unnecessary development platform packages:", bold=True)
|
||||
return _prune_packages(force, dry_run, silent, remove_unnecessary_platform_packages)
|
||||
|
||||
|
||||
def _prune_packages(force, dry_run, silent, handler):
|
||||
if not silent:
|
||||
click.echo("Calculating...")
|
||||
items = [
|
||||
(
|
||||
pkg,
|
||||
fs.calculate_folder_size(pkg.path),
|
||||
)
|
||||
for pkg in handler(dry_run=True)
|
||||
]
|
||||
items = sorted(items, key=itemgetter(1), reverse=True)
|
||||
reclaimed_space = sum([item[1] for item in items])
|
||||
if items and not silent:
|
||||
click.echo(
|
||||
tabulate(
|
||||
[
|
||||
(
|
||||
pkg.metadata.spec.humanize(),
|
||||
str(pkg.metadata.version),
|
||||
fs.humanize_file_size(size),
|
||||
)
|
||||
for (pkg, size) in items
|
||||
],
|
||||
headers=["Package", "Version", "Size"],
|
||||
)
|
||||
)
|
||||
if not dry_run:
|
||||
if not force:
|
||||
click.confirm("Do you want to continue?", abort=True)
|
||||
handler(dry_run=False)
|
||||
if not silent:
|
||||
click.secho("Space on disk: %s" % fs.humanize_file_size(reclaimed_space))
|
||||
return reclaimed_space
|
||||
|
||||
|
||||
def calculate_unnecessary_system_data():
|
||||
return (
|
||||
prune_cached_data(force=True, dry_run=True, silent=True)
|
||||
+ prune_core_packages(force=True, dry_run=True, silent=True)
|
||||
+ prune_platform_packages(force=True, dry_run=True, silent=True)
|
||||
)
|
||||
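As a rough illustration of how the prune helpers added above compose, a dry-run estimate of reclaimable space could be gathered like this (a sketch assuming the module layout shown in this hunk):

# Hypothetical sketch: report reclaimable space without deleting anything.
from platformio import fs
from platformio.commands.system.prune import (
    prune_cached_data,
    prune_core_packages,
    prune_platform_packages,
)

total = sum(
    handler(force=True, dry_run=True, silent=True)
    for handler in (prune_cached_data, prune_core_packages, prune_platform_packages)
)
print("Reclaimable space: %s" % fs.humanize_file_size(total))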
@@ -25,6 +25,7 @@ from tabulate import tabulate
|
||||
from platformio import app, exception, fs, util
|
||||
from platformio.commands.test.embedded import EmbeddedTestProcessor
|
||||
from platformio.commands.test.native import NativeTestProcessor
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
@@ -128,9 +129,9 @@ def cli( # pylint: disable=redefined-builtin
|
||||
not environment and default_envs and envname not in default_envs,
|
||||
testname != "*"
|
||||
and patterns["filter"]
|
||||
and not any([fnmatch(testname, p) for p in patterns["filter"]]),
|
||||
and not any(fnmatch(testname, p) for p in patterns["filter"]),
|
||||
testname != "*"
|
||||
and any([fnmatch(testname, p) for p in patterns["ignore"]]),
|
||||
and any(fnmatch(testname, p) for p in patterns["ignore"]),
|
||||
]
|
||||
if any(skip_conditions):
|
||||
results.append({"env": envname, "test": testname})
|
||||
@@ -140,9 +141,9 @@ def cli( # pylint: disable=redefined-builtin
|
||||
print_processing_header(testname, envname)
|
||||
|
||||
cls = (
|
||||
NativeTestProcessor
|
||||
if config.get(section, "platform") == "native"
|
||||
else EmbeddedTestProcessor
|
||||
EmbeddedTestProcessor
|
||||
if is_embedded_platform(config.get(section, "platform"))
|
||||
else NativeTestProcessor
|
||||
)
|
||||
tp = cls(
|
||||
ctx,
|
||||
@@ -177,7 +178,7 @@ def cli( # pylint: disable=redefined-builtin
|
||||
if without_testing:
|
||||
return
|
||||
|
||||
print_testing_summary(results)
|
||||
print_testing_summary(results, verbose)
|
||||
|
||||
command_failed = any(r.get("succeeded") is False for r in results)
|
||||
if command_failed:
|
||||
@@ -194,6 +195,12 @@ def get_test_names(test_dir):
|
||||
return names
|
||||
|
||||
|
||||
def is_embedded_platform(name):
|
||||
if not name:
|
||||
return False
|
||||
return PlatformFactory.new(name).is_embedded()
|
||||
|
||||
|
||||
def print_processing_header(test, env):
|
||||
click.echo(
|
||||
"Processing %s in %s environment"
|
||||
@@ -222,7 +229,7 @@ def print_processing_footer(result):
|
||||
)
|
||||
|
||||
|
||||
def print_testing_summary(results):
|
||||
def print_testing_summary(results, verbose=False):
|
||||
click.echo()
|
||||
|
||||
tabular_data = []
|
||||
@@ -236,6 +243,8 @@ def print_testing_summary(results):
|
||||
failed_nums += 1
|
||||
status_str = click.style("FAILED", fg="red")
|
||||
elif result.get("succeeded") is None:
|
||||
if not verbose:
|
||||
continue
|
||||
status_str = "IGNORED"
|
||||
else:
|
||||
succeeded_nums += 1
|
||||
|
||||
@@ -95,7 +95,7 @@ class EmbeddedTestProcessor(TestProcessorBase):
|
||||
if isinstance(line, bytes):
|
||||
line = line.decode("utf8", "ignore")
|
||||
self.on_run_out(line)
|
||||
if all([l in line for l in ("Tests", "Failures", "Ignored")]):
|
||||
if all(l in line for l in ("Tests", "Failures", "Ignored")):
|
||||
break
|
||||
ser.close()
|
||||
return not self._run_failed
|
||||
|
||||
@@ -62,13 +62,14 @@ def ci_strings_are_equal(a, b):
|
||||
|
||||
|
||||
def ensure_python3(raise_exception=True):
|
||||
if not raise_exception or not PY2:
|
||||
return not PY2
|
||||
compatible = sys.version_info >= (3, 6)
|
||||
if not raise_exception or compatible:
|
||||
return compatible
|
||||
raise UserSideException(
|
||||
"Python 3.5 or later is required for this operation. \n"
|
||||
"Please install the latest Python 3 and reinstall PlatformIO Core using "
|
||||
"installation script:\n"
|
||||
"https://docs.platformio.org/page/core/installation.html"
|
||||
"Python 3.6 or later is required for this operation. \n"
|
||||
"Please check a migration guide:\n"
|
||||
"https://docs.platformio.org/en/latest/core/migration.html"
|
||||
"#drop-support-for-python-2-and-3-5"
|
||||
)
|
||||
|
||||
|
||||
@@ -77,6 +78,12 @@ if PY2:
|
||||
|
||||
string_types = (str, unicode)
|
||||
|
||||
def create_task(coro, name=None):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_running_loop():
|
||||
raise NotImplementedError
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (buffer, bytearray))
|
||||
|
||||
@@ -128,6 +135,12 @@ else:
|
||||
import importlib.util
|
||||
from glob import escape as glob_escape
|
||||
|
||||
if sys.version_info >= (3, 7):
|
||||
from asyncio import create_task, get_running_loop
|
||||
else:
|
||||
from asyncio import ensure_future as create_task
|
||||
from asyncio import get_event_loop as get_running_loop
|
||||
|
||||
string_types = (str,)
|
||||
|
||||
def is_bytes(x):
|
||||
|
||||
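A brief, hypothetical check (not part of this diff) of why the aliasing above is a workable Python 3.6 fallback: asyncio.ensure_future schedules a coroutine much like create_task, and get_event_loop returns the running loop when called from inside a coroutine.

# Hypothetical sketch: the 3.6 fallback names behave like their 3.7+ counterparts
# when used from inside a running event loop.
import asyncio

from platformio.compat import create_task, get_running_loop

async def main():
    task = create_task(asyncio.sleep(0))  # ensure_future on 3.6, create_task on 3.7+
    await task
    print(get_running_loop().is_running())  # prints True while main() is running

asyncio.get_event_loop().run_until_complete(main())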
@@ -1,22 +1,12 @@
|
||||
% import re
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
%
|
||||
%
|
||||
clang
|
||||
|
||||
% if cc_stds:
|
||||
{{"%c"}} -std=c{{ cc_stds[-1] }}
|
||||
% end
|
||||
% if cxx_stds:
|
||||
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
|
||||
% end
|
||||
{{"%c"}} {{ !cc_flags }}
|
||||
{{"%cpp"}} {{ !cxx_flags }}
|
||||
|
||||
% for include in filter_includes(includes):
|
||||
-I{{ include }}
|
||||
-I{{ !include }}
|
||||
% end
|
||||
|
||||
% for define in defines:
|
||||
-D{{ define }}
|
||||
-D{{ !define }}
|
||||
% end
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
% for include in filter_includes(includes):
|
||||
-I{{include}}
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
@@ -1,3 +1,13 @@
|
||||
% import re
|
||||
%
|
||||
% cpp_standards_remap = {
|
||||
% "0x": "11",
|
||||
% "1y": "14",
|
||||
% "1z": "17",
|
||||
% "2a": "20",
|
||||
% "2b": "23"
|
||||
% }
|
||||
|
||||
win32 {
|
||||
HOMEDIR += $$(USERPROFILE)
|
||||
}
|
||||
@@ -27,3 +37,9 @@ HEADERS += {{file}}
|
||||
SOURCES += {{file}}
|
||||
% end
|
||||
% end
|
||||
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
% if cxx_stds:
|
||||
CONFIG += c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }}
|
||||
% end
|
||||
|
||||
platformio/ide/tpls/sublimetext/.ccls.tpl (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
clang
|
||||
|
||||
{{"%c"}} {{ !cc_flags }}
|
||||
{{"%cpp"}} {{ !cxx_flags }}
|
||||
|
||||
% for include in filter_includes(includes):
|
||||
-I{{ !include }}
|
||||
% end
|
||||
|
||||
% for define in defines:
|
||||
-D{{ !define }}
|
||||
% end
|
||||
@@ -5,9 +5,10 @@
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"run"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "PlatformIO",
|
||||
"variants":
|
||||
[
|
||||
@@ -15,78 +16,73 @@
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"run"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Build"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"upload"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Upload"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"clean"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Clean"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"test"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Test"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"program"
|
||||
],
|
||||
"name": "Upload using Programmer"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"uploadfs"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Upload SPIFFS image"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"update"
|
||||
],
|
||||
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
|
||||
"name": "Update platforms and libraries"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"-c", "sublimetext",
|
||||
"upgrade"
|
||||
],
|
||||
"name": "Upgrade PlatformIO Core"
|
||||
|
||||
@@ -1,20 +1,10 @@
|
||||
% import re
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
%
|
||||
%
|
||||
clang
|
||||
|
||||
% if cc_stds:
|
||||
{{"%c"}} -std=c{{ cc_stds[-1] }}
|
||||
% end
|
||||
% if cxx_stds:
|
||||
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
|
||||
% end
|
||||
{{"%c"}} {{ !cc_flags }}
|
||||
{{"%cpp"}} {{ !cxx_flags }}
|
||||
|
||||
% for include in filter_includes(includes):
|
||||
-I{{ include }}
|
||||
-I{{ !include }}
|
||||
% end
|
||||
|
||||
% for define in defines:
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
% for include in filter_includes(includes):
|
||||
-I"{{include}}"
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
@@ -1,9 +0,0 @@
|
||||
% _defines = " ".join(["-D%s" % d.replace(" ", "\\\\ ") for d in defines])
|
||||
{
|
||||
"execPath": "{{ cxx_path }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
@@ -83,20 +83,15 @@
|
||||
% forced_includes = _find_forced_includes(
|
||||
% filter_args(cc_m_flags, ["-include", "-imacros"]), cleaned_includes)
|
||||
%
|
||||
//
|
||||
// !!! WARNING !!! AUTO-GENERATED FILE!
|
||||
// PLEASE DO NOT MODIFY IT AND USE "platformio.ini":
|
||||
// https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
|
||||
//
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "!!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags"
|
||||
},
|
||||
{
|
||||
% if systype == "windows":
|
||||
"name": "Win32",
|
||||
% elif systype == "darwin":
|
||||
"name": "Mac",
|
||||
"macFrameworkPath": [],
|
||||
% else:
|
||||
"name": "Linux",
|
||||
% end
|
||||
"name": "PlatformIO",
|
||||
"includePath": [
|
||||
% for include in cleaned_includes:
|
||||
"{{ include }}",
|
||||
@@ -118,7 +113,6 @@
|
||||
% end
|
||||
""
|
||||
],
|
||||
"intelliSenseMode": "clang-x64",
|
||||
% if cc_stds:
|
||||
"cStandard": "c{{ cc_stds[-1] }}",
|
||||
% end
|
||||
|
||||
platformio/ide/tpls/vscode/.vscode/launch.json.tpl (vendored, 126 lines)
@@ -1,44 +1,96 @@
|
||||
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
|
||||
|
||||
// PIO Unified Debugger
|
||||
//
|
||||
// Documentation: https://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
|
||||
% from os.path import dirname, join
|
||||
% import codecs
|
||||
% import json
|
||||
% import os
|
||||
%
|
||||
% def _escape(text):
|
||||
% return text.replace('"', '\"')
|
||||
% end
|
||||
%
|
||||
% def _escape_path(path):
|
||||
% return path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
|
||||
% end
|
||||
%
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PIO Debug",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
% def get_pio_configurations():
|
||||
% predebug = {
|
||||
% "type": "platformio-debug",
|
||||
% "request": "launch",
|
||||
% "name": "PIO Debug (skip Pre-Debug)",
|
||||
% "executable": _escape_path(prog_path),
|
||||
% "projectEnvName": env_name,
|
||||
% "toolchainBinDir": _escape_path(os.path.dirname(gdb_path)),
|
||||
% "internalConsoleOptions": "openOnSessionStart",
|
||||
% }
|
||||
%
|
||||
% if svd_path:
|
||||
% predebug["svdPath"] = _escape_path(svd_path)
|
||||
% end
|
||||
% debug = predebug.copy()
|
||||
% debug["name"] = "PIO Debug"
|
||||
% debug["preLaunchTask"] = {
|
||||
% "type": "PlatformIO",
|
||||
% "task": ("Pre-Debug (%s)" % env_name) if len(config.envs()) > 1 else "Pre-Debug",
|
||||
% }
|
||||
% return [debug, predebug]
|
||||
% end
|
||||
"preLaunchTask": {
|
||||
"type": "PlatformIO",
|
||||
"task": "Pre-Debug"
|
||||
},
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
},
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PIO Debug (skip Pre-Debug)",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
%
|
||||
% def _remove_comments(lines):
|
||||
% data = ""
|
||||
% for line in lines:
|
||||
% line = line.strip()
|
||||
% if not line.startswith("//"):
|
||||
% data += line
|
||||
% end
|
||||
% end
|
||||
% return data
|
||||
% end
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
}
|
||||
]
|
||||
}
|
||||
%
|
||||
% def _contains_external_configurations(launch_config):
|
||||
% return any(
|
||||
% c.get("type", "") != "platformio-debug"
|
||||
% for c in launch_config.get("configurations", [])
|
||||
% )
|
||||
% end
|
||||
%
|
||||
% def _remove_pio_configurations(launch_config):
|
||||
% if "configurations" not in launch_config:
|
||||
% return launch_config
|
||||
% end
|
||||
%
|
||||
% external_configurations = [
|
||||
% config
|
||||
% for config in launch_config["configurations"]
|
||||
% if config.get("type", "") != "platformio-debug"
|
||||
% ]
|
||||
%
|
||||
% launch_config["configurations"] = external_configurations
|
||||
% return launch_config
|
||||
% end
|
||||
%
|
||||
% def get_launch_configuration():
|
||||
% launch_config = {"version": "0.2.0", "configurations": []}
|
||||
% launch_file = os.path.join(project_dir, ".vscode", "launch.json")
|
||||
% if os.path.isfile(launch_file):
|
||||
% with codecs.open(launch_file, "r", encoding="utf8") as fp:
|
||||
% launch_data = _remove_comments(fp.readlines())
|
||||
% try:
|
||||
% prev_config = json.loads(launch_data)
|
||||
% if _contains_external_configurations(prev_config):
|
||||
% launch_config = _remove_pio_configurations(prev_config)
|
||||
% end
|
||||
% except:
|
||||
% pass
|
||||
% end
|
||||
% end
|
||||
% end
|
||||
% launch_config["configurations"].extend(get_pio_configurations())
|
||||
% return launch_config
|
||||
% end
|
||||
%
|
||||
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
|
||||
//
|
||||
// PIO Unified Debugger
|
||||
//
|
||||
// Documentation: https://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
|
||||
{{ json.dumps(get_launch_configuration(), indent=4, ensure_ascii=False) }}
|
||||
|
||||
@@ -26,7 +26,9 @@ from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib.command import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.system.prune import calculate_unnecessary_system_data
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
@@ -38,13 +40,16 @@ from platformio.proc import is_container
|
||||
|
||||
|
||||
def on_platformio_start(ctx, force, caller):
|
||||
ensure_python3(raise_exception=True)
|
||||
|
||||
app.set_session_var("command_ctx", ctx)
|
||||
app.set_session_var("force_option", force)
|
||||
set_caller(caller)
|
||||
telemetry.on_command()
|
||||
|
||||
if not PlatformioCLI.in_silence():
|
||||
after_upgrade(ctx)
|
||||
if PlatformioCLI.in_silence():
|
||||
return
|
||||
after_upgrade(ctx)
|
||||
|
||||
|
||||
def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
@@ -55,6 +60,7 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
check_platformio_upgrade()
|
||||
check_internal_updates(ctx, "platforms")
|
||||
check_internal_updates(ctx, "libraries")
|
||||
check_prune_system()
|
||||
except (
|
||||
http.HTTPClientError,
|
||||
http.InternetIsOffline,
|
||||
@@ -73,17 +79,20 @@ def on_platformio_exception(e):
|
||||
|
||||
def set_caller(caller=None):
|
||||
caller = caller or getenv("PLATFORMIO_CALLER")
|
||||
if not caller:
|
||||
if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
app.set_session_var("caller_id", caller)
|
||||
if caller:
|
||||
return app.set_session_var("caller_id", caller)
|
||||
if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif getenv("GITPOD_INSTANCE_ID") or getenv("GITPOD_WORKSPACE_URL"):
|
||||
caller = "gitpod"
|
||||
elif is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
return app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
class Upgrader(object):
|
||||
@@ -326,3 +335,31 @@ def check_internal_updates(ctx, what): # pylint: disable=too-many-branches
|
||||
|
||||
click.echo("*" * terminal_width)
|
||||
click.echo("")
|
||||
|
||||
|
||||
def check_prune_system():
|
||||
last_check = app.get_state_item("last_check", {})
|
||||
interval = 30 * 3600 * 24  # once a month
|
||||
if (time() - interval) < last_check.get("prune_system", 0):
|
||||
return
|
||||
|
||||
last_check["prune_system"] = int(time())
|
||||
app.set_state_item("last_check", last_check)
|
||||
threshold_mb = int(app.get_setting("check_prune_system_threshold") or 0)
|
||||
if threshold_mb <= 0:
|
||||
return
|
||||
|
||||
unnecessary_size = calculate_unnecessary_system_data()
|
||||
if (unnecessary_size / 1024) < threshold_mb:
|
||||
return
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo()
|
||||
click.echo("*" * terminal_width)
|
||||
click.secho(
|
||||
"We found %s of unnecessary PlatformIO system data (temporary files, "
|
||||
"unnecessary packages, etc.).\nUse `pio system prune --dry-run` to list "
|
||||
"them or `pio system prune` to save disk space."
|
||||
% fs.humanize_file_size(unnecessary_size),
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
@@ -153,7 +153,7 @@ class PackageManagerInstallMixin(object):
|
||||
finally:
|
||||
if os.path.isdir(tmp_dir):
|
||||
try:
|
||||
shutil.rmtree(tmp_dir)
|
||||
fs.rmtree(tmp_dir)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
|
||||
@@ -207,9 +207,9 @@ class PackageManageRegistryMixin(object):
|
||||
time.sleep(1)
|
||||
return (None, None)
|
||||
|
||||
def pick_best_registry_version(self, versions, spec=None):
|
||||
def filter_incompatible_registry_versions(self, versions, spec=None):
|
||||
assert not spec or isinstance(spec, PackageSpec)
|
||||
best = None
|
||||
result = []
|
||||
for version in versions:
|
||||
semver = cast_version_to_semver(version["name"])
|
||||
if spec and spec.requirements and semver not in spec.requirements:
|
||||
@@ -218,6 +218,13 @@ class PackageManageRegistryMixin(object):
|
||||
self.is_system_compatible(f.get("system")) for f in version["files"]
|
||||
):
|
||||
continue
|
||||
result.append(version)
|
||||
return result
|
||||
|
||||
def pick_best_registry_version(self, versions, spec=None):
|
||||
best = None
|
||||
for version in self.filter_incompatible_registry_versions(versions, spec):
|
||||
semver = cast_version_to_semver(version["name"])
|
||||
if not best or (semver > cast_version_to_semver(best["name"])):
|
||||
best = version
|
||||
return best
|
||||
|
||||
@@ -26,7 +26,10 @@ class PackageManagerUpdateMixin(object):
|
||||
def outdated(self, pkg, spec=None):
|
||||
assert isinstance(pkg, PackageItem)
|
||||
assert not spec or isinstance(spec, PackageSpec)
|
||||
assert os.path.isdir(pkg.path) and pkg.metadata
|
||||
assert pkg.metadata
|
||||
|
||||
if not os.path.isdir(pkg.path):
|
||||
return PackageOutdatedResult(current=pkg.metadata.version)
|
||||
|
||||
# skip detached package to a specific version
|
||||
detached_conditions = [
|
||||
@@ -104,7 +107,7 @@ class PackageManagerUpdateMixin(object):
|
||||
|
||||
outdated = self.outdated(pkg, to_spec)
|
||||
if not silent:
|
||||
self.print_outdated_state(outdated, show_incompatible)
|
||||
self.print_outdated_state(outdated, only_check, show_incompatible)
|
||||
|
||||
if only_check or not outdated.is_outdated(allow_incompatible=False):
|
||||
return pkg
|
||||
@@ -116,24 +119,39 @@ class PackageManagerUpdateMixin(object):
|
||||
self.unlock()
|
||||
|
||||
@staticmethod
|
||||
def print_outdated_state(outdated, show_incompatible=True):
|
||||
def print_outdated_state(outdated, only_check, show_incompatible):
|
||||
if outdated.detached:
|
||||
return click.echo("[%s]" % (click.style("Detached", fg="yellow")))
|
||||
|
||||
if (
|
||||
not outdated.latest
|
||||
or outdated.current == outdated.latest
|
||||
or (not show_incompatible and outdated.current == outdated.wanted)
|
||||
):
|
||||
return click.echo("[%s]" % (click.style("Up-to-date", fg="green")))
|
||||
|
||||
if outdated.wanted and outdated.current == outdated.wanted:
|
||||
return click.echo(
|
||||
"[%s]" % (click.style("Incompatible %s" % outdated.latest, fg="yellow"))
|
||||
)
|
||||
|
||||
if only_check:
|
||||
return click.echo(
|
||||
"[%s]"
|
||||
% (
|
||||
click.style(
|
||||
"Outdated %s" % str(outdated.wanted or outdated.latest),
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return click.echo(
|
||||
"[%s]"
|
||||
% (
|
||||
click.style(
|
||||
"Outdated %s" % str(outdated.wanted or outdated.latest), fg="red"
|
||||
"Updating to %s" % str(outdated.wanted or outdated.latest),
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -27,7 +27,18 @@ from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.proc import get_pythonexe_path
|
||||
|
||||
|
||||
def get_core_package_dir(name):
|
||||
def get_installed_core_packages():
|
||||
result = []
|
||||
pm = ToolPackageManager()
|
||||
for name, requirements in __core_packages__.items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
result.append(pkg)
|
||||
return result
|
||||
|
||||
|
||||
def get_core_package_dir(name, auto_install=True):
|
||||
if name not in __core_packages__:
|
||||
raise exception.PlatformioException("Please upgrade PlatformIO Core")
|
||||
pm = ToolPackageManager()
|
||||
@@ -37,8 +48,10 @@ def get_core_package_dir(name):
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
return pkg.path
|
||||
if not auto_install:
|
||||
return None
|
||||
assert pm.install(spec)
|
||||
_remove_unnecessary_packages()
|
||||
remove_unnecessary_core_packages()
|
||||
return pm.get_package(spec).path
|
||||
|
||||
|
||||
@@ -52,24 +65,40 @@ def update_core_packages(only_check=False, silent=False):
|
||||
if not silent or pm.outdated(pkg, spec).is_outdated():
|
||||
pm.update(pkg, spec, only_check=only_check)
|
||||
if not only_check:
|
||||
_remove_unnecessary_packages()
|
||||
remove_unnecessary_core_packages()
|
||||
return True
|
||||
|
||||
|
||||
def _remove_unnecessary_packages():
|
||||
def remove_unnecessary_core_packages(dry_run=False):
|
||||
candidates = []
|
||||
pm = ToolPackageManager()
|
||||
best_pkg_versions = {}
|
||||
|
||||
for name, requirements in __core_packages__.items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
best_pkg_versions[pkg.metadata.name] = pkg.metadata.version
|
||||
|
||||
for pkg in pm.get_installed():
|
||||
if pkg.metadata.name not in best_pkg_versions:
|
||||
continue
|
||||
if pkg.metadata.version != best_pkg_versions[pkg.metadata.name]:
|
||||
pm.uninstall(pkg)
|
||||
skip_conds = [
|
||||
os.path.isfile(os.path.join(pkg.path, ".piokeep")),
|
||||
pkg.metadata.spec.owner != "platformio",
|
||||
pkg.metadata.name not in best_pkg_versions,
|
||||
pkg.metadata.name in best_pkg_versions
|
||||
and pkg.metadata.version == best_pkg_versions[pkg.metadata.name],
|
||||
]
|
||||
if not any(skip_conds):
|
||||
candidates.append(pkg)
|
||||
|
||||
if dry_run:
|
||||
return candidates
|
||||
|
||||
for pkg in candidates:
|
||||
pm.uninstall(pkg)
|
||||
|
||||
return candidates
|
||||
|
||||
|
||||
def inject_contrib_pysite(verify_openssl=False):
|
||||
@@ -108,6 +137,9 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
|
||||
fs.rmtree(target_dir)
|
||||
os.makedirs(target_dir)
|
||||
|
||||
# issue 3865: There is no "rustup" in "Raspbian GNU/Linux 10 (buster)"
|
||||
os.environ["CRYPTOGRAPHY_DONT_BUILD_RUST"] = "1"
|
||||
|
||||
# build dependencies
|
||||
args = [
|
||||
get_pythonexe_path(),
|
||||
@@ -160,7 +192,6 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
|
||||
pkg.dump_meta()
|
||||
|
||||
# remove unused files
|
||||
shutil.rmtree(os.path.join(target_dir, "autobahn", "xbr", "contracts"))
|
||||
for root, dirs, files in os.walk(target_dir):
|
||||
for t in ("_test", "test", "tests"):
|
||||
if t in dirs:
|
||||
@@ -169,19 +200,6 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
|
||||
if name.endswith((".chm", ".pyc")):
|
||||
os.remove(os.path.join(root, name))
|
||||
|
||||
# apply patches
|
||||
with open(
|
||||
os.path.join(target_dir, "autobahn", "twisted", "__init__.py"), "r+"
|
||||
) as fp:
|
||||
contents = fp.read()
|
||||
contents = contents.replace(
|
||||
"from autobahn.twisted.wamp import ApplicationSession",
|
||||
"# from autobahn.twisted.wamp import ApplicationSession",
|
||||
)
|
||||
fp.seek(0)
|
||||
fp.truncate()
|
||||
fp.write(contents)
|
||||
|
||||
return target_dir
|
||||
|
||||
|
||||
@@ -192,8 +210,6 @@ def get_contrib_pysite_deps():
|
||||
twisted_version = "19.10.0" if PY2 else "20.3.0"
|
||||
result = [
|
||||
"twisted == %s" % twisted_version,
|
||||
"autobahn == %s" % ("19.11.2" if PY2 else "20.7.1"),
|
||||
"json-rpc == 1.13.0",
|
||||
]
|
||||
|
||||
# twisted[tls], see setup.py for %twisted_version%
|
||||
@@ -201,14 +217,6 @@ def get_contrib_pysite_deps():
|
||||
["pyopenssl >= 16.0.0", "service_identity >= 18.1.0", "idna >= 0.6, != 2.3"]
|
||||
)
|
||||
|
||||
# zeroconf
|
||||
if PY2:
|
||||
result.append(
|
||||
"https://github.com/ivankravets/python-zeroconf/" "archive/pio-py27.zip"
|
||||
)
|
||||
else:
|
||||
result.append("zeroconf == 0.26.0")
|
||||
|
||||
if "windows" in sys_type:
|
||||
result.append("pypiwin32 == 223")
|
||||
# workaround for twisted wheels
|
||||
|
||||
@@ -112,22 +112,23 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
        )

    def _install_dependency(self, dependency, silent=False):
        if set(["name", "version"]) <= set(dependency.keys()) and any(
            c in dependency["version"] for c in (":", "/", "@")
        ):
            spec = PackageSpec("%s=%s" % (dependency["name"], dependency["version"]))
        else:
            spec = PackageSpec(
                owner=dependency.get("owner"),
                name=dependency.get("name"),
                requirements=dependency.get("version"),
            )
        spec = PackageSpec(
            owner=dependency.get("owner"),
            name=dependency.get("name"),
            requirements=dependency.get("version"),
        )
        search_filters = {
            key: value
            for key, value in dependency.items()
            if key in ("authors", "platforms", "frameworks")
        }
        return self._install(spec, search_filters=search_filters or None, silent=silent)
        try:
            return self._install(
                spec, search_filters=search_filters or None, silent=silent
            )
        except UnknownPackageError:
            pass
        return None

    def uninstall_dependencies(self, pkg, silent=False):
        assert isinstance(pkg, PackageItem)
@@ -137,11 +138,12 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
        if not silent:
            self.print_message("Removing dependencies...", fg="yellow")
        for dependency in manifest.get("dependencies"):
            pkg = self.get_package(
                PackageSpec(
                    name=dependency.get("name"), requirements=dependency.get("version")
                )
            spec = PackageSpec(
                owner=dependency.get("owner"),
                name=dependency.get("name"),
                requirements=dependency.get("version"),
            )
            pkg = self.get_package(spec)
            if not pkg:
                continue
            self._uninstall(pkg, silent=silent)

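For context on the _install_dependency change above: a library.json dependency entry now always maps to an owner/name/requirements spec, and an unknown package no longer aborts the whole install. A minimal standalone sketch of that mapping (to_spec_fields is an illustrative helper, not part of PlatformIO):

def to_spec_fields(dependency):
    # Mirrors the new code path: owner + name + version requirements.
    return {
        "owner": dependency.get("owner"),
        "name": dependency.get("name"),
        "requirements": dependency.get("version"),
    }

print(to_spec_fields({"owner": "bblanchon", "name": "ArduinoJson", "version": "^6.12.0"}))
# {'owner': 'bblanchon', 'name': 'ArduinoJson', 'requirements': '^6.12.0'}
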
@@ -12,10 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from platformio import util
from platformio.clients.http import HTTPClientError, InternetIsOffline
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.base import BasePackageManager
from platformio.package.manager.core import get_installed_core_packages
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageType
from platformio.platform.exception import IncompatiblePlatform, UnknownBoard
@@ -69,7 +72,6 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
            )
        p.install_python_packages()
        p.on_installed()
        self.cleanup_packages(list(p.packages))
        return pkg

    def uninstall(self, spec, silent=False, skip_dependencies=False):
@@ -83,7 +85,6 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
        if not skip_dependencies:
            p.uninstall_python_packages()
            p.on_uninstalled()
        self.cleanup_packages(list(p.packages))
        return pkg

    def update(  # pylint: disable=arguments-differ, too-many-arguments
@@ -118,7 +119,6 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
            )

        p.update_packages(only_check)
        self.cleanup_packages(list(p.packages))

        if missed_pkgs:
            p.install_packages(
@@ -127,32 +127,6 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an

        return new_pkg or pkg

    def cleanup_packages(self, names):
        self.memcache_reset()
        deppkgs = {}
        for platform in PlatformPackageManager().get_installed():
            p = PlatformFactory.new(platform)
            for pkg in p.get_installed_packages():
                if pkg.metadata.name not in deppkgs:
                    deppkgs[pkg.metadata.name] = set()
                deppkgs[pkg.metadata.name].add(pkg.metadata.version)

        pm = ToolPackageManager()
        for pkg in pm.get_installed():
            if pkg.metadata.name not in names:
                continue
            if (
                pkg.metadata.name not in deppkgs
                or pkg.metadata.version not in deppkgs[pkg.metadata.name]
            ):
                try:
                    pm.uninstall(pkg.metadata.spec)
                except UnknownPackageError:
                    pass

        self.memcache_reset()
        return True

    @util.memoized(expire="5s")
    def get_installed_boards(self):
        boards = []
@@ -193,3 +167,37 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
        ):
            return manifest
        raise UnknownBoard(id_)


#
# Helpers
#


def remove_unnecessary_platform_packages(dry_run=False):
    candidates = []
    required = set()
    core_packages = get_installed_core_packages()
    for platform in PlatformPackageManager().get_installed():
        p = PlatformFactory.new(platform)
        for pkg in p.get_installed_packages(with_optional=True):
            required.add(pkg)

    pm = ToolPackageManager()
    for pkg in pm.get_installed():
        skip_conds = [
            pkg.metadata.spec.url,
            os.path.isfile(os.path.join(pkg.path, ".piokeep")),
            pkg in required,
            pkg in core_packages,
        ]
        if not any(skip_conds):
            candidates.append(pkg)

    if dry_run:
        return candidates

    for pkg in candidates:
        pm.uninstall(pkg)

    return candidates

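A short usage sketch for the helper above (assumes PlatformIO 5.x is installed in the current Python environment; nothing is uninstalled while dry_run=True):

from platformio.package.manager.platform import remove_unnecessary_platform_packages

for pkg in remove_unnecessary_platform_packages(dry_run=True):
    print(pkg.metadata.name, pkg.metadata.version)
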
@@ -568,6 +568,7 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
                continue
            found = True
            item["maintainer"] = True
            # pylint: disable=unsupported-membership-test
            if not item.get("email") and email and "@" in email:
                item["email"] = email
        if not found:

@@ -141,9 +141,10 @@ class ExampleSchema(StrictSchema):
    name = fields.Str(
        required=True,
        validate=[
            validate.Length(min=1, max=100),
            validate.Length(min=1, max=255),
            validate.Regexp(
                r"^[a-zA-Z\d\-\_/]+$", error="Only [a-zA-Z0-9-_/] chars are allowed"
                r"^[a-zA-Z\d\-\_/\. ]+$",
                error="Only [a-zA-Z0-9-_/. ] chars are allowed",
            ),
        ],
    )
@@ -252,5 +253,9 @@ class ManifestSchema(BaseSchema):
    @staticmethod
    @memoized(expire="1h")
    def load_spdx_licenses():
        spdx_data_url = "https://dl.bintray.com/platformio/dl-misc/spdx-licenses-3.json"
        version = "3.12"
        spdx_data_url = (
            "https://raw.githubusercontent.com/spdx/license-list-data/"
            "v%s/json/licenses.json" % version
        )
        return json.loads(fetch_remote_content(spdx_data_url))

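A standalone sketch of the relaxed example-name rule above, using the same validators (assumes marshmallow 3; ExampleNameSchema is an illustrative name):

from marshmallow import Schema, ValidationError, fields, validate

class ExampleNameSchema(Schema):
    name = fields.Str(
        required=True,
        validate=[
            validate.Length(min=1, max=255),
            validate.Regexp(
                r"^[a-zA-Z\d\-\_/\. ]+$",
                error="Only [a-zA-Z0-9-_/. ] chars are allowed",
            ),
        ],
    )

ExampleNameSchema().load({"name": "examples/Blink v1.0"})  # dots and spaces now pass
try:
    ExampleNameSchema().load({"name": "bad!name"})
except ValidationError as exc:
    print(exc.messages)  # {'name': ['Only [a-zA-Z0-9-_/. ] chars are allowed']}
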
@@ -107,16 +107,21 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes
    def __init__(  # pylint: disable=redefined-builtin,too-many-arguments
        self, raw=None, owner=None, id=None, name=None, requirements=None, url=None
    ):
        self._requirements = None
        self.owner = owner
        self.id = id
        self.name = name
        self._requirements = None
        self.url = url
        self.raw = raw
        if requirements:
            self.requirements = requirements
            try:
                self.requirements = requirements
            except ValueError as exc:
                if not self.name or self.url or self.raw:
                    raise exc
                self.raw = "%s=%s" % (self.name, requirements)
        self._name_is_custom = False
        self._parse(raw)
        self._parse(self.raw)

    def __eq__(self, other):
        return all(
@@ -405,7 +410,12 @@ class PackageItem(object):
        )

    def __eq__(self, other):
        return all([self.path == other.path, self.metadata == other.metadata])
        if not self.path or not other.path:
            return self.path == other.path
        return os.path.realpath(self.path) == os.path.realpath(other.path)

    def __hash__(self):
        return hash(os.path.realpath(self.path))

    def exists(self):
        return os.path.isdir(self.path)

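A small sketch of what the PackageSpec constructor change above enables (assumes PlatformIO 5.x is installed): the keyword form and the raw "owner/name @ requirements" form should describe the same package.

from platformio.package.meta import PackageSpec

a = PackageSpec(owner="bblanchon", name="ArduinoJson", requirements="^6.12.0")
b = PackageSpec("bblanchon/ArduinoJson @ ^6.12.0")
print(a == b)  # expected: True
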
@@ -20,8 +20,8 @@ import tarfile
import tempfile

from platformio import fs
from platformio.compat import ensure_python3
from platformio.package.exception import PackageException
from platformio.compat import WINDOWS, ensure_python3
from platformio.package.exception import PackageException, UserSideException
from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema
from platformio.package.meta import PackageItem
@@ -46,6 +46,8 @@ class PackagePacker(object):
        ".git/",
        ".hg/",
        ".svn/",
    ]
    EXCLUDE_EXTRA = [
        # Tests
        "tests?",
        # Docs
@@ -115,12 +117,17 @@ class PackagePacker(object):

        # if zip/tar.gz -> unpack to tmp dir
        if not os.path.isdir(src):
            if WINDOWS:
                raise UserSideException(
                    "Packaging from an archive does not work on Windows OS. Please "
                    "extract data from `%s` manually and pack a folder instead"
                    % src
                )
            with FileUnpacker(src) as fu:
                assert fu.unpack(tmp_dir, silent=True)
            src = tmp_dir

        src = self.find_source_root(src)

        manifest = self.load_manifest(src)
        filename = self.get_archive_name(
            manifest["name"],
@@ -188,7 +195,7 @@ class PackagePacker(object):
        return dst

    def compute_src_filters(self, src, include, exclude):
        exclude_default = self.EXCLUDE_DEFAULT[:]
        exclude_extra = self.EXCLUDE_EXTRA[:]
        # extend with library extra filters
        if any(
            os.path.isfile(os.path.join(src, name))
@@ -198,11 +205,15 @@ class PackagePacker(object):
                ManifestFileType.MODULE_JSON,
            )
        ):
            exclude_default.extend(self.EXCLUDE_LIBRARY_EXTRA)
            exclude_extra.extend(self.EXCLUDE_LIBRARY_EXTRA)

        result = ["+<%s>" % p for p in include or ["*", ".*"]]
        result += ["-<%s>" % p for p in self.EXCLUDE_DEFAULT]
        # exclude items declared in manifest
        result += ["-<%s>" % p for p in exclude or []]
        result += ["-<%s>" % p for p in exclude_default]
        # apply extra excludes if no custom "export" field in manifest
        if not include and not exclude:
            result += ["-<%s>" % p for p in exclude_extra]
        # automatically include manifests
        result += ["+<%s>" % p for p in self.INCLUDE_DEFAULT]
        return result

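A standalone sketch of the "+<pattern>" / "-<pattern>" source-filter strings that compute_src_filters() assembles above (build_filters is an illustrative stand-in, not the real method):

def build_filters(include=None, exclude=None, exclude_default=(), exclude_extra=(), include_default=()):
    result = ["+<%s>" % p for p in include or ["*", ".*"]]
    result += ["-<%s>" % p for p in exclude or []]
    result += ["-<%s>" % p for p in exclude_default]
    if not include and not exclude:
        # extra excludes apply only when the manifest has no custom "export" field
        result += ["-<%s>" % p for p in exclude_extra]
    result += ["+<%s>" % p for p in include_default]
    return result

print(build_filters(exclude_default=[".git/"], exclude_extra=["tests?"], include_default=["library.json"]))
# ['+<*>', '+<.*>', '-<.git/>', '-<tests?>', '+<library.json>']
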
@@ -134,27 +134,28 @@ class FileUnpacker(object):
        self.path = path
        self._archiver = None

    def _init_archiver(self):
    def __enter__(self):
        self._archiver = self.new_archiver(self.path)
        return self

    def __exit__(self, *args):
        if self._archiver:
            self._archiver.close()

    @staticmethod
    def new_archiver(path):
        magic_map = {
            b"\x1f\x8b\x08": TARArchiver,
            b"\x42\x5a\x68": TARArchiver,
            b"\x50\x4b\x03\x04": ZIPArchiver,
        }
        magic_len = max(len(k) for k in magic_map)
        with open(self.path, "rb") as fp:
        with open(path, "rb") as fp:
            data = fp.read(magic_len)
            for magic, archiver in magic_map.items():
                if data.startswith(magic):
                    return archiver(self.path)
        raise PackageException("Unknown archive type '%s'" % self.path)

    def __enter__(self):
        self._archiver = self._init_archiver()
        return self

    def __exit__(self, *args):
        if self._archiver:
            self._archiver.close()
                    return archiver(path)
        raise PackageException("Unknown archive type '%s'" % path)

    def unpack(
        self, dest_dir=None, with_progress=True, check_unpacked=True, silent=False

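A standalone sketch of the magic-byte dispatch that new_archiver() performs above (the archiver names here are placeholders, not imported classes):

MAGIC_MAP = {
    b"\x1f\x8b\x08": "TARArchiver",      # gzip
    b"\x42\x5a\x68": "TARArchiver",      # bzip2
    b"\x50\x4b\x03\x04": "ZIPArchiver",  # zip
}

def detect_archiver(path):
    magic_len = max(len(k) for k in MAGIC_MAP)
    with open(path, "rb") as fp:
        data = fp.read(magic_len)
    for magic, archiver in MAGIC_MAP.items():
        if data.startswith(magic):
            return archiver
    raise ValueError("Unknown archive type '%s'" % path)
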
@@ -12,17 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
from os.path import join
from subprocess import CalledProcessError, check_call
from sys import modules
import subprocess
import sys

from platformio import proc
from platformio.package.exception import (
    PackageException,
    PlatformioException,
    UserSideException,
)
from platformio.proc import exec_command

try:
    from urllib.parse import urlparse
@@ -51,7 +51,7 @@ class VCSClientFactory(object):
        if not type_:
            raise VCSBaseException("VCS: Unknown repository type %s" % remote_url)
        try:
            obj = getattr(modules[__name__], "%sClient" % type_.title())(
            obj = getattr(sys.modules[__name__], "%sClient" % type_.title())(
                src_dir, remote_url, tag, silent
            )
            assert isinstance(obj, VCSClientBase)
@@ -86,7 +86,7 @@ class VCSClientBase(object):

    @property
    def storage_dir(self):
        return join(self.src_dir, "." + self.command)
        return os.path.join(self.src_dir, "." + self.command)

    def export(self):
        raise NotImplementedError
@@ -108,17 +108,19 @@ class VCSClientBase(object):
        args = [self.command] + args
        if "cwd" not in kwargs:
            kwargs["cwd"] = self.src_dir
        if "env" not in kwargs:
            kwargs["env"] = os.environ
        try:
            check_call(args, **kwargs)
            subprocess.check_call(args, **kwargs)
            return True
        except CalledProcessError as e:
        except subprocess.CalledProcessError as e:
            raise VCSBaseException("VCS: Could not process command %s" % e.cmd)

    def get_cmd_output(self, args, **kwargs):
        args = [self.command] + args
        if "cwd" not in kwargs:
            kwargs["cwd"] = self.src_dir
        result = exec_command(args, **kwargs)
        result = proc.exec_command(args, **kwargs)
        if result["returncode"] == 0:
            return result["out"].strip()
        raise VCSBaseException(
@@ -129,6 +131,28 @@ class VCSClientBase(object):
class GitClient(VCSClientBase):

    command = "git"
    _configured = False

    def __init__(self, *args, **kwargs):
        self.configure()
        super(GitClient, self).__init__(*args, **kwargs)

    @classmethod
    def configure(cls):
        if cls._configured:
            return True
        cls._configured = True
        try:
            result = proc.exec_command([cls.command, "--exec-path"])
            if result["returncode"] != 0:
                return False
            path = result["out"].strip()
            if path:
                proc.append_env_path("PATH", path)
                return True
        except subprocess.CalledProcessError:
            pass
        return False

    def check_client(self):
        try:
@@ -173,7 +197,7 @@ class GitClient(VCSClientBase):
        if self.tag:
            args += ["--branch", self.tag]
        args += [self.remote_url, self.src_dir]
        assert self.run_cmd(args)
        assert self.run_cmd(args, cwd=os.getcwd())
        if is_commit:
            assert self.run_cmd(["reset", "--hard", self.tag])
        return self.run_cmd(

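A standalone sketch of the one-time "git --exec-path" discovery that GitClient.configure() introduces above, using only the standard library (Python 3.7+; no PlatformIO imports):

import os
import subprocess

def configure_git_path():
    try:
        result = subprocess.run(
            ["git", "--exec-path"], capture_output=True, text=True, check=True
        )
    except (OSError, subprocess.CalledProcessError):
        return False
    path = result.stdout.strip()
    if path and path not in os.environ.get("PATH", "").split(os.pathsep):
        os.environ["PATH"] = os.pathsep.join([os.environ.get("PATH", ""), path])
    return bool(path)
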
@@ -17,18 +17,17 @@ from platformio.package.meta import PackageSpec


class PlatformPackagesMixin(object):
    def get_package_spec(self, name):
        version = self.packages[name].get("version", "")
        if any(c in version for c in (":", "/", "@")):
            return PackageSpec("%s=%s" % (name, version))
    def get_package_spec(self, name, version=None):
        return PackageSpec(
            owner=self.packages[name].get("owner"), name=name, requirements=version
            owner=self.packages[name].get("owner"),
            name=name,
            requirements=version or self.packages[name].get("version"),
        )

    def get_package(self, name):
    def get_package(self, name, spec=None):
        if not name:
            return None
        return self.pm.get_package(self.get_package_spec(name))
        return self.pm.get_package(spec or self.get_package_spec(name))

    def get_package_dir(self, name):
        pkg = self.get_package(name)
@@ -38,12 +37,18 @@ class PlatformPackagesMixin(object):
        pkg = self.get_package(name)
        return str(pkg.metadata.version) if pkg else None

    def get_installed_packages(self):
    def get_installed_packages(self, with_optional=False):
        result = []
        for name in self.packages:
            pkg = self.get_package(name)
            if pkg:
                result.append(pkg)
        for name, options in self.packages.items():
            versions = [options.get("version")]
            if with_optional:
                versions.extend(options.get("optionalVersions", []))
            for version in versions:
                if not version:
                    continue
                pkg = self.get_package(name, self.get_package_spec(name, version))
                if pkg:
                    result.append(pkg)
        return result

    def dump_used_packages(self):

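A standalone sketch of the with_optional expansion that get_installed_packages() gains above (the packages mapping below is illustrative data, not taken from a real platform manifest):

packages = {
    "toolchain-gccarmnoneeabi": {
        "owner": "platformio",
        "version": "~1.90301.0",
        "optionalVersions": ["~1.80201.0", "~1.70201.0"],
    },
}

def iter_package_specs(with_optional=False):
    for name, options in packages.items():
        versions = [options.get("version")]
        if with_optional:
            versions.extend(options.get("optionalVersions", []))
        for version in versions:
            if version:
                yield name, version

print(list(iter_package_specs(with_optional=True)))
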
@@ -134,7 +134,9 @@ class PlatformRunMixin(object):
            args,
            stdout=proc.BuildAsyncPipe(
                line_callback=self._on_stdout_line,
                data_callback=lambda data: _write_and_flush(sys.stdout, data),
                data_callback=lambda data: None
                if self.silent
                else _write_and_flush(sys.stdout, data),
            ),
            stderr=proc.BuildAsyncPipe(
                line_callback=self._on_stderr_line,

@@ -252,6 +252,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
                click.secho(
                    "Could not install Python packages -> %s" % e, fg="red", err=True
                )
                return None

    def uninstall_python_packages(self):
        if not self.python_packages:

@@ -37,6 +37,8 @@ class PlatformFactory(object):

    @classmethod
    def new(cls, pkg_or_spec):
        # pylint: disable=import-outside-toplevel

        platform_dir = None
        platform_name = None
        if isinstance(pkg_or_spec, PackageItem):
@@ -45,9 +47,7 @@ class PlatformFactory(object):
        elif os.path.isdir(pkg_or_spec):
            platform_dir = pkg_or_spec
        else:
            from platformio.package.manager.platform import (  # pylint: disable=import-outside-toplevel
                PlatformPackageManager,
            )
            from platformio.package.manager.platform import PlatformPackageManager

            pkg = PlatformPackageManager().get_package(pkg_or_spec)
            if not pkg:

@@ -20,9 +20,11 @@ from threading import Thread

from platformio import exception
from platformio.compat import (
    PY2,
    WINDOWS,
    get_filesystem_encoding,
    get_locale_encoding,
    get_running_loop,
    string_types,
)

@@ -30,7 +32,10 @@ from platformio.compat import (
class AsyncPipeBase(object):
    def __init__(self):
        self._fd_read, self._fd_write = os.pipe()
        self._pipe_reader = os.fdopen(self._fd_read)
        if PY2:
            self._pipe_reader = os.fdopen(self._fd_read)
        else:
            self._pipe_reader = os.fdopen(self._fd_read, errors="backslashreplace")
        self._buffer = ""
        self._thread = Thread(target=self.run)
        self._thread.start()
@@ -66,10 +71,10 @@ class BuildAsyncPipe(AsyncPipeBase):
        line = ""
        print_immediately = False

        for byte in iter(lambda: self._pipe_reader.read(1), ""):
            self._buffer += byte
        for char in iter(lambda: self._pipe_reader.read(1), ""):
            self._buffer += char

            if line and byte.strip() and line[-3:] == (byte * 3):
            if line and char.strip() and line[-3:] == (char * 3):
                print_immediately = True

            if print_immediately:
@@ -77,12 +82,12 @@ class BuildAsyncPipe(AsyncPipeBase):
                if line:
                    self.data_callback(line)
                    line = ""
                self.data_callback(byte)
                if byte == "\n":
                self.data_callback(char)
                if char == "\n":
                    print_immediately = False
            else:
                line += byte
                if byte != "\n":
                line += char
                if char != "\n":
                    continue
                self.line_callback(line)
                line = ""
@@ -125,7 +130,9 @@ def exec_command(*args, **kwargs):
            result[s[3:]] = kwargs[s].get_buffer()

    for k, v in result.items():
        if isinstance(result[k], bytes):
        if PY2 and isinstance(v, unicode):  # pylint: disable=undefined-variable
            result[k] = v.encode()
        elif not PY2 and isinstance(result[k], bytes):
            try:
                result[k] = result[k].decode(
                    get_locale_encoding() or get_filesystem_encoding()
@@ -203,3 +210,20 @@ def where_is_program(program, envpath=None):
            return os.path.join(bin_dir, "%s.exe" % program)

    return program


def append_env_path(name, value):
    cur_value = os.environ.get(name) or ""
    if cur_value and value in cur_value.split(os.pathsep):
        return cur_value
    os.environ[name] = os.pathsep.join([cur_value, value])
    return os.environ[name]


def force_exit(code=0):
    try:
        get_running_loop().stop()
    except:  # pylint: disable=bare-except
        pass
    finally:
        sys.exit(code)

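A short usage sketch for append_env_path() above (assumes PlatformIO 5.x is installed; the directory is hypothetical). The call is idempotent, so repeating it does not duplicate the PATH entry:

import os
from platformio import proc

proc.append_env_path("PATH", "/opt/my-tools/bin")
proc.append_env_path("PATH", "/opt/my-tools/bin")  # second call returns early
print(os.environ["PATH"].split(os.pathsep).count("/opt/my-tools/bin"))  # expected: 1
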
@@ -681,6 +681,11 @@ ProjectOptions = OrderedDict(
                "network address)"
            ),
        ),
        ConfigEnvOption(
            group="debug",
            name="debug_speed",
            description="A debug adapter speed (JTAG speed)",
        ),
        ConfigEnvOption(
            group="debug",
            name="debug_svd_path",

@@ -25,7 +25,7 @@ from glob import glob

import click

from platformio import __version__, exception, proc
from platformio import __version__, compat, exception, proc
from platformio.compat import PY2, WINDOWS
from platformio.fs import cd, load_json  # pylint: disable=unused-import
from platformio.proc import exec_command  # pylint: disable=unused-import
@@ -162,14 +162,10 @@ def get_logical_devices():


def get_mdns_services():
    # pylint: disable=import-outside-toplevel
    try:
        import zeroconf
    except ImportError:
        from platformio.package.manager.core import inject_contrib_pysite
    compat.ensure_python3()

        inject_contrib_pysite()
        import zeroconf  # pylint: disable=import-outside-toplevel
    # pylint: disable=import-outside-toplevel
    import zeroconf

    class mDNSListener(object):
        def __init__(self):
@@ -178,15 +174,20 @@ def get_mdns_services():
            self._found_services = []

        def __enter__(self):
            zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.", self)
            zeroconf.ServiceBrowser(
                self._zc,
                [
                    "_http._tcp.local.",
                    "_hap._tcp.local.",
                    "_services._dns-sd._udp.local.",
                ],
                self,
            )
            return self

        def __exit__(self, etype, value, traceback):
            self._zc.close()

        def remove_service(self, zc, type_, name):
            pass

        def add_service(self, zc, type_, name):
            try:
                assert zeroconf.service_type_name(name)
@@ -201,6 +202,12 @@ def get_mdns_services():
                if s:
                    self._found_services.append(s)

        def remove_service(self, zc, type_, name):
            pass

        def update_service(self, zc, type_, name):
            pass

        def get_services(self):
            return self._found_services

@@ -225,12 +232,7 @@ def get_mdns_services():
                {
                    "type": service.type,
                    "name": service.name,
                    "ip": ".".join(
                        [
                            str(c if isinstance(c, int) else ord(c))
                            for c in service.address
                        ]
                    ),
                    "ip": ", ".join(service.parsed_addresses()),
                    "port": service.port,
                    "properties": properties,
                }

@@ -167,3 +167,6 @@ ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="0666", ENV{ID_MM_DEVICE

# CMSIS-DAP compatible adapters
ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"

# Atmel AVR Dragon
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"

@@ -26,15 +26,25 @@ import click
    envvar="PIO_INSTALL_DEVPLATFORMS_IGNORE",
    help="Ignore names split by comma",
)
def main(desktop, ignore):
@click.option(
    "--ownernames",
    envvar="PIO_INSTALL_DEVPLATFORMS_OWNERNAMES",
    help="Filter dev-platforms by ownernames (split by comma)",
)
def main(desktop, ignore, ownernames):
    platforms = json.loads(
        subprocess.check_output(
            ["platformio", "platform", "search", "--json-output"]
        ).decode()
    )
    ignore = [n.strip() for n in (ignore or "").split(",") if n.strip()]
    ownernames = [n.strip() for n in (ownernames or "").split(",") if n.strip()]
    for platform in platforms:
        skip = [not desktop and platform["forDesktop"], platform["name"] in ignore]
        skip = [
            not desktop and platform["forDesktop"],
            platform["name"] in ignore,
            ownernames and platform["ownername"] not in ownernames,
        ]
        if any(skip):
            continue
        subprocess.check_call(["platformio", "platform", "install", platform["name"]])

setup.py (32 changed lines)
@@ -26,18 +26,28 @@ from platformio import (
from platformio.compat import PY2, WINDOWS


install_requires = [
    "bottle<0.13",
minimal_requirements = [
    "bottle==0.12.*",
    "click>=5,<8%s" % (",!=7.1,!=7.1.1" if WINDOWS else ""),
    "colorama",
    "pyserial>=3,<4,!=3.3",
    "requests>=2.4.0,<3",
    "semantic_version>=2.8.1,<3",
    "tabulate>=0.8.3,<1",
    "pyelftools>=0.25,<1",
    "marshmallow%s" % (">=2,<3" if PY2 else ">=2"),
    "marshmallow%s" % (">=2,<3" if PY2 else ">=2,<4"),
    "pyelftools>=0.27,<1",
    "pyserial==3.*",
    "requests==2.*",
    "semantic_version==2.8.*",
    "tabulate==0.8.*",
]

if not PY2:
    minimal_requirements.append("zeroconf==0.28.*")

home_requirements = [
    "aiofiles==0.6.*",
    "ajsonrpc==1.1.*",
    "starlette==0.14.*",
    "uvicorn==0.13.*",
    "wsproto==1.0.*",
]

setup(
    name=__title__,
@@ -48,10 +58,7 @@ setup(
    author_email=__email__,
    url=__url__,
    license=__license__,
    python_requires=", ".join(
        [">=2.7", "!=3.0.*", "!=3.1.*", "!=3.2.*", "!=3.3.*", "!=3.4.*"]
    ),
    install_requires=install_requires,
    install_requires=minimal_requirements + ([] if PY2 else home_requirements),
    packages=find_packages(exclude=["tests.*", "tests"]) + ["scripts"],
    package_data={
        "platformio": [
@@ -77,7 +84,6 @@ setup(
        "Operating System :: OS Independent",
        "Programming Language :: C",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development",
        "Topic :: Software Development :: Build Tools",

@@ -23,7 +23,7 @@ def test_board_json_output(clirunner, validate_cliresult):
    validate_cliresult(result)
    boards = json.loads(result.output)
    assert isinstance(boards, list)
    assert any(["mbed" in b["frameworks"] for b in boards])
    assert any("mbed" in b["frameworks"] for b in boards)


def test_board_raw_output(clirunner, validate_cliresult):

@@ -154,7 +154,7 @@ def test_check_includes_passed(clirunner, check_dir):
        inc_count = l.count("-I")

    # at least 1 include path for default mode
    assert inc_count > 1
    assert inc_count > 0


def test_check_silent_mode(clirunner, validate_cliresult, check_dir):
@@ -410,6 +410,22 @@ check_tool = pvs-studio
    assert style == 0


def test_check_pvs_studio_fails_without_license(clirunner, tmpdir):
    config = DEFAULT_CONFIG + "\ncheck_tool = pvs-studio"

    tmpdir.join("platformio.ini").write(config)
    tmpdir.mkdir("src").join("main.c").write(TEST_CODE)

    default_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
    verbose_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir), "-v"])

    assert default_result.exit_code != 0
    assert "failed to perform check" in default_result.output.lower()

    assert verbose_result.exit_code != 0
    assert "license was not entered" in verbose_result.output.lower()


def test_check_embedded_platform_all_tools(clirunner, validate_cliresult, tmpdir):
    config = """
[env:test]
@@ -505,3 +521,16 @@ int main() {}
    verbose_errors, _, _ = count_defects(result.output)

    assert verbose_errors == errors == 1


def test_check_handles_spaces_in_paths(clirunner, validate_cliresult, tmpdir_factory):
    tmpdir = tmpdir_factory.mktemp("project dir")
    config = DEFAULT_CONFIG + "\ncheck_tool = cppcheck, clangtidy, pvs-studio"
    tmpdir.join("platformio.ini").write(config)
    tmpdir.mkdir("src").join("main.cpp").write(
        PVS_STUDIO_FREE_LICENSE_HEADER + TEST_CODE
    )

    default_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])

    validate_cliresult(default_result)

@@ -70,10 +70,8 @@ def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir):
    validate_cliresult(result)
    validate_pioproject(str(tmpdir))
    assert all(
        [
            tmpdir.join(".vscode").join(f).check()
            for f in ("c_cpp_properties.json", "launch.json")
        ]
        tmpdir.join(".vscode").join(f).check()
        for f in ("c_cpp_properties.json", "launch.json")
    )
    assert (
        "framework-arduino-avr"
@@ -113,7 +111,7 @@ def test_init_ide_eclipse(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_init, ["-b", "uno", "--ide", "eclipse"])
    validate_cliresult(result)
    validate_pioproject(getcwd())
    assert all([isfile(f) for f in (".cproject", ".project")])
    assert all(isfile(f) for f in (".cproject", ".project"))


def test_init_special_board(clirunner, validate_cliresult):

@@ -172,27 +172,23 @@ def test_global_lib_list(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["-g", "list"])
    validate_cliresult(result)
    assert all(
        [
            n in result.output
            for n in (
                "Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
                "Version: 5.10.1",
                "Source: git+https://github.com/gioblu/PJON.git#3.0",
                "Version: 3.0.0+sha.1fb26fd",
            )
        ]
        n in result.output
        for n in (
            "Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
            "Version: 5.10.1",
            "Source: git+https://github.com/gioblu/PJON.git#3.0",
            "Version: 3.0.0+sha.1fb26fd",
        )
    )

    result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
    assert all(
        [
            n in result.output
            for n in (
                "__pkg_dir",
                '"__src_url": "git+https://github.com/gioblu/PJON.git#6.2"',
                '"version": "5.10.1"',
            )
        ]
        n in result.output
        for n in (
            "__pkg_dir",
            '"__src_url": "git+https://github.com/gioblu/PJON.git#6.2"',
            '"version": "5.10.1"',
        )
    )
    items1 = [i["name"] for i in json.loads(result.output)]
    items2 = [
@@ -236,7 +232,9 @@ def test_global_lib_update_check(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["-g", "update", "--dry-run", "--json-output"])
    validate_cliresult(result)
    output = json.loads(result.output)
    assert set(["ESPAsyncTCP", "NeoPixelBus"]) == set(lib["name"] for lib in output)
    assert set(["Adafruit PN532", "ESPAsyncTCP", "NeoPixelBus"]) == set(
        lib["name"] for lib in output
    )


def test_global_lib_update(clirunner, validate_cliresult):
@@ -256,7 +254,7 @@ def test_global_lib_update(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["-g", "update"])
    validate_cliresult(result)
    assert result.output.count("[Detached]") == 1
    assert result.output.count("[Up-to-date]") == 15
    assert result.output.count("[Up-to-date]") == 14

    # update unknown library
    result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
@@ -314,7 +312,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_core):
def test_lib_show(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["show", "64"])
    validate_cliresult(result)
    assert all([s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
    assert all(s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR"))
    result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
    validate_cliresult(result)
    assert "OneWire" in result.output
@@ -331,10 +329,8 @@ def test_lib_stats(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["stats"])
    validate_cliresult(result)
    assert all(
        [
            s in result.output
            for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
        ]
        s in result.output
        for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
    )

    result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])

@@ -103,7 +103,7 @@ def test_list_json_output(clirunner, validate_cliresult):
def test_list_raw_output(clirunner, validate_cliresult):
    result = clirunner.invoke(cli_platform.platform_list)
    validate_cliresult(result)
    assert all([s in result.output for s in ("atmelavr", "espressif8266")])
    assert all(s in result.output for s in ("atmelavr", "espressif8266"))


def test_update_check(clirunner, validate_cliresult, isolated_pio_core):

@@ -34,9 +34,7 @@ def test_local_env():
    if result["returncode"] != 1:
        pytest.fail(str(result))
    # pylint: disable=unsupported-membership-test
    assert all([s in result["err"] for s in ("PASSED", "IGNORED", "FAILED")]), result[
        "out"
    ]
    assert all(s in result["err"] for s in ("PASSED", "FAILED")), result["out"]


def test_multiple_env_build(clirunner, validate_cliresult, tmpdir):

@@ -21,7 +21,7 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core):
    matches = ("Platform Manager", "Library Manager")
    result = clirunner.invoke(cmd_update, ["--only-check"])
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
    assert all(m in result.output for m in matches)
    result = clirunner.invoke(cmd_update)
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
    assert all(m in result.output for m in matches)

@@ -169,6 +169,15 @@ def test_spec_vcs_urls():
        url="git+git@github.com:platformio/platformio-core.git",
        requirements="^1.2.3,!=5",
    )
    assert PackageSpec(
        owner="platformio",
        name="external-repo",
        requirements="https://github.com/platformio/platformio-core",
    ) == PackageSpec(
        owner="platformio",
        name="external-repo",
        url="git+https://github.com/platformio/platformio-core",
    )


def test_spec_as_dict():

@@ -91,7 +91,7 @@ def test_check_and_update_libraries(clirunner, isolated_pio_core, validate_clire
    assert "There are the new updates for libraries (ArduinoJson)" in result.output
    assert "Please wait while updating libraries" in result.output
    assert re.search(
        r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[Outdated [\d\.]+\]",
        r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[Updating to [\d\.]+\]",
        result.output,
    )

@@ -143,7 +143,9 @@ def test_check_and_update_platforms(clirunner, isolated_pio_core, validate_clire
    validate_cliresult(result)
    assert "There are the new updates for platforms (native)" in result.output
    assert "Please wait while updating platforms" in result.output
    assert re.search(r"Updating native\s+0.0.0\s+\[Outdated [\d\.]+\]", result.output)
    assert re.search(
        r"Updating native\s+0.0.0\s+\[Updating to [\d\.]+\]", result.output
    )

    # check updated version
    result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"])

tox.ini (10 changed lines)
@@ -13,14 +13,18 @@
# limitations under the License.

[tox]
envlist = py27,py37,py38,py39
envlist = py36,py37,py38,py39

[isort]
line_length = 88
known_third_party=OpenSSL, SCons, jsonrpc, twisted, zope

[testenv]
passenv = *
usedevelop = True
deps =
    py36,py37,py38,py39: black
    isort<5
    black
    isort
    pylint
    pytest
    pytest-xdist