Mirror of https://github.com/platformio/platformio-core.git (synced 2025-12-23 07:12:31 +01:00)
Compare commits
208 Commits
30b00e7a9d 9800fb7b2c 3b66f4270c dc14bd7362 4be5185ed3 1ea0adf6af 7cb40ef3b0 044bf61a4d
e0f9cb8c26 d6d1c6b327 4c177c1ad3 490af8ac37 ca48e6c172 7533c369d4 cd8024c762 0b4aedbeeb
3d2ac4698c e0a3b81877 af21c50aec 1cbc424488 887e542cb2 780c62d925 122ebed16d 158aabbdf2
a8c3f2bdf6 8814f4e92d ba5f61f92b 43dd429aa2 cd8179a41f 10136729ab c5d7c4f88e e3796cfda1
847fdd4deb c56b35f504 bc9d9ac2db e2f0d96f09 4e78c3ec40 dfffd5e97b 8c13d13f80 32d501bed1
17a7293967 59902abd09 a4756987a4 b04c1591c2 83c4e5f463 8ada1c2b34 b7b01dd6a0 5c2673cd71
09f7ff2db3 5e8eb77090 a0493e6ac4 4d755f2692 fcb676abc6 fa0de1dad4 6653c02487 0939b43899
537558d410 7c9e0393f8 9ddf73baa6 699da0a8fb fd8c9786c0 410324b2c7 36d470279c e498119e0d
9ff117b0fb 2695690b34 8dc20f93d5 f8d21e5b32 72ac6c86df 8c2a7df53e d92e36efa0 741e9a40b3
7527143fff a23fef010f cd4f5541ac 73089b3cb0 3a70c902a9 bedbae6311 842679c32b 10ff4ae77a
bc325ab2cc a31a7f2b06 4278574450 6f8f2511c2 5282124664 83bb6611b9 dcc02c3e14 f070399cad
b9920b286f d278f8f215 3c5c65769c 2f7362951c f4535190a3 236c4570cf 5844c536a4 6627fd5790
25074d80d3 f032663b33 d24702eb29 9051677d74 7637286efa 31a24e1652 c8c4028a23 0bd27a36e9
ddfe5a6c03 ee93ca1615 4c2aca4956 dd14b5e2ed 6464420c1c 79ec493c79 abb464707d 7c846b8968
84c2e0a3d6 c2ddc89e46 1495e24e1e 6e16b43568 6c18b37d54 6134db8e81 3cf62f8fa6 523b6dfa98
3928cb522e de856ee730 d3b7508bd5 6c71a3bea2 d2e27f5385 2a5de43964 029e66cd06 96fb8c74f9
b006f53010 19d518fc4c f01cd7570c ffebfd4376 e4264a6a51 d85bc0f7f8 1445a91fab 3b878747f2
401f8a4891 6bec593b93 aef49a8bff 772e25df49 3363b3a516 1f096fe03f 32e440bec7 99b5204802
3c17b31d5e 89a80f158e c42db2ec22 6a3b6f0d44 ca2622b7a6 b9a9fd4f43 1ea6d47110 256acf7e23
284ccc9e8a 655eedd7b0 bb6490d6f2 300b7b2138 86c4bd69d2 dd63c8002a 13fc8508b3 a76933990c
7e3e394707 cee3f4d90f c557473cfb f893fcf135 092326cb91 92a5c1bac6 4b2f0eb1d5 9ae67fdad9
5142feba7a 8cbe7bc7a6 d8f36b6534 58d533a3bb 18e130fd12 b72c1636f7 f68c18d1e5 db6b8a6dbc
5afa0a955e ca3b3717d3 d4784c05f5 7a01da7039 42690d3fa7 50cbc4d4e2 63c2278a83 4bccaae945
e12bc9fe5f ac63cf0240 30709fd0b3 6f9985125d 743a3e2c02 bd21ff0d3e 46858fff39 854c549e1c
4b5bc91abb 375c396b7b 7aaa9c028b 7f351bc7c8 c42fe32972 a6e61a7a5a 4bc3e3cf95 4a7a8b8b68
51ab0bbd3c 30937df4e6 b15a4e746a 1b17234c41 26f897cb55 99d049a6dd f3c3402b35 55b9c446f1
.github/workflows/core.yml (1 changed line, vendored)
@@ -33,6 +33,7 @@ jobs:
          pip install tox

      - name: Python Lint
        if: ${{ matrix.python-version != '3.6' }}
        run: |
          tox -e lint
.github/workflows/deployment.yml (7 changed lines, vendored)
@@ -3,7 +3,7 @@ name: Deployment
on:
  push:
    branches:
      - master
      - "master"
      - "release/**"

jobs:
@@ -34,8 +34,11 @@ jobs:
        run: |
          tox -e testcore

      - name: Build Python source tarball
        run: python setup.py sdist

      - name: Publish package to PyPI
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
        if: ${{ github.ref == 'refs/heads/master' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
.github/workflows/projects.yml (12 changed lines, vendored)
@@ -13,15 +13,15 @@ jobs:
          folder: "Marlin"
          config_dir: "Marlin"
          env_name: "mega2560"
        - esphome:
          repository: "esphome/esphome"
          folder: "esphome"
          config_dir: "esphome"
          env_name: "esp32-arduino"
        # - esphome:
        #   repository: "esphome/esphome"
        #   folder: "esphome"
        #   config_dir: "esphome"
        #   env_name: "esp32-arduino"
        - smartknob:
          repository: "scottbez1/smartknob"
          folder: "smartknob"
          config_dir: "smartknob/firmware"
          config_dir: "smartknob"
          env_name: "view"
        - espurna:
          repository: "xoseperez/espurna"
.pylintrc (16 changed lines)
@@ -3,21 +3,9 @@ output-format=colorized

[MESSAGES CONTROL]
disable=
    bad-continuation,
    bad-whitespace,
    missing-docstring,
    ungrouped-imports,
    invalid-name,
    cyclic-import,
    duplicate-code,
    superfluous-parens,
    invalid-name,
    too-few-public-methods,
    useless-object-inheritance,
    useless-import-alias,
    bad-option-value,
    consider-using-dict-items,
    consider-using-f-string,

    ; PY2 Compat
    super-with-arguments,
    raise-missing-from
    cyclic-import
HISTORY.rst (94 changed lines)
@@ -13,6 +13,100 @@ PlatformIO Core 6

**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**

6.1.5 (2022-11-01)
~~~~~~~~~~~~~~~~~~

* Added a new `enable_proxy_strict_ssl <https://docs.platformio.org/en/latest/core/userguide/cmd_settings.html>`__ setting to disable the proxy server certificate verification (`issue #4432 <https://github.com/platformio/platformio-core/issues/4432>`_)
* Documented `PlatformIO Core Proxy Configuration <https://docs.platformio.org/en/latest/core/installation/proxy-configuration.html>`__
* Speeded up device port finder by avoiding loading board HWIDs from development platforms
* Improved caching of build metadata in debug mode
* Fixed an issue when `pio pkg install --storage-dir <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html>`__ command requires PlatformIO project (`issue #4410 <https://github.com/platformio/platformio-core/issues/4410>`_)
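The `enable_proxy_strict_ssl` entry above describes a regular core setting, so it can be toggled with `pio settings set enable_proxy_strict_ssl no` and read back from Python. A minimal, hedged sketch, assuming the `platformio.app.get_setting()` helper that current Core releases expose:

```python
# Minimal sketch (assumes PlatformIO Core is installed and importable).
from platformio import app

# Defaults to True; disabling it skips proxy certificate verification,
# e.g. behind a corporate TLS-intercepting proxy.
if not app.get_setting("enable_proxy_strict_ssl"):
    print("Proxy certificate verification is disabled")
```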
6.1.4 (2022-08-12)
~~~~~~~~~~~~~~~~~~

* Added support for accepting the original FileNode environment in a "callback" function when using `Build Middlewares <https://docs.platformio.org/en/latest/scripting/middlewares.html>`__ (`pull #4380 <https://github.com/platformio/platformio-core/pull/4380>`_)
* Improved device port finder when using dual channel UART converter (`issue #4367 <https://github.com/platformio/platformio-core/issues/4367>`_)
* Improved project dependency resolving when using the `pio project init --ide <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command
* Upgraded build engine to the SCons 4.4.0 (`release notes <https://github.com/SCons/scons/releases/tag/4.4.0>`__)
* Keep custom "unwantedRecommendations" when generating projects for VSCode (`issue #4383 <https://github.com/platformio/platformio-core/issues/4383>`_)
* Do not resolve project dependencies for the ``cleanall`` target (`issue #4344 <https://github.com/platformio/platformio-core/issues/4344>`_)
* Warn about calling "env.BuildSources" in a POST-type script (`issue #4385 <https://github.com/platformio/platformio-core/issues/4385>`_)
* Fixed an issue when escaping macros/defines for IDE integration (`issue #4360 <https://github.com/platformio/platformio-core/issues/4360>`_)
* Fixed an issue when the "cleanall" target removes dependencies from all working environments (`issue #4386 <https://github.com/platformio/platformio-core/issues/4386>`_)
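The first 6.1.4 entry (Build Middlewares now also receive the original environment) corresponds to the two-argument callback form. A minimal, hedged extra_script sketch, assuming the documented `env.AddBuildMiddleware()` API; the file pattern and the `-O3` flag are illustrative only, not taken from this changeset:

```python
# Hedged sketch of a project extra_script (PRE-type).
Import("env")

def optimize_vendor_sources(env, node):
    # Since 6.1.4 the middleware callback may accept (env, node), so
    # per-file build settings can be derived from the environment.
    return env.Object(node, CCFLAGS=env["CCFLAGS"] + ["-O3"])

# One-argument callbacks (node only) continue to work unchanged.
env.AddBuildMiddleware(optimize_vendor_sources, "*/vendor/*")
```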
6.1.3 (2022-07-18)
~~~~~~~~~~~~~~~~~~

* Fixed a regression bug when opening device monitor without any filters (`issue #4363 <https://github.com/platformio/platformio-core/issues/4363>`_)

6.1.2 (2022-07-18)
~~~~~~~~~~~~~~~~~~

* Export a ``PIO_UNIT_TESTING`` macro to the project source files and dependent libraries in the |UNITTESTING| mode
* Improved detection of Windows architecture (`issue #4353 <https://github.com/platformio/platformio-core/issues/4353>`_)
* Warn about unknown `device monitor filters <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #4362 <https://github.com/platformio/platformio-core/issues/4362>`_)
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__ manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
* Fixed an issue when the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command didn't work with Python 3.6 (`issue #4352 <https://github.com/platformio/platformio-core/issues/4352>`_)

6.1.1 (2022-07-11)
~~~~~~~~~~~~~~~~~~

* Added new ``monitor_encoding`` project configuration option to configure `Device Monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ (`issue #4350 <https://github.com/platformio/platformio-core/issues/4350>`_)
* Allowed specifying project environments for `pio ci <https://docs.platformio.org/en/latest/core/userguide/cmd_ci.html>`__ command (`issue #4347 <https://github.com/platformio/platformio-core/issues/4347>`_)
* Show "TimeoutError" only in the verbose mode when can not find a serial port
* Fixed an issue when a serial port was not automatically detected if the board has predefined HWIDs
* Fixed an issue with endless scanning of project dependencies (`issue #4349 <https://github.com/platformio/platformio-core/issues/4349>`_)
* Fixed an issue with |LDF| when incompatible libraries were used for the working project environment with the missed framework (`pull #4346 <https://github.com/platformio/platformio-core/pull/4346>`_)
6.1.0 (2022-07-06)
~~~~~~~~~~~~~~~~~~

* **Device Manager**

  - Automatically reconnect device monitor if a connection fails
  - Added new `pio device monitor --no-reconnect <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-no-reconnect>`__ option to disable automatic reconnection
  - Handle device monitor disconnects more gracefully (`issue #3939 <https://github.com/platformio/platformio-core/issues/3939>`_)
  - Improved a serial port finder for `Black Magic Probe <https://docs.platformio.org/en/latest/plus/debug-tools/blackmagic.html>`__ (`issue #4023 <https://github.com/platformio/platformio-core/issues/4023>`_)
  - Improved a serial port finder for a board with predefined HWIDs
  - Replaced ``monitor_flags`` with independent project configuration options: `monitor_parity <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-parity>`__, `monitor_eol <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-eol>`__, `monitor_raw <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-raw>`__, `monitor_echo <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-echo>`__
  - Fixed an issue when the monitor filters were not applied in their order (`issue #4320 <https://github.com/platformio/platformio-core/issues/4320>`_)

* **Unit Testing**

  - Updated "Getting Started" documentation for `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/googletest.html>`__ testing and mocking framework
  - Export |UNITTESTING| flags only to the project build environment (``projenv``, files in "src" folder)
  - Merged the "building" stage with "uploading" for the embedded target (`issue #4307 <https://github.com/platformio/platformio-core/issues/4307>`_)
  - Do not resolve dependencies from the project "src" folder when the `test_build_src <https://docs.platformio.org/en/latest//projectconf/section_env_test.html#test-build-src>`__ option is not enabled
  - Do not immediately terminate a testing program when results are received
  - Fixed an issue when a custom `pio test --project-config <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-c>`__ was not handled properly (`issue #4299 <https://github.com/platformio/platformio-core/issues/4299>`_)
  - Fixed an issue when testing results were wrong in the verbose mode (`issue #4336 <https://github.com/platformio/platformio-core/issues/4336>`_)

* **Build System**

  - Significantly improved support for `Pre & Post Actions <https://docs.platformio.org/en/latest/scripting/actions.html>`__

    * Allowed to declare actions in the `PRE-type scripts <https://docs.platformio.org/en/latest/scripting/launch_types.html>`__ even if the target is not ready yet
    * Allowed library maintainers to use Pre & Post Actions in the library `extraScript <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/extrascript.html>`__

  - Documented `Stringification <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#stringification>`__ – converting a macro argument into a string constant (`issue #4310 <https://github.com/platformio/platformio-core/issues/4310>`_)
  - Added new `pio run --monitor-port <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-monitor-port>`__ option to specify custom device monitor port to the ``monitor`` target (`issue #4337 <https://github.com/platformio/platformio-core/issues/4337>`_)
  - Added ``env.StringifyMacro(value)`` helper function for the `Advanced Scripting <https://docs.platformio.org/en/latest/scripting/index.html>`__
  - Allowed to ``Import("projenv")`` in a library extra script (`issue #4305 <https://github.com/platformio/platformio-core/issues/4305>`_)
  - Fixed an issue when the `build_unflags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-unflags>`__ operation ignores a flag value (`issue #4309 <https://github.com/platformio/platformio-core/issues/4309>`_)
  - Fixed an issue when the `build_unflags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-unflags>`__ option was not applied to the ``ASPPFLAGS`` scope
  - Fixed an issue on Windows OS when flags were wrapped to the temporary file while generating the `Compilation database "compile_commands.json" <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__
  - Fixed an issue with the |LDF| when recursively scanning dependencies in the ``chain`` mode
  - Fixed a "PermissionError" on Windows when running "clean" or "cleanall" targets (`issue #4331 <https://github.com/platformio/platformio-core/issues/4331>`_)

* **Package Management**

  - Fixed an issue when library dependencies were installed for the incompatible project environment (`issue #4338 <https://github.com/platformio/platformio-core/issues/4338>`_)

* **Miscellaneous**

  - Warn about incompatible Bash version for the `Shell Completion <https://docs.platformio.org/en/latest/core/userguide/system/completion/index.html>`__ (`issue #4326 <https://github.com/platformio/platformio-core/issues/4326>`_)

6.0.2 (2022-06-01)
~~~~~~~~~~~~~~~~~~
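The 6.1.0 "Build System" entries above introduce the ``env.StringifyMacro(value)`` helper alongside the documented Stringification behavior. A minimal, hedged extra_script sketch; the macro name and value are hypothetical examples, not taken from this changeset:

```python
# Hedged sketch of a project extra_script.
Import("env")

# StringifyMacro() wraps the value in escaped quotes so it survives the
# shell and reaches the compiler as a C string constant.
env.Append(CPPDEFINES=[("VERSION_STRING", env.StringifyMacro("1.2.3+dev"))])
```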
Makefile (4 changed lines)
@@ -1,6 +1,6 @@
lint:
	pylint -j 6 --rcfile=./.pylintrc ./tests
	pylint -j 6 --rcfile=./.pylintrc ./platformio
	pylint --rcfile=./.pylintrc ./tests
	pylint --rcfile=./.pylintrc ./platformio

isort:
	isort ./platformio
@@ -1,7 +1,3 @@
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
   :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
   :alt: SWUbanner

PlatformIO Core
===============

@@ -99,3 +95,7 @@ Copyright (c) 2014-present PlatformIO <contact@platformio.org>

The PlatformIO is licensed under the permissive Apache 2.0 license,
so you can use it in both commercial and personal projects with confidence.

.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
   :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
   :alt: SWUbanner
Submodule docs updated: 300060ea08...f82e7f4266
Submodule examples updated: 6c52fd3277...f98cb5a9be
@@ -14,7 +14,7 @@

import sys

VERSION = (6, 0, 2)
VERSION = (6, 1, 5)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -44,12 +44,10 @@ __registry_mirror_hosts__ = [
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

__default_requests_timeout__ = (10, None)  # (connect, read)

__core_packages__ = {
    "contrib-piohome": "~3.4.1",
    "contrib-piohome": "~3.4.2",
    "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
    "tool-scons": "~4.40300.0",
    "tool-scons": "~4.40400.0",
    "tool-cppcheck": "~1.270.0",
    "tool-clangtidy": "~1.120001.0",
    "tool-pvs-studio": "~7.18.0",
@@ -31,7 +31,7 @@ from platformio.compat import IS_CYGWIN, ensure_python3
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.pass_context
def cli(ctx, force, caller, no_ansi):
def cli(ctx, force, caller, no_ansi):  # pylint: disable=unused-argument
    try:
        if (
            no_ansi
@@ -53,7 +53,7 @@ def cli(ctx, force, caller, no_ansi):
    except:  # pylint: disable=bare-except
        pass

    maintenance.on_platformio_start(ctx, force, caller)
    maintenance.on_platformio_start(ctx, caller)


@cli.result_callback()
@@ -100,15 +100,15 @@ def main(argv=None):
        ensure_python3(raise_exception=True)
        configure()
        cli()  # pylint: disable=no-value-for-parameter
    except SystemExit as e:
        if e.code and str(e.code).isdigit():
            exit_code = int(e.code)
    except Exception as e:  # pylint: disable=broad-except
        if not isinstance(e, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(e)
    except SystemExit as exc:
        if exc.code and str(exc.code).isdigit():
            exit_code = int(exc.code)
    except Exception as exc:  # pylint: disable=broad-except
        if not isinstance(exc, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(exc)
            error_str = "Error: "
            if isinstance(e, exception.PlatformioException):
                error_str += str(e)
            if isinstance(exc, exception.PlatformioException):
                error_str += str(exc)
            else:
                error_str += format_exc()
                error_str += """
@@ -128,7 +128,7 @@ An unexpected error occurred. Further steps:
============================================================
"""
            click.secho(error_str, fg="red", err=True)
        exit_code = int(str(e)) if str(e).isdigit() else 1
        exit_code = int(str(exc)) if str(exc).isdigit() else 1
    sys.argv = prev_sys_argv
    return exit_code
@@ -46,8 +46,8 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
    def get_refresh_token():
        try:
            return app.get_state_item("account").get("auth").get("refresh_token")
        except:  # pylint:disable=bare-except
            raise AccountNotAuthorized()
        except Exception as exc:
            raise AccountNotAuthorized() from exc

    @staticmethod
    def delete_local_session():
@@ -37,8 +37,8 @@ def team_list_cmd(orgname, json_output):
        return click.echo(json.dumps(data[orgname] if orgname else data))
    if not any(data.values()):
        return click.secho("You do not have any teams.", fg="yellow")
    for org_name in data:
        for team in data[org_name]:
    for org_name, teams in data.items():
        for team in teams:
            click.echo()
            click.secho("%s:%s" % (org_name, team.get("name")), fg="cyan")
            click.echo("-" * len("%s:%s" % (org_name, team.get("name"))))
@@ -32,7 +32,7 @@ def validate_username(value, field="username"):

def validate_email(value):
    value = str(value).strip()
    if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
    if not re.match(r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I):
        raise click.BadParameter("Invalid email address")
    return value
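The two regex lines in this hunk differ only in escaping; since `.`, `+`, and `-` lose their special meaning inside a character class, both patterns accept the same inputs. A small illustrative check with hypothetical addresses:

```python
# Illustrative check: the escaped and unescaped patterns are equivalent.
import re

UNESCAPED = r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$"
ESCAPED = r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$"

for candidate in ("dev.user+pio@example-host.org", "not-an-email", "a@b"):
    a = bool(re.match(UNESCAPED, candidate, flags=re.I))
    b = bool(re.match(ESCAPED, candidate, flags=re.I))
    assert a == b  # both patterns agree on every input
    print(candidate, a)
```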
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import getpass
|
||||
import hashlib
|
||||
import json
|
||||
@@ -60,17 +58,20 @@ DEFAULT_SETTINGS = {
|
||||
"value": get_default_projects_dir(),
|
||||
"validator": projects_dir_validate,
|
||||
},
|
||||
"enable_proxy_strict_ssl": {
|
||||
"description": "Verify the proxy server certificate against the list of supplied CAs",
|
||||
"value": True,
|
||||
},
|
||||
}
|
||||
|
||||
SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"force_option": False,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
}
|
||||
|
||||
|
||||
class State(object):
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
@@ -103,8 +104,10 @@ class State(object):
|
||||
try:
|
||||
with open(self.path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(json.dumps(self._storage))
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(
|
||||
os.path.dirname(self.path)
|
||||
) from exc
|
||||
self._unlock_state_file()
|
||||
|
||||
def _lock_state_file(self):
|
||||
@@ -113,8 +116,8 @@ class State(object):
|
||||
self._lockfile = LockFile(self.path)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path)) from exc
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
@@ -169,8 +172,8 @@ def sanitize_setting(name, value):
|
||||
value = str(value).lower() in ("true", "yes", "y", "1")
|
||||
elif isinstance(defdata["value"], int):
|
||||
value = int(value)
|
||||
except Exception:
|
||||
raise exception.InvalidSettingValue(value, name)
|
||||
except Exception as exc:
|
||||
raise exception.InvalidSettingValue(value, name) from exc
|
||||
return value
|
||||
|
||||
|
||||
@@ -227,13 +230,7 @@ def set_session_var(name, value):
|
||||
|
||||
|
||||
def is_disabled_progressbar():
|
||||
return any(
|
||||
[
|
||||
get_session_var("force_option"),
|
||||
proc.is_ci(),
|
||||
os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
|
||||
]
|
||||
)
|
||||
return os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
|
||||
|
||||
|
||||
def get_cid():
|
||||
|
||||
@@ -28,7 +28,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import compat, fs
|
||||
from platformio import app, compat, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
@@ -53,19 +53,20 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
"cc",
|
||||
"c++",
|
||||
"link",
|
||||
"piohooks",
|
||||
"pioasm",
|
||||
"platformio",
|
||||
"piobuild",
|
||||
"pioproject",
|
||||
"pioplatform",
|
||||
"piotest",
|
||||
"piotarget",
|
||||
"piomaxlen",
|
||||
"piolib",
|
||||
"pioupload",
|
||||
"piosize",
|
||||
"pioino",
|
||||
"piomisc",
|
||||
"piointegration",
|
||||
"piomaxlen",
|
||||
],
|
||||
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
@@ -78,9 +79,9 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROG_PATH="$PROGPATH", # deprecated
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
IDE_EXTRA_DATA={},
|
||||
)
|
||||
|
||||
# Declare command verbose messages
|
||||
@@ -110,6 +111,8 @@ env.Replace(
|
||||
|
||||
# Setup project optional directories
|
||||
config = env.GetProjectConfig()
|
||||
app.set_session_var("custom_project_conf", config.path)
|
||||
|
||||
env.Replace(
|
||||
PROJECT_DIR=get_project_dir(),
|
||||
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
|
||||
@@ -123,6 +126,7 @@ env.Replace(
|
||||
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
|
||||
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
|
||||
BUILD_TYPE=env.GetBuildType(),
|
||||
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
|
||||
LIBSOURCE_DIRS=[
|
||||
config.get("platformio", "lib_dir"),
|
||||
@@ -197,7 +201,7 @@ for item in env.GetExtraScripts("post"):
|
||||
if env.get("SIZETOOL") and not (
|
||||
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
):
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
env.Depends("upload", "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
@@ -209,7 +213,7 @@ if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
["upload", "program"],
|
||||
"upload",
|
||||
env.VerboseAction(
|
||||
lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol...",
|
||||
@@ -219,6 +223,8 @@ env.AddPreAction(
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
env.ProcessDelayedActions()
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
|
||||
@@ -23,15 +23,13 @@
|
||||
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
|
||||
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
|
||||
import SCons
|
||||
|
||||
from platformio.builder.tools.platformio import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
|
||||
from platformio.builder.tools.piobuild import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import SCons.Tool.asm # pylint: disable=import-error
|
||||
|
||||
#
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import sys
|
||||
@@ -23,7 +21,6 @@ from SCons.Node import FS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Export # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
@@ -76,10 +73,7 @@ def BuildProgram(env):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(
|
||||
os.path.join("$BUILD_DIR", env.subst("$PROGNAME$PROGSUFFIX")),
|
||||
env["PIOBUILDFILES"],
|
||||
)
|
||||
program = env.Program(env.subst("$PROGPATH"), env["PIOBUILDFILES"])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
AlwaysBuild(
|
||||
@@ -90,7 +84,7 @@ def BuildProgram(env):
|
||||
)
|
||||
)
|
||||
|
||||
print("Building in %s mode" % env.GetBuildType())
|
||||
print("Building in %s mode" % env["BUILD_TYPE"])
|
||||
|
||||
return program
|
||||
|
||||
@@ -125,10 +119,8 @@ def ProcessProgramDeps(env):
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
if "debug" in env.GetBuildType():
|
||||
if "debug" in env["BUILD_TYPE"]:
|
||||
env.ConfigureDebugTarget()
|
||||
if "test" in env.GetBuildType():
|
||||
env.ConfigureTestTarget()
|
||||
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
@@ -142,23 +134,22 @@ def ProcessProgramDeps(env):
|
||||
|
||||
|
||||
def ProcessProjectDeps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
projenv = project_lib_builder.env
|
||||
plb = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
env.Prepend(LIBS=plb.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
key: plb.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
if plb.env.get(key)
|
||||
}
|
||||
)
|
||||
|
||||
if "test" in env.GetBuildType():
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
build_files_before_nums = len(env.get("PIOBUILDFILES", []))
|
||||
projenv.BuildSources(
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
|
||||
)
|
||||
if len(env.get("PIOBUILDFILES", [])) - build_files_before_nums < 1:
|
||||
@@ -168,8 +159,8 @@ def ProcessProjectDeps(env):
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
if "test" not in env.GetBuildType() or env.GetProjectOption("test_build_src"):
|
||||
projenv.BuildSources(
|
||||
if "test" not in env["BUILD_TYPE"] or env.GetProjectOption("test_build_src"):
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
|
||||
)
|
||||
|
||||
@@ -180,8 +171,6 @@ def ProcessProjectDeps(env):
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
Export("projenv")
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
@@ -246,33 +235,30 @@ def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
|
||||
# get all flags and copy them to each "*FLAGS" variable
|
||||
all_flags = []
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
all_flags.extend(unflags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
parsed[key].extend(all_flags)
|
||||
|
||||
for key, unflags in parsed.items():
|
||||
for unflag in unflags:
|
||||
for current in env.get(key, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
isinstance(current, (tuple, list)) and unflag[0] == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[key].remove(current)
|
||||
unflag_scopes = tuple(set(["ASPPFLAGS"] + list(parsed.keys())))
|
||||
for scope in unflag_scopes:
|
||||
for unflags in parsed.values():
|
||||
for unflag in unflags:
|
||||
for current in env.get(scope, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
not isinstance(unflag, (tuple, list))
|
||||
and isinstance(current, (tuple, list))
|
||||
and unflag == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[scope].remove(current)
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
def StringifyMacro(env, value): # pylint: disable=unused-argument
|
||||
return '\\"%s\\"' % value.replace('"', '\\\\\\"')
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None, src_exts=None):
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
return fs.match_src_files(
|
||||
env.subst(src_dir), src_filter, SRC_BUILD_EXT + SRC_HEADER_EXT
|
||||
)
|
||||
src_exts = src_exts or (SRC_BUILD_EXT + SRC_HEADER_EXT)
|
||||
return fs.match_src_files(env.subst(src_dir), src_filter, src_exts)
|
||||
|
||||
|
||||
def CollectBuildFiles(
|
||||
@@ -285,7 +271,7 @@ def CollectBuildFiles(
|
||||
if src_dir.endswith(os.sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter):
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter, SRC_BUILD_EXT):
|
||||
_reldir = os.path.dirname(item)
|
||||
_src_dir = os.path.join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = os.path.join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
@@ -294,8 +280,7 @@ def CollectBuildFiles(
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
|
||||
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
|
||||
if not middlewares:
|
||||
@@ -307,7 +292,12 @@ def CollectBuildFiles(
|
||||
for callback, pattern in middlewares:
|
||||
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
|
||||
continue
|
||||
new_node = callback(new_node)
|
||||
if callback.__code__.co_argcount == 2:
|
||||
new_node = callback(env, new_node)
|
||||
else:
|
||||
new_node = callback(new_node)
|
||||
if not new_node:
|
||||
break
|
||||
if new_node:
|
||||
new_sources.append(new_node)
|
||||
|
||||
@@ -351,6 +341,14 @@ def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
if env.get("PIOMAINPROG"):
|
||||
sys.stderr.write(
|
||||
"Error: The main program is already constructed and the inline "
|
||||
"source files are not allowed. Please use `env.BuildLibrary(...)` "
|
||||
"or PRE-type script instead."
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[
|
||||
@@ -371,6 +369,7 @@ def generate(env):
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(StringifyMacro)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(AddBuildMiddleware)
|
||||
platformio/builder/tools/piohooks.py (new file, 50 lines)
@@ -0,0 +1,50 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def AddActionWrapper(handler):
    def wraps(env, files, action):
        if not isinstance(files, (list, tuple, set)):
            files = [files]
        known_nodes = []
        unknown_files = []
        for item in files:
            nodes = env.arg2nodes(item, env.fs.Entry)
            if nodes and nodes[0].exists():
                known_nodes.extend(nodes)
            else:
                unknown_files.append(item)
        if unknown_files:
            env.Append(**{"_PIO_DELAYED_ACTIONS": [(handler, unknown_files, action)]})
        if known_nodes:
            return handler(known_nodes, action)
        return []

    return wraps


def ProcessDelayedActions(env):
    for func, nodes, action in env.get("_PIO_DELAYED_ACTIONS", []):
        func(nodes, action)


def generate(env):
    env.Replace(**{"_PIO_DELAYED_ACTIONS": []})
    env.AddMethod(AddActionWrapper(env.AddPreAction), "AddPreAction")
    env.AddMethod(AddActionWrapper(env.AddPostAction), "AddPostAction")
    env.AddMethod(ProcessDelayedActions)


def exists(_):
    return True
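The new piohooks.py tool wraps `AddPreAction`/`AddPostAction` so that actions targeting nodes that do not exist yet are stored in `_PIO_DELAYED_ACTIONS` and replayed later by `ProcessDelayedActions()`. A minimal, hedged usage sketch of a PRE-type project extra_script relying on that behavior; the callback body is illustrative only:

```python
# Hedged sketch: assumed to live in an extra_script declared as
# `extra_scripts = pre:pre_script.py` in platformio.ini.
Import("env")

def after_build(source, target, env):
    # Runs once the firmware node has actually been built.
    print("Firmware ready:", target[0].get_abspath())

# At PRE-script time "$BUILD_DIR/${PROGNAME}.elf" does not exist yet, so the
# wrapper defers this registration instead of failing.
env.AddPostAction("$BUILD_DIR/${PROGNAME}.elf", after_build)
```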
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import glob
|
||||
import io
|
||||
@@ -26,7 +24,7 @@ import click
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
|
||||
|
||||
class InoToCPPConverter(object):
|
||||
class InoToCPPConverter:
|
||||
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import os
|
||||
@@ -25,7 +24,7 @@ from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def IsIntegrationDump(_):
|
||||
return set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
return set(["__idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def DumpIntegrationIncludes(env):
|
||||
@@ -94,7 +93,7 @@ def dump_defines(env):
|
||||
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace("\\", ""))
|
||||
defines.append(env.subst(item).replace('\\"', '"'))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env["PIOPLATFORM"] == "atmelavr":
|
||||
@@ -145,51 +144,40 @@ def _subst_cmd(env, cmd):
|
||||
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
|
||||
|
||||
|
||||
def DumpIntegrationData(env, globalenv):
|
||||
"""env here is `projenv`"""
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
projenv, globalenv = args[0:2] # pylint: disable=unbalanced-tuple-unpacking
|
||||
data = {
|
||||
"env_name": env["PIOENV"],
|
||||
"libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()],
|
||||
"defines": dump_defines(env),
|
||||
"includes": env.DumpIntegrationIncludes(),
|
||||
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path": env.subst("$PROG_PATH"),
|
||||
"svd_path": dump_svd_path(env),
|
||||
"compiler_type": env.GetCompilerType(),
|
||||
"build_type": globalenv.GetBuildType(),
|
||||
"env_name": globalenv["PIOENV"],
|
||||
"libsource_dirs": [
|
||||
globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"cxx_path": where_is_program(
|
||||
globalenv.subst("$CXX"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"gdb_path": where_is_program(
|
||||
globalenv.subst("$GDB"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"prog_path": globalenv.subst("$PROGPATH"),
|
||||
"svd_path": dump_svd_path(globalenv),
|
||||
"compiler_type": globalenv.GetCompilerType(),
|
||||
"targets": globalenv.DumpTargets(),
|
||||
"extra": dict(
|
||||
flash_images=[
|
||||
{"offset": item[0], "path": env.subst(item[1])}
|
||||
for item in env.get("FLASH_EXTRA_IMAGES", [])
|
||||
{"offset": item[0], "path": globalenv.subst(item[1])}
|
||||
for item in globalenv.get("FLASH_EXTRA_IMAGES", [])
|
||||
]
|
||||
),
|
||||
}
|
||||
data["extra"].update(
|
||||
env.get("INTEGRATION_EXTRA_DATA", env.get("IDE_EXTRA_DATA", {}))
|
||||
)
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in SCons.Defaults.processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
# export C/C++ build flags
|
||||
data.update(
|
||||
{
|
||||
"cc_flags": _subst_cmd(env_, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _subst_cmd(env_, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
}
|
||||
)
|
||||
|
||||
for key in ("IDE_EXTRA_DATA", "INTEGRATION_EXTRA_DATA"):
|
||||
data["extra"].update(globalenv.get(key, {}))
|
||||
return data
|
||||
|
||||
|
||||
@@ -198,6 +186,8 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env["IDE_EXTRA_DATA"] = {} # legacy support
|
||||
env["INTEGRATION_EXTRA_DATA"] = {}
|
||||
env.AddMethod(IsIntegrationDump)
|
||||
env.AddMethod(DumpIntegrationIncludes)
|
||||
env.AddMethod(DumpIntegrationData)
|
||||
|
||||
@@ -12,11 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=no-self-use, unused-argument, too-many-lines
|
||||
# pylint: disable=too-many-instance-attributes, too-many-public-methods
|
||||
# pylint: disable=assignment-from-no-return
|
||||
|
||||
from __future__ import absolute_import
|
||||
# pylint: disable=assignment-from-no-return, unused-argument, too-many-lines
|
||||
|
||||
import hashlib
|
||||
import io
|
||||
@@ -29,8 +26,8 @@ import SCons.Scanner # pylint: disable=import-error
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio import exception, fs
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.http import HTTPClientError, InternetIsOffline
|
||||
from platformio.package.exception import (
|
||||
@@ -42,11 +39,11 @@ from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class LibBuilderFactory(object):
|
||||
class LibBuilderFactory:
|
||||
@staticmethod
|
||||
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
|
||||
clsname = "UnknownLibBuilder"
|
||||
@@ -95,7 +92,7 @@ class LibBuilderFactory(object):
|
||||
return ["mbed"]
|
||||
for fname in files:
|
||||
if not fs.path_endswith_ext(
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
|
||||
fname, piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT
|
||||
):
|
||||
continue
|
||||
with io.open(
|
||||
@@ -144,7 +141,11 @@ class LibBuilderBase:
|
||||
|
||||
self._deps_are_processed = False
|
||||
self._circular_deps = []
|
||||
self._processed_files = []
|
||||
self._processed_search_files = []
|
||||
|
||||
# pass a macro to the projenv + libs
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
self.env.Append(CPPDEFINES=["PIO_UNIT_TESTING"])
|
||||
|
||||
# reset source filter, could be overridden with extra script
|
||||
self.env["SRC_FILTER"] = ""
|
||||
@@ -155,20 +156,27 @@ class LibBuilderBase:
|
||||
def __repr__(self):
|
||||
return "%s(%r)" % (self.__class__, self.path)
|
||||
|
||||
def __contains__(self, path):
|
||||
p1 = self.path
|
||||
p2 = path
|
||||
def __contains__(self, child_path):
|
||||
return self.is_common_builder(self.path, child_path)
|
||||
|
||||
def is_common_builder(self, root_path, child_path):
|
||||
if IS_WINDOWS:
|
||||
p1 = p1.lower()
|
||||
p2 = p2.lower()
|
||||
if p1 == p2:
|
||||
root_path = root_path.lower()
|
||||
child_path = child_path.lower()
|
||||
if root_path == child_path:
|
||||
return True
|
||||
if os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep:
|
||||
if (
|
||||
os.path.commonprefix([root_path + os.path.sep, child_path])
|
||||
== root_path + os.path.sep
|
||||
):
|
||||
return True
|
||||
# try to resolve paths
|
||||
p1 = os.path.os.path.realpath(p1)
|
||||
p2 = os.path.os.path.realpath(p2)
|
||||
return os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep
|
||||
root_path = os.path.realpath(root_path)
|
||||
child_path = os.path.realpath(child_path)
|
||||
return (
|
||||
os.path.commonprefix([root_path + os.path.sep, child_path])
|
||||
== root_path + os.path.sep
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -189,7 +197,7 @@ class LibBuilderBase:
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return piotool.SRC_FILTER_DEFAULT + [
|
||||
return piobuild.SRC_FILTER_DEFAULT + [
|
||||
"-<example%s>" % os.sep,
|
||||
"-<examples%s>" % os.sep,
|
||||
"-<test%s>" % os.sep,
|
||||
@@ -318,21 +326,14 @@ class LibBuilderBase:
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
items = [
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
|
||||
]
|
||||
include_dir = self.include_dir
|
||||
if include_dir:
|
||||
items.extend(
|
||||
[
|
||||
os.path.join(include_dir, item)
|
||||
for item in self.env.MatchSourceFiles(include_dir)
|
||||
]
|
||||
for item in self.env.MatchSourceFiles(
|
||||
self.src_dir, self.src_filter, piobuild.SRC_BUILD_EXT
|
||||
)
|
||||
return items
|
||||
]
|
||||
|
||||
def _get_found_includes( # pylint: disable=too-many-branches
|
||||
def get_implicit_includes( # pylint: disable=too-many-branches
|
||||
self, search_files=None
|
||||
):
|
||||
# all include directories
|
||||
@@ -340,7 +341,7 @@ class LibBuilderBase:
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE = [
|
||||
self.env.Dir(d)
|
||||
for d in ProjectAsLibBuilder(
|
||||
self.envorigin, "$PROJECT_DIR"
|
||||
self.envorigin, "$PROJECT_DIR", export_projenv=False
|
||||
).get_include_dirs()
|
||||
]
|
||||
for lb in self.env.GetLibBuilders():
|
||||
@@ -353,53 +354,55 @@ class LibBuilderBase:
|
||||
include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)
|
||||
|
||||
result = []
|
||||
for path in search_files or []:
|
||||
if path in self._processed_files:
|
||||
search_files = search_files or []
|
||||
while search_files:
|
||||
node = self.env.File(search_files.pop(0))
|
||||
if node.get_abspath() in self._processed_search_files:
|
||||
continue
|
||||
self._processed_files.append(path)
|
||||
self._processed_search_files.append(node.get_abspath())
|
||||
|
||||
try:
|
||||
assert "+" in self.lib_ldf_mode
|
||||
candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
self.env.File(path),
|
||||
node,
|
||||
self.env,
|
||||
tuple(include_dirs),
|
||||
depth=self.CCONDITIONAL_SCANNER_DEPTH,
|
||||
)
|
||||
# mark candidates already processed via Conditional Scanner
|
||||
self._processed_files.extend(
|
||||
[
|
||||
c.get_abspath()
|
||||
for c in candidates
|
||||
if c.get_abspath() not in self._processed_files
|
||||
]
|
||||
)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
if self.verbose and "+" in self.lib_ldf_mode:
|
||||
sys.stderr.write(
|
||||
"Warning! Classic Pre Processor is used for `%s`, "
|
||||
"advanced has failed with `%s`\n" % (path, e)
|
||||
"advanced has failed with `%s`\n" % (node.get_abspath(), exc)
|
||||
)
|
||||
candidates = LibBuilderBase.CLASSIC_SCANNER(
|
||||
self.env.File(path), self.env, tuple(include_dirs)
|
||||
node, self.env, tuple(include_dirs)
|
||||
)
|
||||
|
||||
# print(path, [c.get_abspath() for c in candidates])
|
||||
# print(node.get_abspath(), [c.get_abspath() for c in candidates])
|
||||
for item in candidates:
|
||||
item_path = item.get_abspath()
|
||||
# process internal files recursively
|
||||
if (
|
||||
item_path not in self._processed_search_files
|
||||
and item_path not in search_files
|
||||
and item_path in self
|
||||
):
|
||||
search_files.append(item_path)
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
continue
|
||||
_h_path = item.get_abspath()
|
||||
if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
|
||||
if not fs.path_endswith_ext(item_path, piobuild.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[: _h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
|
||||
if not os.path.isfile("%s.%s" % (_f_part, ext)):
|
||||
item_fname = item_path[: item_path.rindex(".")]
|
||||
for ext in piobuild.SRC_C_EXT + piobuild.SRC_CXX_EXT:
|
||||
if not os.path.isfile("%s.%s" % (item_fname, ext)):
|
||||
continue
|
||||
_c_path = self.env.File("%s.%s" % (_f_part, ext))
|
||||
if _c_path not in result:
|
||||
result.append(_c_path)
|
||||
item_c_node = self.env.File("%s.%s" % (item_fname, ext))
|
||||
if item_c_node not in result:
|
||||
result.append(item_c_node)
|
||||
|
||||
return result
|
||||
|
||||
@@ -414,12 +417,13 @@ class LibBuilderBase:
|
||||
search_files = self.get_search_files()
|
||||
|
||||
lib_inc_map = {}
|
||||
for inc in self._get_found_includes(search_files):
|
||||
for inc in self.get_implicit_includes(search_files):
|
||||
inc_path = inc.get_abspath()
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if inc.get_abspath() in lb:
|
||||
if inc_path in lb:
|
||||
if lb not in lib_inc_map:
|
||||
lib_inc_map[lb] = []
|
||||
lib_inc_map[lb].append(inc.get_abspath())
|
||||
lib_inc_map[lb].append(inc_path)
|
||||
break
|
||||
|
||||
for lb, lb_search_files in lib_inc_map.items():
|
||||
@@ -554,7 +558,7 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
|
||||
src_filter = []
|
||||
is_utility = os.path.isdir(os.path.join(self.path, "utility"))
|
||||
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
|
||||
for ext in piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT:
|
||||
# arduino ide ignores files with .asm or .ASM extensions
|
||||
if ext.lower() == "asm":
|
||||
continue
|
||||
@@ -585,10 +589,14 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
return "chain+"
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["arduino", "energia"])
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=["arduino", "energia"])
|
||||
)
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
|
||||
return PackageCompatibility(platforms=platforms).is_compatible(
|
||||
PackageCompatibility(platforms=self._manifest.get("platforms"))
|
||||
)
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
@@ -643,7 +651,9 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return include_dirs
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["mbed"])
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=["mbed"])
|
||||
)
|
||||
|
||||
def process_extra_options(self):
|
||||
self._process_mbed_lib_confs()
|
||||
@@ -768,6 +778,24 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
return os.path.abspath(self._manifest.get("build").get("includeDir"))
|
||||
return LibBuilderBase.include_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = super().get_include_dirs()
|
||||
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if (
|
||||
"build" not in self._manifest
|
||||
and self._has_arduino_manifest()
|
||||
and not os.path.isdir(os.path.join(self.path, "src"))
|
||||
and os.path.isdir(os.path.join(self.path, "utility"))
|
||||
):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if "srcDir" in self._manifest.get("build", {}):
|
||||
@@ -838,36 +866,33 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
)
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
|
||||
return PackageCompatibility(platforms=platforms).is_compatible(
|
||||
PackageCompatibility(platforms=self._manifest.get("platforms"))
|
||||
)
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, self._manifest.get("frameworks") or ["*"])
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = super().get_include_dirs()
|
||||
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if (
|
||||
"build" not in self._manifest
|
||||
and self._has_arduino_manifest()
|
||||
and not os.path.isdir(os.path.join(self.path, "src"))
|
||||
and os.path.isdir(os.path.join(self.path, "utility"))
|
||||
):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=self._manifest.get("frameworks"))
|
||||
)
|
||||
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
def __init__(self, env, *args, **kwargs):
|
||||
export_projenv = kwargs.get("export_projenv", True)
|
||||
if "export_projenv" in kwargs:
|
||||
del kwargs["export_projenv"]
|
||||
# backup original value, will be reset in base.__init__
|
||||
project_src_filter = env.get("SRC_FILTER")
|
||||
super().__init__(env, *args, **kwargs)
|
||||
self.env["SRC_FILTER"] = project_src_filter
|
||||
if export_projenv:
|
||||
env.Export(dict(projenv=self.env))
|
||||
|
||||
def __contains__(self, child_path):
|
||||
for root_path in (self.include_dir, self.src_dir, self.test_dir):
|
||||
if root_path and self.is_common_builder(root_path, child_path):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
@@ -878,21 +903,18 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
def src_dir(self):
|
||||
return self.env.subst("$PROJECT_SRC_DIR")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = []
|
||||
project_include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
|
||||
if os.path.isdir(project_include_dir):
|
||||
include_dirs.append(project_include_dir)
|
||||
for include_dir in super().get_include_dirs():
|
||||
if include_dir not in include_dirs:
|
||||
include_dirs.append(include_dir)
|
||||
return include_dirs
|
||||
@property
|
||||
def test_dir(self):
|
||||
return self.env.subst("$PROJECT_TEST_DIR")
|
||||
|
||||
def get_search_files(self):
|
||||
items = []
|
||||
build_type = self.env["BUILD_TYPE"]
|
||||
# project files
|
||||
items = super().get_search_files()
|
||||
if "test" not in build_type or self.env.GetProjectOption("test_build_src"):
|
||||
items.extend(super().get_search_files())
|
||||
# test files
|
||||
if "test" in self.env.GetBuildType():
|
||||
if "test" in build_type:
|
||||
items.extend(
|
||||
[
|
||||
os.path.join("$PROJECT_TEST_DIR", item)
|
||||
@@ -960,8 +982,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
try:
|
||||
lm.install(spec)
|
||||
did_install = True
|
||||
except (HTTPClientError, UnknownPackageError, InternetIsOffline) as e:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
except (HTTPClientError, UnknownPackageError, InternetIsOffline) as exc:
|
||||
click.secho("Warning! %s" % exc, fg="yellow")
|
||||
|
||||
# reset cache
|
||||
if did_install:
|
||||
@@ -1030,7 +1052,7 @@ def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)))
|
||||
sys.stderr.write("Platform incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
if compat_mode in ("soft", "strict") and not lb.is_frameworks_compatible(
|
||||
env.get("PIOFRAMEWORK", [])
|
||||
env.get("PIOFRAMEWORK", "__noframework__")
|
||||
):
|
||||
if verbose:
|
||||
sys.stderr.write("Framework incompatible library %s\n" % lb.path)
|
||||
@@ -1139,6 +1161,10 @@ def ConfigureProjectLibBuilder(env):
|
||||
_print_deps_tree(lb, level + 1)
|
||||
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
project.env.ConfigureTestTarget()
|
||||
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project) # pylint: disable=no-member
|
||||
|
||||
click.echo("LDF: Library Dependency Finder -> https://bit.ly/configure-pio-ldf")
|
||||
|
||||
@@ -12,13 +12,12 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
|
||||
from SCons.Platform import TempFileMunge # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
@@ -70,11 +69,13 @@ def _file_long_data(env, data):
|
||||
return tmp_file
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
def exists(env):
|
||||
return "compiledb" not in COMMAND_LINE_TARGETS and not env.IsIntegrationDump()
|
||||
|
||||
|
||||
def generate(env):
|
||||
if not exists(env):
|
||||
return env
|
||||
kwargs = dict(
|
||||
_long_sources_hook=long_sources_hook,
|
||||
TEMPFILE=TempFileMunge,
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
@@ -51,8 +49,8 @@ def BoardConfig(env, board=None):
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
return p.board_config(board)
|
||||
except (AssertionError, UnknownBoard) as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
except (AssertionError, UnknownBoard) as exc:
|
||||
sys.stderr.write("Error: %s\n" % str(exc))
|
||||
env.Exit(1)
|
||||
return None
|
||||
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from platformio.compat import MISSING
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
@@ -14,8 +14,6 @@
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import environ, makedirs, remove
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
|
||||
from SCons.Action import Action # pylint: disable=import-error
|
||||
@@ -43,26 +41,18 @@ def PioClean(env, clean_all=False):
|
||||
|
||||
def _clean_dir(path):
|
||||
clean_rel_path = _relpath(path)
|
||||
for root, _, files in os.walk(path):
|
||||
for f in files:
|
||||
dst = os.path.join(root, f)
|
||||
os.remove(dst)
|
||||
print(
|
||||
"Removed %s"
|
||||
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
|
||||
)
|
||||
print(f"Removing {clean_rel_path}")
|
||||
fs.rmtree(path)
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
libdeps_dir = env.subst("$PROJECT_LIBDEPS_DIR")
|
||||
libdeps_dir = env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
|
||||
if os.path.isdir(build_dir):
|
||||
_clean_dir(build_dir)
|
||||
fs.rmtree(build_dir)
|
||||
else:
|
||||
print("Build environment is clean")
|
||||
|
||||
if clean_all and os.path.isdir(libdeps_dir):
|
||||
_clean_dir(libdeps_dir)
|
||||
fs.rmtree(libdeps_dir)
|
||||
|
||||
print("Done cleaning")
|
||||
|
||||
@@ -104,19 +94,6 @@ def DumpTargets(env):
|
||||
t["group"] == "Platform" for t in targets.values()
|
||||
):
|
||||
targets["upload"] = dict(name="upload", group="Platform", title="Upload")
|
||||
targets["compiledb"] = dict(
|
||||
name="compiledb",
|
||||
title="Compilation Database",
|
||||
description="Generate compilation database `compile_commands.json`",
|
||||
group="Advanced",
|
||||
)
|
||||
targets["clean"] = dict(name="clean", title="Clean", group="General")
|
||||
targets["cleanall"] = dict(
|
||||
name="cleanall",
|
||||
title="Clean All",
|
||||
group="General",
|
||||
description="Clean a build environment and installed library dependencies",
|
||||
)
|
||||
return list(targets.values())
|
||||
|
||||
|
||||
|
||||
@@ -12,19 +12,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.test.result import TestSuite
|
||||
from platformio.test.runners.factory import TestRunnerFactory
|
||||
|
||||
|
||||
def ConfigureTestTarget(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "PIO_UNIT_TESTING"],
|
||||
PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piotool.SRC_BUILD_EXT],
|
||||
CPPDEFINES=["UNIT_TEST"], # deprecated, use PIO_UNIT_TESTING
|
||||
PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piobuild.SRC_BUILD_EXT],
|
||||
)
|
||||
env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])
|
||||
|
||||
@@ -38,7 +36,7 @@ def ConfigureTestTarget(env):
|
||||
env.Prepend(
|
||||
PIOTEST_SRC_FILTER=[
|
||||
f"+<{test_name}{os.path.sep}*.{ext}>"
|
||||
for ext in piotool.SRC_BUILD_EXT
|
||||
for ext in piobuild.SRC_BUILD_EXT
|
||||
],
|
||||
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
|
||||
)
|
||||
|
||||
@@ -14,8 +14,6 @@
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@@ -26,8 +24,8 @@ from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio.device.finder import find_mbed_disk, find_serial_port, is_pattern_port
|
||||
from platformio.device.list import list_serial_ports
|
||||
from platformio.device.finder import SerialPortFinder, find_mbed_disk, is_pattern_port
|
||||
from platformio.device.list.util import list_serial_ports
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
@@ -109,14 +107,15 @@ def AutodetectUploadPort(*args, **kwargs):
|
||||
else:
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
sys.stderr.write("\n%s\n\n" % e)
|
||||
except exception.InvalidUdevRules as exc:
|
||||
sys.stderr.write("\n%s\n\n" % exc)
|
||||
env.Replace(
|
||||
UPLOAD_PORT=find_serial_port(
|
||||
initial_port=initial_port,
|
||||
UPLOAD_PORT=SerialPortFinder(
|
||||
board_config=env.BoardConfig() if "BOARD" in env else None,
|
||||
upload_protocol=upload_protocol,
|
||||
)
|
||||
prefer_gdb_port="blackmagic" in upload_protocol,
|
||||
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)),
|
||||
).find(initial_port)
|
||||
)
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
|
||||
@@ -23,7 +23,7 @@ from platformio.package.lockfile import LockFile
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class ContentCache(object):
|
||||
class ContentCache:
|
||||
def __init__(self, namespace=None):
|
||||
self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
|
||||
self._db_path = os.path.join(self.cache_dir, "db.data")
|
||||
|
||||
@@ -198,7 +198,7 @@ def print_processing_header(tool, envname, envdump):
|
||||
"Checking %s > %s (%s)"
|
||||
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
|
||||
)
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from platformio.project.helpers import get_project_dir
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
|
||||
class DefectItem(object):
|
||||
class DefectItem:
|
||||
|
||||
SEVERITY_HIGH = 1
|
||||
SEVERITY_MEDIUM = 2
|
||||
|
||||
@@ -18,7 +18,7 @@ from platformio.check.tools.cppcheck import CppcheckCheckTool
|
||||
from platformio.check.tools.pvsstudio import PvsStudioCheckTool
|
||||
|
||||
|
||||
class CheckToolFactory(object):
|
||||
class CheckToolFactory:
|
||||
@staticmethod
|
||||
def new(tool, project_dir, config, envname, options):
|
||||
cls = None
|
||||
|
||||
@@ -25,7 +25,7 @@ from platformio.package.meta import PackageSpec
|
||||
from platformio.project.helpers import load_build_metadata
|
||||
|
||||
|
||||
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, project_dir, config, envname, options):
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
|
||||
@@ -67,8 +67,8 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
|
||||
src_files = []
|
||||
for scope in project_files:
|
||||
src_files.extend(project_files[scope])
|
||||
for items in project_files.values():
|
||||
src_files.extend(items)
|
||||
|
||||
cmd.extend(flags + src_files + ["--"])
|
||||
cmd.extend(
|
||||
|
||||
@@ -41,7 +41,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
grpboards[board["platform"]] = []
|
||||
grpboards[board["platform"]].append(board)
|
||||
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
|
||||
@@ -19,7 +19,7 @@ import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, fs
|
||||
from platformio import fs
|
||||
from platformio.exception import CIBuildEnvsEmpty
|
||||
from platformio.project.commands.init import project_init_cmd, validate_boards
|
||||
from platformio.project.config import ProjectConfig
|
||||
@@ -39,8 +39,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
try:
|
||||
assert invalid_path is None
|
||||
return value
|
||||
except AssertionError:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path)
|
||||
except AssertionError as exc:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path) from exc
|
||||
|
||||
|
||||
@click.command("ci", short_help="Continuous Integration")
|
||||
@@ -62,6 +62,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
),
|
||||
)
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
@@ -74,17 +75,15 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
keep_build_dir,
|
||||
project_conf,
|
||||
project_option,
|
||||
environments,
|
||||
verbose,
|
||||
):
|
||||
|
||||
if not src and os.getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
if not src:
|
||||
raise click.BadParameter("Missing argument 'src'")
|
||||
|
||||
try:
|
||||
app.set_session_var("force_option", True)
|
||||
|
||||
if not keep_build_dir and os.path.isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
if not os.path.isdir(build_dir):
|
||||
@@ -115,7 +114,9 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
)
|
||||
|
||||
# process project
|
||||
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
|
||||
ctx.invoke(
|
||||
cmd_run, project_dir=build_dir, environment=environments, verbose=verbose
|
||||
)
|
||||
finally:
|
||||
if not keep_build_dir:
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from platformio.device.filters.base import (
|
||||
from platformio.device.monitor.filters.base import (
|
||||
DeviceMonitorFilterBase as DeviceMonitorFilter,
|
||||
)
|
||||
|
||||
@@ -17,16 +17,18 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from urllib.parse import quote
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio import exception, fs
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
|
||||
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.exception import NotGlobalLibDir
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.proc import is_ci
|
||||
@@ -43,6 +45,20 @@ def get_project_global_lib_dir():
|
||||
return ProjectConfig.get_instance().get("platformio", "globallib_dir")
|
||||
|
||||
|
||||
def invoke_command(ctx, cmd, **kwargs):
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
cmd_kwargs = kwargs.copy()
|
||||
if is_platformio_project(input_dir):
|
||||
cmd_kwargs["project_dir"] = input_dir
|
||||
cmd_kwargs["environments"] = project_environments
|
||||
else:
|
||||
cmd_kwargs["global"] = True
|
||||
cmd_kwargs["storage_dir"] = input_dir
|
||||
ctx.invoke(cmd, **cmd_kwargs)
|
||||
|
||||
|
||||
@click.group(short_help="Library manager", hidden=True)
|
||||
@click.option(
|
||||
"-d",
|
||||
@@ -146,55 +162,14 @@ def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
"the next releases. \nPlease use `pio pkg install` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
|
||||
|
||||
installed_pkgs = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not silent and (libraries or storage_dir in storage_libdeps):
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
|
||||
if libraries:
|
||||
installed_pkgs = {
|
||||
library: lm.install(library, force=force) for library in libraries
|
||||
}
|
||||
|
||||
elif storage_dir in storage_libdeps:
|
||||
for library in storage_libdeps[storage_dir]:
|
||||
lm.install(library, force=force)
|
||||
|
||||
if save and installed_pkgs:
|
||||
_save_deps(ctx, installed_pkgs)
|
||||
|
||||
|
||||
def _save_deps(ctx, pkgs, action="add"):
|
||||
specs = []
|
||||
for library, pkg in pkgs.items():
|
||||
spec = PackageSpec(library)
|
||||
if spec.external:
|
||||
specs.append(spec)
|
||||
else:
|
||||
specs.append(
|
||||
PackageSpec(
|
||||
owner=pkg.metadata.spec.owner,
|
||||
name=pkg.metadata.spec.name,
|
||||
requirements=spec.requirements
|
||||
or (
|
||||
("^%s" % pkg.metadata.version)
|
||||
if not pkg.metadata.version.build
|
||||
else pkg.metadata.version
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
if not is_platformio_project(input_dir):
|
||||
continue
|
||||
save_project_libdeps(input_dir, specs, project_environments, action=action)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_install_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
force=force,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Remove libraries")
|
||||
@@ -214,16 +189,13 @@ def lib_uninstall(ctx, libraries, save, silent):
|
||||
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
uninstalled_pkgs = {}
|
||||
for storage_dir in storage_dirs:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
uninstalled_pkgs = {library: lm.uninstall(library) for library in libraries}
|
||||
|
||||
if save and uninstalled_pkgs:
|
||||
_save_deps(ctx, uninstalled_pkgs, action="remove")
|
||||
invoke_command(
|
||||
ctx,
|
||||
package_uninstall_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed libraries")
|
||||
@@ -255,60 +227,51 @@ def lib_update( # pylint: disable=too-many-arguments
|
||||
"the next releases. \nPlease use `pio pkg update` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_update_cmd,
|
||||
libraries=libraries,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, [])
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
_libraries = libraries or lib_deps or lm.get_installed()
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in _libraries:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(library, PackageItem):
|
||||
pkg = library
|
||||
else:
|
||||
spec = PackageSpec(library)
|
||||
pkg = lm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = lm.outdated(pkg, spec)
|
||||
if not outdated.is_outdated(allow_incompatible=True):
|
||||
continue
|
||||
manifest = lm.legacy_load_manifest(pkg)
|
||||
manifest["versionWanted"] = (
|
||||
str(outdated.wanted) if outdated.wanted else None
|
||||
)
|
||||
manifest["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(manifest)
|
||||
json_result[storage_dir] = result
|
||||
else:
|
||||
for library in _libraries:
|
||||
to_spec = (
|
||||
None if isinstance(library, PackageItem) else PackageSpec(library)
|
||||
)
|
||||
try:
|
||||
lm.update(library, to_spec=to_spec)
|
||||
except UnknownPackageError as e:
|
||||
if library not in lib_deps:
|
||||
raise e
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
result = []
|
||||
for library in _libraries:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(library, PackageItem):
|
||||
pkg = library
|
||||
else:
|
||||
spec = PackageSpec(library)
|
||||
pkg = lm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = lm.outdated(pkg, spec)
|
||||
if not outdated.is_outdated(allow_incompatible=True):
|
||||
continue
|
||||
manifest = lm.legacy_load_manifest(pkg)
|
||||
manifest["versionWanted"] = (
|
||||
str(outdated.wanted) if outdated.wanted else None
|
||||
)
|
||||
)
|
||||
manifest["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(manifest)
|
||||
|
||||
return True
|
||||
json_result[storage_dir] = result
|
||||
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@@ -321,29 +284,18 @@ def lib_list(ctx, json_output):
|
||||
"the next releases. \nPlease use `pio pkg list` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(ctx, package_list_cmd, only_libraries=True)
|
||||
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
items = lm.legacy_get_installed()
|
||||
if json_output:
|
||||
json_result[storage_dir] = items
|
||||
elif items:
|
||||
for item in sorted(items, key=lambda i: i["name"]):
|
||||
print_lib_item(item)
|
||||
else:
|
||||
click.echo("No items found")
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
json_result[storage_dir] = lm.legacy_get_installed()
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
|
||||
return True
|
||||
)
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for a library")
|
||||
@@ -363,14 +315,10 @@ def lib_list(ctx, json_output):
|
||||
is_flag=True,
|
||||
help="Do not prompt, automatically paginate with delay",
|
||||
)
|
||||
def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
@click.pass_context
|
||||
def lib_search( # pylint: disable=unused-argument
|
||||
ctx, query, json_output, page, noninteractive, **filters
|
||||
):
|
||||
if not query:
|
||||
query = []
|
||||
if not isinstance(query, list):
|
||||
@@ -380,72 +328,30 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
query.append("type:library")
|
||||
return ctx.invoke(package_search_cmd, query=" ".join(query), page=page)
|
||||
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data(
|
||||
"get",
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=page),
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(result))
|
||||
return
|
||||
|
||||
if result["total"] == 0:
|
||||
click.secho(
|
||||
"Nothing has been found by your request\n"
|
||||
"Try a less-specific search or use truncation (or wildcard) "
|
||||
"operator",
|
||||
fg="yellow",
|
||||
nl=False,
|
||||
)
|
||||
click.secho(" *", fg="green")
|
||||
click.secho("For example: DS*, PCA*, DHT* and etc.\n", fg="yellow")
|
||||
click.echo(
|
||||
"For more examples and advanced search syntax, please use documentation:"
|
||||
)
|
||||
click.secho(
|
||||
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
fg="cyan",
|
||||
)
|
||||
return
|
||||
|
||||
click.secho(
|
||||
"Found %d libraries:\n" % result["total"],
|
||||
fg="green" if result["total"] else "yellow",
|
||||
)
|
||||
|
||||
while True:
|
||||
for item in result["items"]:
|
||||
print_lib_item(item)
|
||||
|
||||
if int(result["page"]) * int(result["perpage"]) >= int(result["total"]):
|
||||
break
|
||||
|
||||
if noninteractive:
|
||||
click.echo()
|
||||
click.secho(
|
||||
"Loading next %d libraries... Press Ctrl+C to stop!"
|
||||
% result["perpage"],
|
||||
fg="yellow",
|
||||
)
|
||||
click.echo()
|
||||
time.sleep(5)
|
||||
elif not click.confirm("Show next libraries?"):
|
||||
break
|
||||
result = regclient.fetch_json_data(
|
||||
"get",
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=int(result["page"]) + 1),
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
|
||||
@cli.command("builtin", short_help="List built-in libraries")
|
||||
@click.option("--storage", multiple=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_builtin(storage, json_output):
|
||||
items = get_builtin_libs(storage)
|
||||
items = LibraryPackageManager.get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
@@ -465,13 +371,16 @@ def lib_builtin(storage, json_output):
|
||||
@cli.command("show", short_help="Show detailed info about a library")
|
||||
@click.argument("library", metavar="[LIBRARY]")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_show(library, json_output):
|
||||
@click.pass_context
|
||||
def lib_show(ctx, library, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg show` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_show_cmd, pkg_type="library", spec=library)
|
||||
|
||||
lm = LibraryPackageManager()
|
||||
lm.set_log_level(logging.ERROR if json_output else logging.DEBUG)
|
||||
lib_id = lm.reveal_registry_package_id(library)
|
||||
@@ -479,86 +388,7 @@ def lib_show(library, json_output):
|
||||
lib = regclient.fetch_json_data(
|
||||
"get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h"
|
||||
)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
title = "{ownername}/{name}".format(**lib)
|
||||
click.secho(title, fg="cyan")
|
||||
click.echo("=" * len(title))
|
||||
click.echo(lib["description"])
|
||||
click.echo()
|
||||
|
||||
click.secho("ID: %d" % lib["id"])
|
||||
click.echo(
|
||||
"Version: %s, released %s"
|
||||
% (
|
||||
lib["version"]["name"],
|
||||
util.parse_datetime(lib["version"]["released"]).strftime("%c"),
|
||||
)
|
||||
)
|
||||
click.echo("Manifest: %s" % lib["confurl"])
|
||||
for key in ("homepage", "repository", "license"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
if isinstance(lib[key], list):
|
||||
click.echo("%s: %s" % (key.capitalize(), ", ".join(lib[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.capitalize(), lib[key]))
|
||||
|
||||
blocks = []
|
||||
|
||||
_authors = []
|
||||
for author in lib.get("authors", []):
|
||||
_data = []
|
||||
for key in ("name", "email", "url", "maintainer"):
|
||||
if not author.get(key):
|
||||
continue
|
||||
if key == "email":
|
||||
_data.append("<%s>" % author[key])
|
||||
elif key == "maintainer":
|
||||
_data.append("(maintainer)")
|
||||
else:
|
||||
_data.append(author[key])
|
||||
_authors.append(" ".join(_data))
|
||||
if _authors:
|
||||
blocks.append(("Authors", _authors))
|
||||
|
||||
blocks.append(("Keywords", lib["keywords"]))
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
blocks.append(("Compatible %s" % key, [i["title"] for i in lib[key]]))
|
||||
blocks.append(("Headers", lib["headers"]))
|
||||
blocks.append(("Examples", lib["examples"]))
|
||||
blocks.append(
|
||||
(
|
||||
"Versions",
|
||||
[
|
||||
"%s, released %s"
|
||||
% (v["name"], util.parse_datetime(v["released"]).strftime("%c"))
|
||||
for v in lib["versions"]
|
||||
],
|
||||
)
|
||||
)
|
||||
blocks.append(
|
||||
(
|
||||
"Unique Downloads",
|
||||
[
|
||||
"Today: %s" % lib["dlstats"]["day"],
|
||||
"Week: %s" % lib["dlstats"]["week"],
|
||||
"Month: %s" % lib["dlstats"]["month"],
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
for (title, rows) in blocks:
|
||||
click.echo()
|
||||
click.secho(title, bold=True)
|
||||
click.echo("-" * len(title))
|
||||
for row in rows:
|
||||
click.echo(row)
|
||||
|
||||
return True
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
|
||||
@cli.command("register", short_help="Deprecated")
|
||||
@@ -572,76 +402,18 @@ def lib_register(config_url): # pylint: disable=unused-argument
|
||||
@cli.command("stats", short_help="Library Registry Statistics")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit "
|
||||
"https://registry.platformio.org\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return None
|
||||
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data("get", "/v2/lib/stats", x_cache_valid="1h")
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
tabular_data = [
|
||||
(
|
||||
click.style(item["name"], fg="cyan"),
|
||||
util.parse_datetime(item["date"]).strftime("%c"),
|
||||
"https://platformio.org/lib/show/%s/%s"
|
||||
% (item["id"], quote(item["name"])),
|
||||
)
|
||||
for item in result.get(key, [])
|
||||
]
|
||||
table = tabulate(
|
||||
tabular_data,
|
||||
headers=[click.style("RECENTLY " + key.upper(), bold=True), "Date", "URL"],
|
||||
)
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
for key in ("lastkeywords", "topkeywords"):
|
||||
tabular_data = [
|
||||
(
|
||||
click.style(name, fg="cyan"),
|
||||
"https://platformio.org/lib/search?query=" + quote("keyword:%s" % name),
|
||||
)
|
||||
for name in result.get(key, [])
|
||||
]
|
||||
table = tabulate(
|
||||
tabular_data,
|
||||
headers=[
|
||||
click.style(
|
||||
("RECENT" if key == "lastkeywords" else "POPULAR") + " KEYWORDS",
|
||||
bold=True,
|
||||
),
|
||||
"URL",
|
||||
],
|
||||
)
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth", "Month")):
|
||||
tabular_data = [
|
||||
(
|
||||
click.style(item["name"], fg="cyan"),
|
||||
"https://platformio.org/lib/show/%s/%s"
|
||||
% (item["id"], quote(item["name"])),
|
||||
)
|
||||
for item in result.get(key, [])
|
||||
]
|
||||
table = tabulate(
|
||||
tabular_data,
|
||||
headers=[click.style("FEATURED: " + title.upper(), bold=True), "URL"],
|
||||
)
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def print_storage_header(storage_dirs, storage_dir):
|
||||
if storage_dirs and storage_dirs[0] != storage_dir:
|
||||
click.echo("")
|
||||
click.echo(
|
||||
click.style("Library Storage: ", bold=True)
|
||||
+ click.style(storage_dir, fg="blue")
|
||||
)
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
|
||||
def print_lib_item(item):
|
||||
@@ -1,104 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import ci_strings_are_equal
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import InvalidProjectConfError
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def get_builtin_libs(storage_names=None):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
|
||||
items = []
|
||||
storage_names = storage_names or []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
p = PlatformFactory.new(pkg)
|
||||
for storage in p.get_lib_storages():
|
||||
if storage_names and storage["name"] not in storage_names:
|
||||
continue
|
||||
lm = LibraryPackageManager(storage["path"])
|
||||
items.append(
|
||||
{
|
||||
"name": storage["name"],
|
||||
"path": storage["path"],
|
||||
"items": lm.legacy_get_installed(),
|
||||
}
|
||||
)
|
||||
return items
|
||||
|
||||
|
||||
def is_builtin_lib(name):
|
||||
for storage in get_builtin_libs():
|
||||
for lib in storage["items"]:
|
||||
if lib.get("name") == name:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def ignore_deps_by_specs(deps, specs):
|
||||
result = []
|
||||
for dep in deps:
|
||||
depspec = PackageSpec(dep)
|
||||
if depspec.external:
|
||||
result.append(dep)
|
||||
continue
|
||||
ignore_conditions = []
|
||||
for spec in specs:
|
||||
if depspec.owner:
|
||||
ignore_conditions.append(
|
||||
ci_strings_are_equal(depspec.owner, spec.owner)
|
||||
and ci_strings_are_equal(depspec.name, spec.name)
|
||||
)
|
||||
else:
|
||||
ignore_conditions.append(ci_strings_are_equal(depspec.name, spec.name))
|
||||
if not any(ignore_conditions):
|
||||
result.append(dep)
|
||||
return result
|
||||
|
||||
|
||||
def save_project_libdeps(project_dir, specs, environments=None, action="add"):
|
||||
config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
|
||||
config.validate(environments)
|
||||
for env in config.envs():
|
||||
if environments and env not in environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
candidates = []
|
||||
try:
|
||||
candidates = ignore_deps_by_specs(
|
||||
config.get("env:" + env, "lib_deps"), specs
|
||||
)
|
||||
except InvalidProjectConfError:
|
||||
pass
|
||||
if action == "add":
|
||||
candidates.extend(spec.as_dependency() for spec in specs)
|
||||
if candidates:
|
||||
result = []
|
||||
for item in candidates:
|
||||
item = item.strip()
|
||||
if item and item not in result:
|
||||
result.append(item)
|
||||
config.set("env:" + env, "lib_deps", result)
|
||||
elif config.has_option("env:" + env, "lib_deps"):
|
||||
config.remove_option("env:" + env, "lib_deps")
|
||||
config.save()
|
||||
@@ -18,9 +18,13 @@ import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.exception import UserSideException
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.package.version import get_original_version
|
||||
@@ -36,13 +40,17 @@ def cli():
|
||||
@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_search(query, json_output):
|
||||
@click.pass_context
|
||||
def platform_search(ctx, query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
query = query or ""
|
||||
return ctx.invoke(package_search_cmd, query=f"type:platform {query}".strip())
|
||||
|
||||
platforms = []
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
@@ -55,17 +63,23 @@ def platform_search(query, json_output):
|
||||
platform["name"], with_boards=False, expose_packages=False
|
||||
)
|
||||
)
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
click.echo(json.dumps(platforms))
|
||||
return None
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit https://docs.platformio.org"
|
||||
"/en/latest/frameworks/index.html\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return
|
||||
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
frameworks = []
|
||||
for framework in regclient.fetch_json_data(
|
||||
@@ -85,21 +99,21 @@ def platform_frameworks(query, json_output):
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
click.echo(json.dumps(frameworks))
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_list(json_output):
|
||||
@click.pass_context
|
||||
def platform_list(ctx, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg list` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_list_cmd, **{"global": True, "only_platforms": True})
|
||||
|
||||
platforms = []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
@@ -108,81 +122,27 @@ def platform_list(json_output):
|
||||
)
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
click.echo(json.dumps(platforms))
|
||||
return None
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about development platform")
|
||||
@click.argument("platform")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
@click.pass_context
|
||||
def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branches
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg show` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_show_cmd, pkg_type="platform", spec=platform)
|
||||
|
||||
data = _get_platform_data(platform)
|
||||
if not data:
|
||||
raise UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data))
|
||||
|
||||
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
|
||||
click.echo(
|
||||
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
|
||||
)
|
||||
click.echo("=" * (3 + len(dep + data["title"])))
|
||||
click.echo(data["description"])
|
||||
click.echo()
|
||||
if "version" in data:
|
||||
click.echo("Version: %s" % data["version"])
|
||||
if data["homepage"]:
|
||||
click.echo("Home: %s" % data["homepage"])
|
||||
if data["repository"]:
|
||||
click.echo("Repository: %s" % data["repository"])
|
||||
if data["url"]:
|
||||
click.echo("Vendor: %s" % data["url"])
|
||||
if data["license"]:
|
||||
click.echo("License: %s" % data["license"])
|
||||
if data["frameworks"]:
|
||||
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
|
||||
|
||||
if not data["packages"]:
|
||||
return None
|
||||
|
||||
if not isinstance(data["packages"][0], dict):
|
||||
click.echo("Packages: %s" % ", ".join(data["packages"]))
|
||||
else:
|
||||
click.echo()
|
||||
click.secho("Packages", bold=True)
|
||||
click.echo("--------")
|
||||
for item in data["packages"]:
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
|
||||
click.echo("-" * (8 + len(item["name"])))
|
||||
if item["type"]:
|
||||
click.echo("Type: %s" % item["type"])
|
||||
click.echo("Requirements: %s" % item["requirements"])
|
||||
click.echo(
|
||||
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
|
||||
)
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item["version"])
|
||||
if "originalVersion" in item:
|
||||
click.echo("Original version: %s" % item["originalVersion"])
|
||||
if "description" in item:
|
||||
click.echo("Description: %s" % item["description"])
|
||||
|
||||
if data["boards"]:
|
||||
click.echo()
|
||||
click.secho("Boards", bold=True)
|
||||
click.echo("------")
|
||||
print_boards(data["boards"])
|
||||
|
||||
return True
|
||||
return click.echo(json.dumps(data))
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install new development platform")
|
||||
@@ -198,7 +158,9 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
def platform_install( # pylint: disable=too-many-arguments,too-many-locals
|
||||
@click.pass_context
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
ctx,
|
||||
platforms,
|
||||
with_package,
|
||||
without_package,
|
||||
@@ -212,76 +174,37 @@ def platform_install( # pylint: disable=too-many-arguments,too-many-locals
|
||||
"the next releases. \nPlease use `pio pkg install` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
def _find_pkg_names(p, candidates):
|
||||
result = []
|
||||
for candidate in candidates:
|
||||
found = False
|
||||
# lookup by package types
|
||||
for _name, _opts in p.packages.items():
|
||||
if _opts.get("type") == candidate:
|
||||
result.append(_name)
|
||||
found = True
|
||||
if (
|
||||
p.frameworks
|
||||
and candidate.startswith("framework-")
|
||||
and candidate[10:] in p.frameworks
|
||||
):
|
||||
result.append(p.frameworks[candidate[10:]]["package"])
|
||||
found = True
|
||||
if not found:
|
||||
result.append(candidate)
|
||||
return result
|
||||
|
||||
pm = PlatformPackageManager()
|
||||
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
for platform in platforms:
|
||||
if with_package or without_package or with_all_packages:
|
||||
pkg = pm.install(platform, skip_dependencies=True)
|
||||
p = PlatformFactory.new(pkg)
|
||||
if with_all_packages:
|
||||
with_package = list(p.packages)
|
||||
with_package = set(_find_pkg_names(p, with_package or []))
|
||||
without_package = set(_find_pkg_names(p, without_package or []))
|
||||
upkgs = with_package | without_package
|
||||
ppkgs = set(p.packages)
|
||||
if not upkgs.issubset(ppkgs):
|
||||
raise UnknownPackageError(", ".join(upkgs - ppkgs))
|
||||
for name, options in p.packages.items():
|
||||
if name in without_package:
|
||||
continue
|
||||
if name in with_package or not (
|
||||
skip_default_package or options.get("optional", False)
|
||||
):
|
||||
p.pm.install(p.get_package_spec(name), force=force)
|
||||
else:
|
||||
pkg = pm.install(platform, skip_dependencies=skip_default_package)
|
||||
|
||||
if pkg and not silent:
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of the packages will be installed later "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green",
|
||||
)
|
||||
ctx.invoke(
|
||||
package_install_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
"skip_dependencies": (
|
||||
not with_all_packages
|
||||
and (with_package or without_package or skip_default_package)
|
||||
),
|
||||
"silent": silent,
|
||||
"force": force,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
def platform_uninstall(platforms):
|
||||
@click.pass_context
|
||||
def platform_uninstall(ctx, platforms):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
pm = PlatformPackageManager()
|
||||
pm.set_log_level(logging.DEBUG)
|
||||
for platform in platforms:
|
||||
if pm.uninstall(platform):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully removed!" % platform,
|
||||
fg="green",
|
||||
)
|
||||
ctx.invoke(
|
||||
package_uninstall_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed development platforms")
|
||||
@@ -300,9 +223,12 @@ def platform_uninstall(platforms):
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
platforms, only_check, dry_run, silent, json_output, **_
|
||||
ctx, platforms, only_check, dry_run, silent, json_output, **_
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
||||
if only_check and not json_output:
|
||||
raise UserSideException(
|
||||
"This command is deprecated, please use `pio pkg outdated` instead"
|
||||
@@ -314,54 +240,42 @@ def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
"the next releases. \nPlease use `pio pkg update` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(
|
||||
package_update_cmd,
|
||||
**{
|
||||
"global": True,
|
||||
"platforms": platforms,
|
||||
"silent": silent,
|
||||
},
|
||||
)
|
||||
|
||||
pm = PlatformPackageManager()
|
||||
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
platforms = platforms or pm.get_installed()
|
||||
only_check = dry_run or only_check
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for platform in platforms:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(platform, PackageItem):
|
||||
pkg = platform
|
||||
else:
|
||||
spec = PackageSpec(platform)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = pm.outdated(pkg, spec)
|
||||
if (
|
||||
not outdated.is_outdated(allow_incompatible=True)
|
||||
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||
):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg, with_boards=False, expose_packages=False
|
||||
)
|
||||
if outdated.is_outdated(allow_incompatible=True):
|
||||
data["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(data)
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
result = []
|
||||
for platform in platforms:
|
||||
click.echo(
|
||||
"Platform %s"
|
||||
% click.style(
|
||||
platform.metadata.name
|
||||
if isinstance(platform, PackageItem)
|
||||
else platform,
|
||||
fg="cyan",
|
||||
)
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(platform, PackageItem):
|
||||
pkg = platform
|
||||
else:
|
||||
spec = PackageSpec(platform)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = pm.outdated(pkg, spec)
|
||||
if (
|
||||
not outdated.is_outdated(allow_incompatible=True)
|
||||
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||
):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg, with_boards=False, expose_packages=False
|
||||
)
|
||||
click.echo("--------")
|
||||
pm.update(platform)
|
||||
click.echo()
|
||||
|
||||
if outdated.is_outdated(allow_incompatible=True):
|
||||
data["versionLatest"] = str(outdated.latest) if outdated.latest else None
|
||||
result.append(data)
|
||||
click.echo(json.dumps(result))
|
||||
return True
|
||||
|
||||
|
||||
@@ -370,32 +284,6 @@ def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
#
|
||||
|
||||
|
||||
def _print_platforms(platforms):
|
||||
for platform in platforms:
|
||||
click.echo(
|
||||
"{name} ~ {title}".format(
|
||||
name=click.style(platform["name"], fg="cyan"), title=platform["title"]
|
||||
)
|
||||
)
|
||||
click.echo("=" * (3 + len(platform["name"] + platform["title"])))
|
||||
click.echo(platform["description"])
|
||||
click.echo()
|
||||
if "homepage" in platform:
|
||||
click.echo("Home: %s" % platform["homepage"])
|
||||
if "frameworks" in platform and platform["frameworks"]:
|
||||
click.echo("Frameworks: %s" % ", ".join(platform["frameworks"]))
|
||||
if "packages" in platform:
|
||||
click.echo("Packages: %s" % ", ".join(platform["packages"]))
|
||||
if "version" in platform:
|
||||
if "__src_url" in platform:
|
||||
click.echo(
|
||||
"Version: %s (%s)" % (platform["version"], platform["__src_url"])
|
||||
)
|
||||
else:
|
||||
click.echo("Version: " + platform["version"])
|
||||
click.echo()
|
||||
|
||||
|
||||
def _get_registry_platforms():
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
return regclient.fetch_json_data("get", "/v2/platforms", x_cache_valid="1d")
|
||||
|
||||
@@ -71,9 +71,9 @@ def cli(dev):
|
||||
click.secho(
|
||||
"Warning! Please restart IDE to affect PIO Home changes", fg="yellow"
|
||||
)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
except Exception as exc:
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(exc)])) from exc
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
|
||||
click.secho(
|
||||
@@ -127,8 +127,8 @@ def get_latest_version():
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return get_pypi_latest_version()
|
||||
except:
|
||||
raise exception.GetLatestVersionError()
|
||||
except Exception as exc:
|
||||
raise exception.GetLatestVersionError() from exc
|
||||
|
||||
|
||||
def get_develop_latest_version():
|
||||
|
||||
@@ -41,6 +41,22 @@ def is_bytes(x):
|
||||
return isinstance(x, (bytes, memoryview, bytearray))
|
||||
|
||||
|
||||
def isascii(text):
|
||||
if sys.version_info >= (3, 7):
|
||||
return text.isascii()
|
||||
for c in text or "":
|
||||
if ord(c) > 127:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_terminal():
|
||||
try:
|
||||
return sys.stdout.isatty()
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return False
|
||||
|
||||
|
||||
def ci_strings_are_equal(a, b):
|
||||
if a == b:
|
||||
return True
|
||||
|
||||
@@ -129,11 +129,11 @@ def _debug_in_project_dir(
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
except exception.InvalidUdevRules as exc:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
||||
helpers.escape_gdbmi_stream("~", str(exc) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(e) + "\n",
|
||||
else str(exc) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
|
||||
|
||||
@@ -18,14 +18,13 @@ import os
|
||||
from platformio import fs, proc, util
|
||||
from platformio.compat import string_types
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.helpers import reveal_debug_port
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import load_build_metadata
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, platform, project_config, env_name):
|
||||
def __init__(self, platform, project_config, env_name, port=None):
|
||||
self.platform = platform
|
||||
self.project_config = project_config
|
||||
self.env_name = env_name
|
||||
@@ -49,6 +48,7 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
self._load_cmds = None
|
||||
self._port = None
|
||||
|
||||
self.port = port
|
||||
self.server = self._configure_server()
|
||||
|
||||
try:
|
||||
@@ -119,11 +119,9 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
return reveal_debug_port(
|
||||
return (
|
||||
self.env_options.get("debug_port", self.tool_settings.get("port"))
|
||||
or self._port,
|
||||
self.tool_name,
|
||||
self.tool_settings,
|
||||
or self._port
|
||||
)
|
||||
|
||||
@port.setter
|
||||
@@ -147,7 +145,7 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True)
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
|
||||
if data:
|
||||
return data
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
@@ -205,8 +203,8 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
def get_init_script(self, debugger):
|
||||
try:
|
||||
return getattr(self, "%s_INIT_SCRIPT" % debugger.upper())
|
||||
except AttributeError:
|
||||
raise NotImplementedError
|
||||
except AttributeError as exc:
|
||||
raise NotImplementedError from exc
|
||||
|
||||
def reveal_patterns(self, source, recursive=True):
|
||||
program_path = self.program_path or ""
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.device.finder import SerialPortFinder, is_pattern_port
|
||||
|
||||
|
||||
class BlackmagicDebugConfig(DebugConfigBase):
|
||||
@@ -47,3 +49,24 @@ while ($busy)
|
||||
end
|
||||
set language auto
|
||||
"""
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
# pylint: disable=assignment-from-no-return
|
||||
initial_port = DebugConfigBase.port.fget(self)
|
||||
if initial_port and not is_pattern_port(initial_port):
|
||||
return initial_port
|
||||
port = SerialPortFinder(
|
||||
board_config=self.board_config,
|
||||
upload_protocol=self.tool_name,
|
||||
prefer_gdb_port=True,
|
||||
).find(initial_port)
|
||||
if port:
|
||||
return port
|
||||
raise DebugInvalidOptionsError(
|
||||
"Please specify `debug_port` for the working environment"
|
||||
)
|
||||
|
||||
@port.setter
|
||||
def port(self, value):
|
||||
self._port = value
|
||||
|
||||
@@ -19,7 +19,7 @@ from platformio.debug.config.generic import GenericDebugConfig
|
||||
from platformio.debug.config.native import NativeDebugConfig
|
||||
|
||||
|
||||
class DebugConfigFactory(object):
|
||||
class DebugConfigFactory:
|
||||
@staticmethod
|
||||
def get_clsname(name):
|
||||
name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
|
||||
|
||||
@@ -34,5 +34,6 @@ $INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.port = ":3333"
|
||||
|
||||
@@ -38,8 +38,9 @@ $INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2331"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.port = ":2331"
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
|
||||
@@ -32,5 +32,6 @@ $INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2000"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.port = ":2000"
|
||||
|
||||
@@ -33,5 +33,6 @@ $INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":1234"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.port = ":1234"
|
||||
|
||||
@@ -35,8 +35,9 @@ monitor start
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
self.port = ":3333"
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
|
||||
@@ -16,14 +16,12 @@ import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from fnmatch import fnmatch
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.compat import IS_WINDOWS, is_bytes
|
||||
from platformio.compat import is_bytes
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.device.list import list_serial_ports
|
||||
from platformio.run.cli import cli as cmd_run
|
||||
from platformio.run.cli import print_processing_header
|
||||
from platformio.test.helpers import list_test_names
|
||||
@@ -161,44 +159,3 @@ def is_prog_obsolete(prog_path):
|
||||
with open(prog_hash_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(new_digest)
|
||||
return True
|
||||
|
||||
|
||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
||||
def _get_pattern():
|
||||
if not env_debug_port:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
||||
return env_debug_port
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_serial_port(hwids):
|
||||
for item in list_serial_ports(filter_hwid=True):
|
||||
if not _is_match_pattern(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if tool_name.startswith("blackmagic"):
|
||||
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
for hwid in hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
if env_debug_port and not _get_pattern():
|
||||
return env_debug_port
|
||||
if not tool_settings.get("require_debug_port"):
|
||||
return None
|
||||
|
||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
||||
if not debug_port:
|
||||
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
|
||||
return debug_port
|
||||
|
||||
@@ -14,8 +14,8 @@
|
||||
|
||||
import click
|
||||
|
||||
from platformio.device.commands.list import device_list_cmd
|
||||
from platformio.device.commands.monitor import device_monitor_cmd
|
||||
from platformio.device.list.command import device_list_cmd
|
||||
from platformio.device.monitor.command import device_monitor_cmd
|
||||
|
||||
|
||||
@click.group(
|
||||
|
||||
@@ -1,184 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import click
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio.device.filters.base import register_filters
|
||||
from platformio.device.finder import find_serial_port
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@click.command("monitor", short_help="Monitor device (Serial/Socket)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option(
|
||||
"--baud",
|
||||
"-b",
|
||||
type=int,
|
||||
help="Set baud rate, default=%d" % ProjectOptions["env.monitor_speed"].default,
|
||||
)
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N",
|
||||
)
|
||||
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
|
||||
)
|
||||
@click.option(
|
||||
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
|
||||
)
|
||||
@click.option(
|
||||
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
|
||||
)
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8",
|
||||
)
|
||||
@click.option("--filter", "-f", multiple=True, help="Add filters/text transformations")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF",
|
||||
)
|
||||
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)",
|
||||
)
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)",
|
||||
)
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment",
|
||||
)
|
||||
def device_monitor_cmd(**kwargs): # pylint: disable=too-many-branches
|
||||
project_options = {}
|
||||
platform = None
|
||||
with fs.cd(kwargs["project_dir"]):
|
||||
try:
|
||||
project_options = get_project_options(kwargs["environment"])
|
||||
kwargs = apply_project_monitor_options(kwargs, project_options)
|
||||
if "platform" in project_options:
|
||||
platform = PlatformFactory.new(project_options["platform"])
|
||||
except NotPlatformIOProjectError:
|
||||
pass
|
||||
register_filters(platform=platform, options=kwargs)
|
||||
kwargs["port"] = find_serial_port(
|
||||
initial_port=kwargs["port"],
|
||||
board_config=platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None,
|
||||
upload_protocol=project_options.get("upload_port"),
|
||||
)
|
||||
|
||||
# override system argv with patched options
|
||||
sys.argv = ["monitor"] + project_options_to_monitor_argv(
|
||||
kwargs,
|
||||
project_options,
|
||||
ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"),
|
||||
)
|
||||
|
||||
if not kwargs["quiet"]:
|
||||
click.echo(
|
||||
"--- Available filters and text transformations: %s"
|
||||
% ", ".join(sorted(miniterm.TRANSFORMATIONS.keys()))
|
||||
)
|
||||
click.echo("--- More details at https://bit.ly/pio-monitor-filters")
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs["port"],
|
||||
default_baudrate=kwargs["baud"]
|
||||
or ProjectOptions["env.monitor_speed"].default,
|
||||
default_rts=kwargs["rts"],
|
||||
default_dtr=kwargs["dtr"],
|
||||
)
|
||||
except Exception as e:
|
||||
raise exception.MinitermException(e)
|
||||
|
||||
|
||||
def get_project_options(environment=None):
|
||||
config = ProjectConfig.get_instance()
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
environment = environment or config.get_default_env()
|
||||
return config.items(env=environment, as_dict=True)
|
||||
|
||||
|
||||
def apply_project_monitor_options(cli_options, project_options):
|
||||
for k in ("port", "speed", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if k == "speed":
|
||||
k = "baud"
|
||||
if cli_options[k] is None and k2 in project_options:
|
||||
cli_options[k] = project_options[k2]
|
||||
if k != "port":
|
||||
cli_options[k] = int(cli_options[k])
|
||||
return cli_options
|
||||
|
||||
|
||||
def project_options_to_monitor_argv(cli_options, project_options, ignore=None):
|
||||
confmon_flags = project_options.get("monitor_flags", [])
|
||||
result = confmon_flags[::]
|
||||
|
||||
for f in project_options.get("monitor_filters", []):
|
||||
result.extend(["--filter", f])
|
||||
|
||||
for k, v in cli_options.items():
|
||||
if v is None or (ignore and k in ignore):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
if k in confmon_flags:
|
||||
continue
|
||||
if isinstance(v, bool):
|
||||
if v:
|
||||
result.append(k)
|
||||
elif isinstance(v, tuple):
|
||||
for i in v:
|
||||
result.extend([k, i])
|
||||
else:
|
||||
result.extend([k, str(v)])
|
||||
return result
|
||||
@@ -14,11 +14,44 @@
|
||||
|
||||
import os
|
||||
from fnmatch import fnmatch
|
||||
from functools import lru_cache
|
||||
|
||||
import click
|
||||
import serial
|
||||
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.device.list import list_logical_devices, list_serial_ports
|
||||
from platformio.device.list.util import list_logical_devices, list_serial_ports
|
||||
from platformio.fs import get_platformio_udev_rules_path
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.util import retry
|
||||
|
||||
BLACK_MAGIC_HWIDS = [
|
||||
"1D50:6018",
|
||||
]
|
||||
|
||||
|
||||
def parse_udev_rules_hwids(path):
|
||||
result = []
|
||||
with open(path, mode="r", encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
attrs = {}
|
||||
for attr in line.split(","):
|
||||
attr = attr.replace("==", "=").replace('"', "").strip()
|
||||
if "=" not in attr:
|
||||
continue
|
||||
name, value = attr.split("=", 1)
|
||||
attrs[name] = value
|
||||
hwid = "%s:%s" % (
|
||||
attrs.get("ATTRS{idVendor}", "*"),
|
||||
attrs.get("ATTRS{idProduct}", "*"),
|
||||
)
|
||||
if hwid != "*:*":
|
||||
result.append(hwid.upper())
|
||||
return result
|
||||
|
||||
|
||||
def is_pattern_port(port):
|
||||
@@ -27,71 +60,6 @@ def is_pattern_port(port):
|
||||
return set(["*", "?", "[", "]"]) & set(port)
|
||||
|
||||
|
||||
def match_serial_port(pattern):
|
||||
for item in list_serial_ports():
|
||||
if fnmatch(item["port"], pattern):
|
||||
return item["port"]
|
||||
return None
|
||||
|
||||
|
||||
def is_serial_port_ready(port, timeout=1):
|
||||
try:
|
||||
serial.Serial(port, timeout=timeout).close()
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def find_serial_port(
|
||||
initial_port, board_config=None, upload_protocol=None, ensure_ready=False
|
||||
):
|
||||
if initial_port:
|
||||
if not is_pattern_port(initial_port):
|
||||
return initial_port
|
||||
return match_serial_port(initial_port)
|
||||
port = None
|
||||
if upload_protocol and upload_protocol.startswith("blackmagic"):
|
||||
port = find_blackmagic_serial_port()
|
||||
if not port and board_config:
|
||||
port = find_board_serial_port(board_config)
|
||||
if port:
|
||||
return port
|
||||
|
||||
# pick the last PID:VID USB device
|
||||
usb_port = None
|
||||
for item in list_serial_ports():
|
||||
if ensure_ready and not is_serial_port_ready(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if "VID:PID" in item["hwid"]:
|
||||
usb_port = port
|
||||
return usb_port or port
|
||||
|
||||
|
||||
def find_blackmagic_serial_port():
|
||||
for item in list_serial_ports():
|
||||
port = item["port"]
|
||||
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
|
||||
def find_board_serial_port(board_config):
|
||||
board_hwids = board_config.get("build.hwids", [])
|
||||
if not board_hwids:
|
||||
return None
|
||||
for item in list_serial_ports(filter_hwid=True):
|
||||
port = item["port"]
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
|
||||
def find_mbed_disk(initial_port):
|
||||
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
|
||||
for item in list_logical_devices():
|
||||
@@ -109,3 +77,181 @@ def find_mbed_disk(initial_port):
|
||||
if item["name"] and any(l in item["name"].lower() for l in msdlabels):
|
||||
return item["path"]
|
||||
return None
|
||||
|
||||
|
||||
def is_serial_port_ready(port, timeout=1):
|
||||
try:
|
||||
serial.Serial(port, timeout=timeout).close()
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
class SerialPortFinder:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
self,
|
||||
board_config=None,
|
||||
upload_protocol=None,
|
||||
ensure_ready=False,
|
||||
prefer_gdb_port=False,
|
||||
timeout=2,
|
||||
verbose=False,
|
||||
):
|
||||
self.board_config = board_config
|
||||
self.upload_protocol = upload_protocol
|
||||
self.ensure_ready = ensure_ready
|
||||
self.prefer_gdb_port = prefer_gdb_port
|
||||
self.timeout = timeout
|
||||
self.verbose = verbose
|
||||
|
||||
@staticmethod
|
||||
def normalize_board_hwid(value):
|
||||
if isinstance(value, (list, tuple)):
|
||||
value = ("%s:%s" % (value[0], value[1])).replace("0x", "")
|
||||
return value.upper()
|
||||
|
||||
@staticmethod
|
||||
def match_serial_port(pattern):
|
||||
for item in list_serial_ports():
|
||||
if fnmatch(item["port"], pattern):
|
||||
return item["port"]
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def match_device_hwid(patterns):
|
||||
if not patterns:
|
||||
return None
|
||||
for item in list_serial_ports(as_objects=True):
|
||||
if not item.vid or not item.pid:
|
||||
continue
|
||||
hwid = "{:04X}:{:04X}".format(item.vid, item.pid)
|
||||
for pattern in patterns:
|
||||
if fnmatch(hwid, pattern):
|
||||
return item
|
||||
return None
|
||||
|
||||
def find(self, initial_port=None):
|
||||
if initial_port:
|
||||
if not is_pattern_port(initial_port):
|
||||
return initial_port
|
||||
return self.match_serial_port(initial_port)
|
||||
|
||||
if self.upload_protocol and self.upload_protocol.startswith("blackmagic"):
|
||||
return self._find_blackmagic_port()
|
||||
|
||||
device = None
|
||||
if self.board_config and self.board_config.get("build.hwids", []):
|
||||
device = self._find_board_device()
|
||||
if not device:
|
||||
device = self._find_known_device()
|
||||
if device:
|
||||
return self._reveal_device_port(device)
|
||||
|
||||
# pick the best PID:VID USB device
|
||||
port = best_port = None
|
||||
for item in list_serial_ports():
|
||||
if self.ensure_ready and not is_serial_port_ready(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if "VID:PID" in item["hwid"]:
|
||||
best_port = port
|
||||
return best_port or port
|
||||
|
||||
def _reveal_device_port(self, device):
|
||||
candidates = []
|
||||
for item in list_serial_ports(as_objects=True):
|
||||
if item.vid == device.vid and item.pid == device.pid:
|
||||
candidates.append(item)
|
||||
if len(candidates) == 1:
|
||||
return device.device
|
||||
for item in candidates:
|
||||
if ("GDB" if self.prefer_gdb_port else "UART") in item.description:
|
||||
return item.device
|
||||
candidates = sorted(candidates, key=lambda item: item.device)
|
||||
# first port is GDB? BlackMagic, ESP-Prog
|
||||
return candidates[0 if self.prefer_gdb_port else -1].device
|
||||
|
||||
def _find_blackmagic_port(self):
|
||||
device = self.match_device_hwid(BLACK_MAGIC_HWIDS)
|
||||
if not device:
|
||||
return None
|
||||
port = self._reveal_device_port(device)
|
||||
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
return "\\\\.\\%s" % port
|
||||
return port
|
||||
|
||||
def _find_board_device(self):
|
||||
hwids = [
|
||||
self.normalize_board_hwid(hwid)
|
||||
for hwid in self.board_config.get("build.hwids", [])
|
||||
]
|
||||
try:
|
||||
|
||||
@retry(timeout=self.timeout)
|
||||
def wrapper():
|
||||
device = self.match_device_hwid(hwids)
|
||||
if device:
|
||||
return device
|
||||
raise retry.RetryNextException()
|
||||
|
||||
return wrapper()
|
||||
except retry.RetryStopException:
|
||||
pass
|
||||
|
||||
if self.verbose:
|
||||
click.secho(
|
||||
"TimeoutError: Could not automatically find serial port "
|
||||
"for the `%s` board based on the declared HWIDs=%s"
|
||||
% (self.board_config.get("name", "unknown"), hwids),
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
def _find_known_device(self):
|
||||
hwids = list(BLACK_MAGIC_HWIDS)
|
||||
|
||||
# load from UDEV rules
|
||||
udev_rules_path = get_platformio_udev_rules_path()
|
||||
if os.path.isfile(udev_rules_path):
|
||||
hwids.extend(parse_udev_rules_hwids(udev_rules_path))
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _fetch_hwids_from_platforms():
|
||||
"""load from installed dev-platforms"""
|
||||
result = []
|
||||
for platform in PlatformPackageManager().get_installed():
|
||||
p = PlatformFactory.new(platform)
|
||||
for board_config in p.get_boards().values():
|
||||
for board_hwid in board_config.get("build.hwids", []):
|
||||
board_hwid = self.normalize_board_hwid(board_hwid)
|
||||
if board_hwid not in result:
|
||||
result.append(board_hwid)
|
||||
return result
|
||||
|
||||
try:
|
||||
|
||||
@retry(timeout=self.timeout)
|
||||
def wrapper():
|
||||
device = self.match_device_hwid(hwids)
|
||||
if not device:
|
||||
device = self.match_device_hwid(_fetch_hwids_from_platforms())
|
||||
if device:
|
||||
return device
|
||||
raise retry.RetryNextException()
|
||||
|
||||
return wrapper()
|
||||
except retry.RetryStopException:
|
||||
pass
|
||||
|
||||
if self.verbose:
|
||||
click.secho(
|
||||
"TimeoutError: Could not automatically find serial port "
|
||||
"based on the known UART bridges",
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
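The refactored SerialPortFinder above replaces the old find_serial_port()/find_blackmagic_serial_port() helpers with a single class. A minimal usage sketch follows; it is hypothetical and not part of this diff, assuming only the constructor arguments and find() method shown above:

# Sketch: resolve a debug-probe serial port with the new class-based finder.
from platformio.device.finder import SerialPortFinder

finder = SerialPortFinder(upload_protocol="blackmagic", prefer_gdb_port=True)
port = finder.find(initial_port=None)  # may return None if no matching device is connected
print(port or "No serial port detected")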
@@ -16,7 +16,7 @@ import json
|
||||
|
||||
import click
|
||||
|
||||
from platformio.device.list import (
|
||||
from platformio.device.list.util import (
|
||||
list_logical_devices,
|
||||
list_mdns_services,
|
||||
list_serial_ports,
|
||||
@@ -24,12 +24,15 @@ from platformio import __version__, exception, proc
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
|
||||
|
||||
def list_serial_ports(filter_hwid=False):
|
||||
def list_serial_ports(filter_hwid=False, as_objects=False):
|
||||
try:
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from serial.tools.list_ports import comports
|
||||
except ImportError:
|
||||
raise exception.GetSerialPortsError(os.name)
|
||||
except ImportError as exc:
|
||||
raise exception.GetSerialPortsError(os.name) from exc
|
||||
|
||||
if as_objects:
|
||||
return comports()
|
||||
|
||||
result = []
|
||||
for p, d, h in comports():
|
||||
@@ -81,7 +84,7 @@ def list_logical_devices():
|
||||
|
||||
|
||||
def list_mdns_services():
|
||||
class mDNSListener(object):
|
||||
class mDNSListener:
|
||||
def __init__(self):
|
||||
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
|
||||
self._found_types = []
|
||||
platformio/device/monitor/command.py (new file, 177 lines)
@@ -0,0 +1,177 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio.device.finder import SerialPortFinder
|
||||
from platformio.device.monitor.filters.base import register_filters
|
||||
from platformio.device.monitor.terminal import get_available_filters, start_terminal
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@click.command("monitor", short_help="Monitor device (Serial/Socket)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option(
|
||||
"-b",
|
||||
"--baud",
|
||||
type=ProjectOptions["env.monitor_speed"].type,
|
||||
help="Set baud/speed [default=%d]" % ProjectOptions["env.monitor_speed"].default,
|
||||
)
|
||||
@click.option(
|
||||
"--parity",
|
||||
type=ProjectOptions["env.monitor_parity"].type,
|
||||
help="Enable parity checking [default=%s]"
|
||||
% ProjectOptions["env.monitor_parity"].default,
|
||||
)
|
||||
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control")
|
||||
@click.option("--xonxoff", is_flag=True, help="Enable software flow control")
|
||||
@click.option(
|
||||
"--rts",
|
||||
type=ProjectOptions["env.monitor_rts"].type,
|
||||
help="Set initial RTS line state",
|
||||
)
|
||||
@click.option(
|
||||
"--dtr",
|
||||
type=ProjectOptions["env.monitor_dtr"].type,
|
||||
help="Set initial DTR line state",
|
||||
)
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
help=(
|
||||
"Set the encoding for the serial port "
|
||||
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
|
||||
% ProjectOptions["env.monitor_encoding"].default
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"-f",
|
||||
"--filter",
|
||||
"filters",
|
||||
multiple=True,
|
||||
help="Apply filters/text transformations",
|
||||
)
|
||||
@click.option(
|
||||
"--eol",
|
||||
type=ProjectOptions["env.monitor_eol"].type,
|
||||
help="End of line mode [default=%s]" % ProjectOptions["env.monitor_eol"].default,
|
||||
)
|
||||
@click.option("--raw", is_flag=True, help=ProjectOptions["env.monitor_raw"].description)
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
show_default=True,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application [default=3 (Ctrl+C)]",
|
||||
)
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control terminal (menu) [default=20 (DEC)]",
|
||||
)
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages",
|
||||
)
|
||||
@click.option(
|
||||
"--no-reconnect",
|
||||
is_flag=True,
|
||||
help="Disable automatic reconnection if the established connection fails",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and the specified environment",
|
||||
)
|
||||
def device_monitor_cmd(**options):
|
||||
with fs.cd(options["project_dir"]):
|
||||
platform = None
|
||||
project_options = {}
|
||||
try:
|
||||
project_options = get_project_options(options["environment"])
|
||||
if "platform" in project_options:
|
||||
platform = PlatformFactory.new(project_options["platform"])
|
||||
except NotPlatformIOProjectError:
|
||||
pass
|
||||
|
||||
options = apply_project_monitor_options(options, project_options)
|
||||
register_filters(platform=platform, options=options)
|
||||
options["port"] = SerialPortFinder(
|
||||
board_config=platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None,
|
||||
upload_protocol=project_options.get("upload_protocol"),
|
||||
ensure_ready=True,
|
||||
).find(initial_port=options["port"])
|
||||
|
||||
if options["menu_char"] == options["exit_char"]:
|
||||
raise exception.UserSideException(
|
||||
"--exit-char can not be the same as --menu-char"
|
||||
)
|
||||
|
||||
# check for unknown filters
|
||||
if options["filters"]:
|
||||
known_filters = set(get_available_filters())
|
||||
unknown_filters = set(options["filters"]) - known_filters
|
||||
if unknown_filters:
|
||||
options["filters"] = list(known_filters & set(options["filters"]))
|
||||
click.secho(
|
||||
("Warning! Skipping unknown filters `%s`. Known filters are `%s`")
|
||||
% (", ".join(unknown_filters), ", ".join(sorted(known_filters))),
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
start_terminal(options)
|
||||
|
||||
|
||||
def get_project_options(environment=None):
|
||||
config = ProjectConfig.get_instance()
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
environment = environment or config.get_default_env()
|
||||
return config.items(env=environment, as_dict=True)
|
||||
|
||||
|
||||
def apply_project_monitor_options(initial_options, project_options):
|
||||
for option_meta in ProjectOptions.values():
|
||||
if option_meta.group != "monitor":
|
||||
continue
|
||||
cli_key = option_meta.name.split("_", 1)[1]
|
||||
if cli_key == "speed":
|
||||
cli_key = "baud"
|
||||
# value set from CLI, skip overriding
|
||||
if initial_options[cli_key] not in (None, (), []) and (
|
||||
option_meta.type != click.BOOL or f"--{cli_key}" in sys.argv[1:]
|
||||
):
|
||||
continue
|
||||
initial_options[cli_key] = project_options.get(
|
||||
option_meta.name, option_meta.default
|
||||
)
|
||||
return initial_options
|
||||
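The relocated monitor command keeps its Click interface, so it can still be driven programmatically. A hypothetical invocation via Click's test runner; the port path and baud rate below are examples only, not values from this diff:

# Sketch: invoke the relocated `pio device monitor` entry point in-process.
from click.testing import CliRunner
from platformio.device.monitor.command import device_monitor_cmd

runner = CliRunner()
result = runner.invoke(device_monitor_cmd, ["--port", "/dev/ttyUSB0", "--baud", "115200"])
print(result.exit_code, result.output)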
@@ -17,7 +17,6 @@ import os
|
||||
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import get_object_members, load_python_module
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
@@ -70,10 +69,7 @@ def register_filters(platform=None, options=None):
|
||||
os.path.join(pkg.path, "monitor"), prefix="filter_", options=options
|
||||
)
|
||||
# default filters
|
||||
load_monitor_filters(
|
||||
os.path.join(fs.get_source_dir(), "device", "filters"),
|
||||
options=options,
|
||||
)
|
||||
load_monitor_filters(os.path.dirname(__file__), options=options)
|
||||
|
||||
|
||||
def load_monitor_filters(monitor_dir, prefix=None, options=None):
|
||||
@@ -91,7 +87,7 @@ def load_monitor_filters(monitor_dir, prefix=None, options=None):
|
||||
def load_monitor_filter(path, options=None):
|
||||
name = os.path.basename(path)
|
||||
name = name[: name.find(".")]
|
||||
module = load_python_module("platformio.device.filters.%s" % name, path)
|
||||
module = load_python_module("platformio.device.monitor.filters.%s" % name, path)
|
||||
for cls in get_object_members(module).values():
|
||||
if (
|
||||
not inspect.isclass(cls)
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
import serial
|
||||
|
||||
from platformio.device.filters.base import DeviceMonitorFilterBase
|
||||
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
|
||||
|
||||
|
||||
class Hexlify(DeviceMonitorFilterBase):
|
||||
@@ -16,7 +16,7 @@ import io
|
||||
import os.path
|
||||
from datetime import datetime
|
||||
|
||||
from platformio.device.filters.base import DeviceMonitorFilterBase
|
||||
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
|
||||
|
||||
|
||||
class LogToFile(DeviceMonitorFilterBase):
|
||||
@@ -12,7 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.device.filters.base import DeviceMonitorFilterBase
|
||||
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
|
||||
|
||||
|
||||
class SendOnEnter(DeviceMonitorFilterBase):
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from platformio.device.filters.base import DeviceMonitorFilterBase
|
||||
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
|
||||
|
||||
|
||||
class Timestamp(DeviceMonitorFilterBase):
|
||||
platformio/device/monitor/terminal.py (new file, 189 lines)
@@ -0,0 +1,189 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import click
|
||||
import serial
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio.exception import UserSideException
|
||||
|
||||
|
||||
class Terminal(miniterm.Miniterm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.pio_unexpected_exception = None
|
||||
|
||||
def reader(self):
|
||||
try:
|
||||
super().reader()
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
self.pio_unexpected_exception = exc
|
||||
|
||||
def writer(self):
|
||||
try:
|
||||
super().writer()
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
self.pio_unexpected_exception = exc
|
||||
|
||||
|
||||
def get_available_filters():
|
||||
return sorted(miniterm.TRANSFORMATIONS.keys())
|
||||
|
||||
|
||||
def start_terminal(options):
|
||||
retries = 0
|
||||
is_port_valid = False
|
||||
while True:
|
||||
term = None
|
||||
try:
|
||||
term = new_terminal(options)
|
||||
is_port_valid = True
|
||||
options["port"] = term.serial.name
|
||||
if retries:
|
||||
click.echo("\t Connected!", err=True)
|
||||
elif not options["quiet"]:
|
||||
print_terminal_settings(term)
|
||||
retries = 0 # reset
|
||||
term.start()
|
||||
try:
|
||||
term.join(True)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
term.join()
|
||||
|
||||
# cleanup
|
||||
term.console.cleanup()
|
||||
|
||||
# restore original standard streams
|
||||
sys.stdin = sys.__stdin__
|
||||
sys.stdout = sys.__stdout__
|
||||
sys.stderr = sys.__stderr__
|
||||
|
||||
term.close()
|
||||
|
||||
if term.pio_unexpected_exception:
|
||||
click.secho(
|
||||
"Disconnected (%s)" % term.pio_unexpected_exception,
|
||||
fg="red",
|
||||
err=True,
|
||||
)
|
||||
if not options["no_reconnect"]:
|
||||
raise UserSideException(term.pio_unexpected_exception)
|
||||
|
||||
return
|
||||
except UserSideException as exc:
|
||||
if not is_port_valid:
|
||||
raise exc
|
||||
if not retries:
|
||||
click.echo("Reconnecting to %s " % options["port"], err=True, nl=False)
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
else:
|
||||
click.echo(".", err=True, nl=False)
|
||||
retries += 1
|
||||
threading.Event().wait(retries / 2)
|
||||
|
||||
|
||||
def new_terminal(options):
|
||||
term = Terminal(
|
||||
new_serial_instance(options),
|
||||
echo=options["echo"],
|
||||
eol=options["eol"].lower(),
|
||||
filters=list(reversed(options["filters"] or ["default"])),
|
||||
)
|
||||
term.exit_character = chr(options["exit_char"])
|
||||
term.menu_character = chr(options["menu_char"])
|
||||
term.raw = options["raw"]
|
||||
term.set_rx_encoding(options["encoding"])
|
||||
term.set_tx_encoding(options["encoding"])
|
||||
return term
|
||||
|
||||
|
||||
def print_terminal_settings(terminal):
|
||||
click.echo(
|
||||
"--- Terminal on {p.name} | "
|
||||
"{p.baudrate} {p.bytesize}-{p.parity}-{p.stopbits}".format(p=terminal.serial)
|
||||
)
|
||||
click.echo(
|
||||
"--- Available filters and text transformations: %s"
|
||||
% ", ".join(get_available_filters())
|
||||
)
|
||||
click.echo("--- More details at https://bit.ly/pio-monitor-filters")
|
||||
click.echo(
|
||||
"--- Quit: {} | Menu: {} | Help: {} followed by {}".format(
|
||||
miniterm.key_description(terminal.exit_character),
|
||||
miniterm.key_description(terminal.menu_character),
|
||||
miniterm.key_description(terminal.menu_character),
|
||||
miniterm.key_description("\x08"),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def new_serial_instance(options): # pylint: disable=too-many-branches
|
||||
serial_instance = None
|
||||
port = options["port"]
|
||||
while serial_instance is None:
|
||||
# no port given on command line -> ask user now
|
||||
if port is None or port == "-":
|
||||
try:
|
||||
port = miniterm.ask_for_port()
|
||||
except KeyboardInterrupt as exc:
|
||||
click.echo("", err=True)
|
||||
raise UserSideException("User aborted and port is not given") from exc
|
||||
else:
|
||||
if not port:
|
||||
raise UserSideException("Port is not given")
|
||||
try:
|
||||
serial_instance = serial.serial_for_url(
|
||||
port,
|
||||
options["baud"],
|
||||
parity=options["parity"],
|
||||
rtscts=options["rtscts"],
|
||||
xonxoff=options["xonxoff"],
|
||||
do_not_open=True,
|
||||
)
|
||||
|
||||
if not hasattr(serial_instance, "cancel_read"):
|
||||
# enable timeout for alive flag polling if cancel_read is not available
|
||||
serial_instance.timeout = 1
|
||||
|
||||
if options["dtr"] is not None:
|
||||
if not options["quiet"]:
|
||||
click.echo(
|
||||
"--- forcing DTR {}".format(
|
||||
"active" if options["dtr"] else "inactive"
|
||||
)
|
||||
)
|
||||
serial_instance.dtr = options["dtr"]
|
||||
|
||||
if options["rts"] is not None:
|
||||
if not options["quiet"]:
|
||||
click.echo(
|
||||
"--- forcing RTS {}".format(
|
||||
"active" if options["rts"] else "inactive"
|
||||
)
|
||||
)
|
||||
serial_instance.rts = options["rts"]
|
||||
|
||||
if isinstance(serial_instance, serial.Serial):
|
||||
serial_instance.exclusive = True
|
||||
|
||||
serial_instance.open()
|
||||
except serial.SerialException as exc:
|
||||
raise UserSideException(exc) from exc
|
||||
|
||||
return serial_instance
|
||||
@@ -30,10 +30,6 @@ class ReturnErrorCode(PlatformioException):
MESSAGE = "{0}"


class MinitermException(PlatformioException):
pass


class UserSideException(PlatformioException):
pass

@@ -28,7 +28,7 @@ from platformio import exception, proc
from platformio.compat import IS_WINDOWS


class cd(object):
class cd:
def __init__(self, new_path):
self.new_path = new_path
self.prev_path = os.getcwd()
@@ -54,8 +54,8 @@ def load_json(file_path):
try:
with open(file_path, mode="r", encoding="utf8") as f:
return json.load(f)
except ValueError:
raise exception.InvalidJSONFile(file_path)
except ValueError as exc:
raise exception.InvalidJSONFile(file_path) from exc


def humanize_file_size(filesize):
@@ -97,6 +97,12 @@ def calculate_folder_size(path):
|
||||
return result
|
||||
|
||||
|
||||
def get_platformio_udev_rules_path():
|
||||
return os.path.abspath(
|
||||
os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
|
||||
)
|
||||
|
||||
|
||||
def ensure_udev_rules():
|
||||
from platformio.util import get_systype # pylint: disable=import-outside-toplevel
|
||||
|
||||
@@ -119,9 +125,7 @@ def ensure_udev_rules():
|
||||
if not any(os.path.isfile(p) for p in installed_rules):
|
||||
raise exception.MissedUdevRules
|
||||
|
||||
origin_path = os.path.abspath(
|
||||
os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
|
||||
)
|
||||
origin_path = get_platformio_udev_rules_path()
|
||||
if not os.path.isfile(origin_path):
|
||||
return None
|
||||
|
||||
@@ -227,9 +231,9 @@ def rmtree(path):
|
||||
if st_mode & stat.S_IREAD:
|
||||
os.chmod(path, st_mode | stat.S_IWRITE)
|
||||
func(path)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
click.secho(
|
||||
"%s \nPlease manually remove the file `%s`" % (str(e), path),
|
||||
"%s \nPlease manually remove the file `%s`" % (str(exc), path),
|
||||
fg="red",
|
||||
err=True,
|
||||
)
|
||||
|
||||
@@ -14,15 +14,15 @@
|
||||
|
||||
import socket
|
||||
|
||||
import requests
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.http import HTTPSession
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class AsyncSession(requests.Session):
|
||||
class AsyncSession(HTTPSession):
|
||||
async def request( # pylint: disable=signature-differs,invalid-overridden-method
|
||||
self, *args, **kwargs
|
||||
):
|
||||
|
||||
@@ -23,7 +23,7 @@ class AccountRPC:
|
||||
try:
|
||||
client = AccountClient()
|
||||
return getattr(client, method)(*args, **kwargs)
|
||||
except Exception as e: # pylint: disable=bare-except
|
||||
except Exception as exc: # pylint: disable=bare-except
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4003, message="PIO Account Call Error", data=str(e)
|
||||
)
|
||||
code=4003, message="PIO Account Call Error", data=str(exc)
|
||||
) from exc
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
@@ -22,9 +20,9 @@ from functools import cmp_to_key
|
||||
|
||||
import click
|
||||
|
||||
from platformio import __default_requests_timeout__, fs
|
||||
from platformio import fs
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.device.list import list_logical_devices
|
||||
from platformio.device.list.util import list_logical_devices
|
||||
from platformio.home import helpers
|
||||
from platformio.http import ensure_internet_on
|
||||
|
||||
@@ -52,13 +50,9 @@ class OSRPC:
|
||||
|
||||
session = helpers.requests_session()
|
||||
if data:
|
||||
r = await session.post(
|
||||
uri, data=data, headers=headers, timeout=__default_requests_timeout__
|
||||
)
|
||||
r = await session.post(uri, data=data, headers=headers)
|
||||
else:
|
||||
r = await session.get(
|
||||
uri, headers=headers, timeout=__default_requests_timeout__
|
||||
)
|
||||
r = await session.get(uri, headers=headers)
|
||||
|
||||
r.raise_for_status()
|
||||
result = r.text
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
@@ -29,7 +27,7 @@ from platformio.compat import get_locale_encoding, is_bytes
|
||||
from platformio.home import helpers
|
||||
|
||||
|
||||
class MultiThreadingStdStream(object):
|
||||
class MultiThreadingStdStream:
|
||||
def __init__(self, parent_stream):
|
||||
self._buffers = {threading.get_ident(): parent_stream}
|
||||
|
||||
@@ -94,10 +92,10 @@ class PIOCoreRPC:
|
||||
# fall-back to subprocess method
|
||||
result = await PIOCoreRPC._call_subprocess(args, options)
|
||||
return PIOCoreRPC._process_result(result, to_json)
|
||||
except Exception as e: # pylint: disable=bare-except
|
||||
except Exception as exc: # pylint: disable=bare-except
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4003, message="PIO Core Call Error", data=str(e)
|
||||
)
|
||||
code=4003, message="PIO Core Call Error", data=str(exc)
|
||||
) from exc
|
||||
|
||||
@staticmethod
|
||||
async def _call_subprocess(args, options):
|
||||
@@ -139,8 +137,8 @@ class PIOCoreRPC:
|
||||
return text
|
||||
try:
|
||||
return json.loads(out)
|
||||
except ValueError as e:
|
||||
click.secho("%s => `%s`" % (e, out), fg="red", err=True)
|
||||
except ValueError as exc:
|
||||
click.secho("%s => `%s`" % (exc, out), fg="red", err=True)
|
||||
# if PIO Core prints unhandled warnings
|
||||
for line in out.split("\n"):
|
||||
line = line.strip()
|
||||
@@ -150,4 +148,4 @@ class PIOCoreRPC:
|
||||
return json.loads(line)
|
||||
except ValueError:
|
||||
pass
|
||||
raise e
|
||||
raise exc
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
@@ -26,8 +24,8 @@ from platformio.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectError
|
||||
from platformio.project.generator import ProjectGenerator
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
from platformio.project.integration.generator import ProjectGenerator
|
||||
from platformio.project.options import get_config_options_schema
|
||||
|
||||
|
||||
@@ -247,7 +245,7 @@ class ProjectRPC:
|
||||
if not isinstance(platforms, list):
|
||||
platforms = [platforms]
|
||||
c_based_platforms = ["intel_mcs51", "ststm8"]
|
||||
is_cpp_project = not (set(platforms) & set(c_based_platforms))
|
||||
is_cpp_project = not set(platforms) & set(c_based_platforms)
|
||||
except exception.PlatformioException:
|
||||
pass
|
||||
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import math
|
||||
import os
|
||||
import socket
|
||||
from urllib.parse import urljoin
|
||||
@@ -21,10 +20,12 @@ from urllib.parse import urljoin
|
||||
import requests.adapters
|
||||
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
|
||||
|
||||
from platformio import __check_internet_hosts__, __default_requests_timeout__, app, util
|
||||
from platformio import __check_internet_hosts__, app, util
|
||||
from platformio.cache import ContentCache, cleanup_content_cache
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
|
||||
class HTTPClientError(PlatformioException):
|
||||
def __init__(self, message, response=None):
|
||||
@@ -45,27 +46,39 @@ class InternetIsOffline(UserSideException):
|
||||
)
|
||||
|
||||
|
||||
class EndpointSession(requests.Session):
|
||||
def __init__(self, base_url, *args, **kwargs):
|
||||
class HTTPSession(requests.Session):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._x_base_url = kwargs.pop("x_base_url") if "x_base_url" in kwargs else None
|
||||
super().__init__(*args, **kwargs)
|
||||
self.base_url = base_url
|
||||
self.headers.update({"User-Agent": app.get_user_agent()})
|
||||
self.verify = app.get_setting("enable_proxy_strict_ssl")
|
||||
|
||||
def request( # pylint: disable=signature-differs,arguments-differ
|
||||
self, method, url, *args, **kwargs
|
||||
):
|
||||
# print(self.base_url, method, url, args, kwargs)
|
||||
return super().request(method, urljoin(self.base_url, url), *args, **kwargs)
|
||||
# print("HTTPSession::request", self._x_base_url, method, url, args, kwargs)
|
||||
if "timeout" not in kwargs:
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
return super().request(
|
||||
method,
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
class EndpointSessionIterator(object):
|
||||
class HTTPSessionIterator:
|
||||
def __init__(self, endpoints):
|
||||
if not isinstance(endpoints, list):
|
||||
endpoints = [endpoints]
|
||||
self.endpoints = endpoints
|
||||
self.endpoints_iter = iter(endpoints)
|
||||
# https://urllib3.readthedocs.io/en/stable/reference/urllib3.util.html
|
||||
self.retry = Retry(
|
||||
total=math.ceil(6 / len(self.endpoints)),
|
||||
backoff_factor=1,
|
||||
total=5,
|
||||
backoff_factor=1, # [0, 2, 4, 8, 16] secs
|
||||
# method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
|
||||
status_forcelist=[413, 429, 500, 502, 503, 504],
|
||||
)
|
||||
@@ -75,16 +88,15 @@ class EndpointSessionIterator(object):
|
||||
|
||||
def __next__(self):
|
||||
base_url = next(self.endpoints_iter)
|
||||
session = EndpointSession(base_url)
|
||||
session.headers.update({"User-Agent": app.get_user_agent()})
|
||||
session = HTTPSession(x_base_url=base_url)
|
||||
adapter = requests.adapters.HTTPAdapter(max_retries=self.retry)
|
||||
session.mount(base_url, adapter)
|
||||
return session
|
||||
|
||||
|
||||
class HTTPClient(object):
|
||||
class HTTPClient:
|
||||
def __init__(self, endpoints):
|
||||
self._session_iter = EndpointSessionIterator(endpoints)
|
||||
self._session_iter = HTTPSessionIterator(endpoints)
|
||||
self._session = None
|
||||
self._next_session()
|
||||
|
||||
@@ -122,21 +134,14 @@ class HTTPClient(object):
|
||||
)
|
||||
kwargs["headers"] = headers
|
||||
|
||||
# set default timeout
|
||||
if "timeout" not in kwargs:
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
|
||||
while True:
|
||||
try:
|
||||
return getattr(self._session, method)(path, **kwargs)
|
||||
except (
|
||||
requests.exceptions.ConnectionError,
|
||||
requests.exceptions.Timeout,
|
||||
) as e:
|
||||
except requests.exceptions.RequestException as exc:
|
||||
try:
|
||||
self._next_session()
|
||||
except: # pylint: disable=bare-except
|
||||
raise HTTPClientError(str(e))
|
||||
except Exception as exc2:
|
||||
raise HTTPClientError(str(exc2)) from exc
|
||||
|
||||
def fetch_json_data(self, method, path, **kwargs):
|
||||
if method not in ("get", "head", "options"):
|
||||
@@ -204,13 +209,8 @@ def ensure_internet_on(raise_exception=False):
|
||||
|
||||
|
||||
def fetch_remote_content(*args, **kwargs):
|
||||
kwargs["headers"] = kwargs.get("headers", {})
|
||||
if "User-Agent" not in kwargs["headers"]:
|
||||
kwargs["headers"]["User-Agent"] = app.get_user_agent()
|
||||
|
||||
if "timeout" not in kwargs:
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
|
||||
r = requests.get(*args, **kwargs)
|
||||
r.raise_for_status()
|
||||
return r.text
|
||||
with HTTPSession() as s:
|
||||
r = s.get(*args, **kwargs)
|
||||
r.raise_for_status()
|
||||
r.close()
|
||||
return r.text
|
||||
|
||||
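With the shared HTTPSession above, relative request paths are joined against x_base_url and a default timeout is injected when none is given. A short sketch of that behaviour; the base URL and path below are placeholders, not endpoints defined in this diff:

# Sketch: relative paths resolve against x_base_url; default timeout is applied.
from platformio.http import HTTPSession

with HTTPSession(x_base_url="https://api.example.org") as session:
    response = session.get("/v2/status")  # joined to https://api.example.org/v2/status
    response.raise_for_status()
    print(response.text)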
@@ -32,9 +32,8 @@ from platformio.package.version import pepver_to_semver
|
||||
from platformio.system.prune import calculate_unnecessary_system_data
|
||||
|
||||
|
||||
def on_platformio_start(ctx, force, caller):
|
||||
def on_platformio_start(ctx, caller):
|
||||
app.set_session_var("command_ctx", ctx)
|
||||
app.set_session_var("force_option", force)
|
||||
set_caller(caller)
|
||||
telemetry.on_command()
|
||||
|
||||
@@ -79,7 +78,7 @@ def set_caller(caller=None):
|
||||
return app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
class Upgrader(object):
|
||||
class Upgrader:
|
||||
def __init__(self, from_version, to_version):
|
||||
self.from_version = pepver_to_semver(from_version)
|
||||
self.to_version = pepver_to_semver(to_version)
|
||||
@@ -126,7 +125,7 @@ class Upgrader(object):
|
||||
|
||||
|
||||
def after_upgrade(ctx):
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
last_version = app.get_state_item("last_version", "0.0.0")
|
||||
if last_version == __version__:
|
||||
return
|
||||
@@ -222,7 +221,7 @@ def check_platformio_upgrade():
|
||||
if pepver_to_semver(latest_version) <= pepver_to_semver(__version__):
|
||||
return
|
||||
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
|
||||
click.echo("")
|
||||
click.echo("*" * terminal_width)
|
||||
@@ -267,7 +266,7 @@ def check_prune_system():
|
||||
if (unnecessary_size / 1024) < threshold_mb:
|
||||
return
|
||||
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
click.echo()
|
||||
click.echo("*" * terminal_width)
|
||||
click.secho(
|
||||
|
||||
@@ -67,7 +67,7 @@ def package_exec_cmd(obj, package, call, args):
|
||||
if force_click_stream:
|
||||
click.echo(result.stdout.decode().strip(), err=result.returncode != 0)
|
||||
except Exception as exc:
|
||||
raise UserSideException(exc)
|
||||
raise UserSideException(exc) from exc
|
||||
|
||||
if result and result.returncode != 0:
|
||||
raise ReturnErrorCode(result.returncode)
|
||||
|
||||
@@ -23,7 +23,9 @@ from platformio.package.exception import UnknownPackageError
 from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.manager.platform import PlatformPackageManager
 from platformio.package.manager.tool import ToolPackageManager
-from platformio.package.meta import PackageSpec
+from platformio.package.meta import PackageCompatibility, PackageSpec
+from platformio.platform.exception import UnknownPlatform
+from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig
 from platformio.project.savedeps import pkg_to_save_spec, save_project_dependencies
 from platformio.test.result import TestSuite

@@ -59,7 +61,7 @@ from platformio.test.runners.factory import TestRunnerFactory
 @click.option("-f", "--force", is_flag=True, help="Reinstall package if it exists")
 @click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
 def package_install_cmd(**options):
-    if options.get("global"):
+    if options.get("global") or options.get("storage_dir"):
         install_global_dependencies(options)
     else:
         install_project_dependencies(options)

@@ -100,9 +102,7 @@ def install_project_dependencies(options):
         if environments and env not in environments:
             continue
         if not options.get("silent"):
-            click.echo(
-                "Resolving %s environment packages..." % click.style(env, fg="cyan")
-            )
+            click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
         already_up_to_date = not install_project_env_dependencies(env, options)
         if not options.get("silent") and already_up_to_date:
             click.secho("Already up-to-date.", fg="green")

@@ -204,8 +204,24 @@ def _install_project_env_libraries(project_env, options):
     _uninstall_project_unused_libdeps(project_env, options)
     already_up_to_date = not options.get("force")
     config = ProjectConfig.get_instance()
+
+    compatibility_qualifiers = {}
+    if config.get(f"env:{project_env}", "platform"):
+        try:
+            p = PlatformFactory.new(config.get(f"env:{project_env}", "platform"))
+            compatibility_qualifiers["platforms"] = [p.name]
+        except UnknownPlatform:
+            pass
+    if config.get(f"env:{project_env}", "framework"):
+        compatibility_qualifiers["frameworks"] = config.get(
+            f"env:{project_env}", "framework"
+        )
+
     env_lm = LibraryPackageManager(
-        os.path.join(config.get("platformio", "libdeps_dir"), project_env)
+        os.path.join(config.get("platformio", "libdeps_dir"), project_env),
+        compatibility=PackageCompatibility(**compatibility_qualifiers)
+        if compatibility_qualifiers
+        else None,
     )
     private_lm = LibraryPackageManager(
         os.path.join(config.get("platformio", "lib_dir"))
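The new block above collects the environment's platform and framework into qualifiers and wraps them in `PackageCompatibility` so the library manager can filter registry results. The sketch below is a simplified stand-in for that idea, not PlatformIO's actual `platformio.package.meta.PackageCompatibility` implementation:

```python
# Illustrative stand-in for the PackageCompatibility idea used above;
# the real class lives in platformio.package.meta and differs in detail.
class Compatibility:
    def __init__(self, platforms=None, frameworks=None):
        self.platforms = platforms or []
        self.frameworks = frameworks or []

    def to_search_qualifiers(self):
        # Only non-empty qualifiers are worth sending to a registry search
        qualifiers = {}
        if self.platforms:
            qualifiers["platforms"] = self.platforms
        if self.frameworks:
            qualifiers["frameworks"] = self.frameworks
        return qualifiers

    def is_compatible(self, other):
        # An empty field acts as a wildcard on either side
        def overlaps(a, b):
            return not a or not b or bool(set(a) & set(b))

        return overlaps(self.platforms, other.platforms) and overlaps(
            self.frameworks, other.frameworks
        )


env_compat = Compatibility(platforms=["espressif32"], frameworks=["arduino"])
dep_compat = Compatibility(platforms=["espressif32", "ststm32"])
assert dep_compat.is_compatible(env_compat)
print(env_compat.to_search_qualifiers())
```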
@@ -163,9 +163,7 @@ def list_project_packages(options):
     for env in config.envs():
         if environments and env not in environments:
             continue
-        click.echo(
-            "Resolving %s environment packages..." % click.style(env, fg="cyan")
-        )
+        click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
         found = False
         if not only_packages or only_platform_packages:
             _found = print_project_env_platform_packages(env, options)
@@ -39,7 +39,7 @@ def package_pack_cmd(package, output):
         ManifestSchema().load_manifest(
             ManifestParserFactory.new_from_archive(archive_path).as_dict()
         )
-    except ManifestValidationError as e:
+    except ManifestValidationError as exc:
         os.remove(archive_path)
-        raise e
+        raise exc
    click.secho('Wrote a tarball to "%s"' % archive_path, fg="green")
@@ -22,6 +22,7 @@ from tabulate import tabulate

 from platformio import fs
 from platformio.account.client import AccountClient
+from platformio.compat import isascii
 from platformio.exception import UserSideException
 from platformio.package.manifest.parser import ManifestParserFactory
 from platformio.package.manifest.schema import ManifestSchema

@@ -36,8 +37,8 @@ def validate_datetime(ctx, param, value):  # pylint: disable=unused-argument
         return value
     try:
         datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
-    except ValueError as e:
-        raise click.BadParameter(e)
+    except ValueError as exc:
+        raise click.BadParameter(exc)
     return value

@@ -71,14 +72,21 @@ def validate_datetime(ctx, param, value):  # pylint: disable=unused-argument
     help="Notify by email when package is processed",
 )
 @click.option(
-    "--non-interactive",
+    "--no-interactive",
     is_flag=True,
     help="Do not show interactive prompt",
 )
+@click.option(
+    "--non-interactive",
+    is_flag=True,
+    help="Do not show interactive prompt",
+    hidden=True,
+)
 def package_publish_cmd(  # pylint: disable=too-many-arguments, too-many-locals
-    package, owner, type_, released_at, private, notify, non_interactive
+    package, owner, type_, released_at, private, notify, no_interactive, non_interactive
 ):
     click.secho("Preparing a package...", fg="cyan")
+    no_interactive = no_interactive or non_interactive
     owner = owner or AccountClient().get_logged_username()
     do_not_pack = (
         not os.path.isdir(package)

@@ -118,7 +126,7 @@ def package_publish_cmd(  # pylint: disable=too-many-arguments, too-many-locals
     # look for duplicates
     check_package_duplicates(owner, type_, name, version, manifest.get("system"))

-    if not non_interactive:
+    if not no_interactive:
         click.confirm(
             "Are you sure you want to publish the %s %s to the registry?\n"
             % (

@@ -148,7 +156,7 @@ def package_publish_cmd(  # pylint: disable=too-many-arguments, too-many-locals
 def check_archive_file_names(archive_path):
     with tarfile.open(archive_path, mode="r:gz") as tf:
         for name in tf.getnames():
-            if not name.isascii():
+            if not isascii(name) or not name.isprintable():
                 click.secho(
                     f"Warning! The `{name}` file contains non-ASCII chars and can "
                     "lead to the unpacking issues on a user machine",
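The `--non-interactive` to `--no-interactive` rename keeps the old spelling working by registering it as a second, hidden option and folding the two values together inside the command. A self-contained sketch of that pattern with `click` (command and prompt text are illustrative, not PlatformIO's CLI):

```python
import click


@click.command()
@click.option("--no-interactive", is_flag=True, help="Do not show interactive prompt")
# Deprecated spelling: still accepted, but hidden from --help output
@click.option("--non-interactive", is_flag=True, hidden=True)
def publish(no_interactive, non_interactive):
    # Fold the legacy flag into the new one so the rest of the command
    # only has to check a single variable
    no_interactive = no_interactive or non_interactive
    if not no_interactive:
        click.confirm("Publish to the registry?", abort=True)
    click.echo("Published")


if __name__ == "__main__":
    publish()
```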
@@ -59,14 +59,6 @@ def package_show_cmd(spec, pkg_type):
             )
         )

-    click.echo()
-    type_plural = "libraries" if data["type"] == "library" else (data["type"] + "s")
-    click.secho(
-        "https://registry.platformio.org/%s/%s/%s"
-        % (type_plural, data["owner"]["username"], quote(data["name"])),
-        fg="blue",
-    )
-
     # Description
     click.echo()
     click.echo(data["description"])

@@ -87,7 +79,17 @@ def package_show_cmd(spec, pkg_type):
         ("frameworks", "Compatible Frameworks"),
         ("keywords", "Keywords"),
     ]
-    extra = []
+    type_plural = "libraries" if data["type"] == "library" else (data["type"] + "s")
+    extra = [
+        (
+            "Registry",
+            click.style(
+                "https://registry.platformio.org/%s/%s/%s"
+                % (type_plural, data["owner"]["username"], quote(data["name"])),
+                fg="blue",
+            ),
+        )
+    ]
     for key, title in fields:
         if "." in key:
             k1, k2 = key.split(".")

@@ -127,7 +129,11 @@ def fetch_package_data(spec, pkg_type=None):
         return client.get_package(
             pkg_type, spec.owner, spec.name, version=spec.requirements
         )
-    qualifiers = dict(names=spec.name.lower())
+    qualifiers = {}
+    if spec.id:
+        qualifiers["ids"] = str(spec.id)
+    if spec.name:
+        qualifiers["names"] = spec.name.lower()
    if pkg_type:
        qualifiers["types"] = pkg_type
    if spec.owner:
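The registry link shown above is assembled from the package type, owner and URL-escaped name. A short illustration with assumed example values (the real `data` comes from the registry API response):

```python
from urllib.parse import quote

# Assumed example values for illustration only
data = {"type": "library", "owner": {"username": "bblanchon"}, "name": "ArduinoJson"}

type_plural = "libraries" if data["type"] == "library" else (data["type"] + "s")
url = "https://registry.platformio.org/%s/%s/%s" % (
    type_plural,
    data["owner"]["username"],
    quote(data["name"]),  # escape spaces and other unsafe characters
)
print(url)
```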
@@ -92,9 +92,7 @@ def uninstall_project_dependencies(options):
         if environments and env not in environments:
             continue
         if not options["silent"]:
-            click.echo(
-                "Resolving %s environment packages..." % click.style(env, fg="cyan")
-            )
+            click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
         already_up_to_date = not uninstall_project_env_dependencies(env, options)
         if not options["silent"] and already_up_to_date:
             click.secho("Already up-to-date.", fg="green")
@@ -95,9 +95,7 @@ def update_project_dependencies(options):
         if environments and env not in environments:
             continue
         if not options["silent"]:
-            click.echo(
-                "Resolving %s environment packages..." % click.style(env, fg="cyan")
-            )
+            click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
         already_up_to_date = not update_project_env_dependencies(env, options)
         if not options["silent"] and already_up_to_date:
             click.secho("Already up-to-date.", fg="green")
@@ -13,36 +13,35 @@
 # limitations under the License.

 import io
 import math
-from email.utils import parsedate_tz
+from email.utils import parsedate
 from os.path import getsize, join
 from time import mktime

 import click
-import requests

-from platformio import __default_requests_timeout__, app, fs
+from platformio import fs
+from platformio.compat import is_terminal
+from platformio.http import HTTPSession
 from platformio.package.exception import PackageException


-class FileDownloader(object):
+class FileDownloader:
     def __init__(self, url, dest_dir=None):
-        self._request = None
+        self._http_session = HTTPSession()
+        self._http_response = None
         # make connection
-        self._request = requests.get(
+        self._http_response = self._http_session.get(
             url,
             stream=True,
-            headers={"User-Agent": app.get_user_agent()},
-            timeout=__default_requests_timeout__,
         )
-        if self._request.status_code != 200:
+        if self._http_response.status_code != 200:
             raise PackageException(
                 "Got the unrecognized status code '{0}' when downloaded {1}".format(
-                    self._request.status_code, url
+                    self._http_response.status_code, url
                 )
             )

-        disposition = self._request.headers.get("content-disposition")
+        disposition = self._http_response.headers.get("content-disposition")
         if disposition and "filename=" in disposition:
             self._fname = (
                 disposition[disposition.index("filename=") + 9 :]

@@ -63,32 +62,55 @@ class FileDownloader(object):
         return self._destination

     def get_lmtime(self):
-        return self._request.headers.get("last-modified")
+        return self._http_response.headers.get("last-modified")

     def get_size(self):
-        if "content-length" not in self._request.headers:
+        if "content-length" not in self._http_response.headers:
             return -1
-        return int(self._request.headers["content-length"])
+        return int(self._http_response.headers["content-length"])

     def start(self, with_progress=True, silent=False):
         label = "Downloading"
-        itercontent = self._request.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
-        fp = open(self._destination, "wb")  # pylint: disable=consider-using-with
+        file_size = self.get_size()
+        itercontent = self._http_response.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
         try:
-            if not with_progress or self.get_size() == -1:
-                if not silent:
-                    click.echo("%s..." % label)
-                for chunk in itercontent:
-                    if chunk:
-                        fp.write(chunk)
-            else:
-                chunks = int(math.ceil(self.get_size() / float(io.DEFAULT_BUFFER_SIZE)))
-                with click.progressbar(length=chunks, label=label) as pb:
-                    for _ in pb:
-                        fp.write(next(itercontent))
+            with open(self._destination, "wb") as fp:
+                if file_size == -1 or not with_progress or silent:
+                    if not silent:
+                        click.echo(f"{label}...")
+                    for chunk in itercontent:
+                        fp.write(chunk)
+
+                elif not is_terminal():
+                    click.echo(f"{label} 0%", nl=False)
+                    print_percent_step = 10
+                    printed_percents = 0
+                    downloaded_size = 0
+                    for chunk in itercontent:
+                        fp.write(chunk)
+                        downloaded_size += len(chunk)
+                        if (downloaded_size / file_size * 100) >= (
+                            printed_percents + print_percent_step
+                        ):
+                            printed_percents += print_percent_step
+                            click.echo(f" {printed_percents}%", nl=False)
+                    click.echo("")
+
+                else:
+                    with click.progressbar(
+                        length=file_size,
+                        iterable=itercontent,
+                        label=label,
+                        update_min_steps=min(
+                            256 * 1024, file_size / 100
+                        ),  # every 256Kb or less,
+                    ) as pb:
+                        for chunk in pb:
+                            pb.update(len(chunk))
+                            fp.write(chunk)
         finally:
-            fp.close()
-            self._request.close()
+            self._http_response.close()
+            self._http_session.close()

         if self.get_lmtime():
             self._preserve_filemtime(self.get_lmtime())

@@ -132,10 +154,10 @@ class FileDownloader(object):
         return True

     def _preserve_filemtime(self, lmdate):
-        timedata = parsedate_tz(lmdate)
-        lmtime = mktime(timedata[:9])
+        lmtime = mktime(parsedate(lmdate))
         fs.change_filemtime(self._destination, lmtime)

     def __del__(self):
-        if self._request:
-            self._request.close()
+        self._http_session.close()
+        if self._http_response:
+            self._http_response.close()
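For readers unfamiliar with the streaming pattern `FileDownloader` uses, here is a self-contained sketch of a chunked download with a `click` progress bar and Last-Modified preservation. It talks to `requests` directly instead of PlatformIO's `HTTPSession` wrapper, and the URL and destination are placeholders:

```python
import io
import os
from email.utils import parsedate
from time import mktime

import click
import requests

url = "https://example.com/archive.tar.gz"  # placeholder
destination = "archive.tar.gz"

response = requests.get(url, stream=True, timeout=30)
response.raise_for_status()

total = int(response.headers.get("content-length", -1))
with open(destination, "wb") as fp:
    if total == -1:
        # Unknown size: no meaningful progress bar, just stream the chunks
        for chunk in response.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE):
            fp.write(chunk)
    else:
        with click.progressbar(
            response.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE),
            length=total,
            label="Downloading",
        ) as bar:
            for chunk in bar:
                bar.update(len(chunk))
                fp.write(chunk)
response.close()

# Preserve the server-side modification time, mirroring _preserve_filemtime()
last_modified = response.headers.get("last-modified")
if last_modified:
    os.utime(destination, (mktime(parsedate(last_modified)),) * 2)
```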
@@ -44,7 +44,7 @@ class LockFileTimeoutError(PlatformioException):
     pass


-class LockFile(object):
+class LockFile:
     def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
         self.timeout = timeout
         self.delay = delay

@@ -72,10 +72,10 @@ class LockFile(object):
             msvcrt.locking(  # pylint: disable=used-before-assignment
                 self._fp.fileno(), msvcrt.LK_NBLCK, 1
             )
-        except (BlockingIOError, IOError):
+        except (BlockingIOError, IOError) as exc:
             self._fp.close()
             self._fp = None
-            raise LockFileExists
+            raise LockFileExists from exc
         return True

     def _unlock(self):
@@ -25,7 +25,7 @@ from platformio.package.download import FileDownloader
 from platformio.package.lockfile import LockFile


-class PackageManagerDownloadMixin(object):
+class PackageManagerDownloadMixin:

     DOWNLOAD_CACHE_EXPIRE = 86400 * 30  # keep package in a local cache for 1 month

@@ -70,7 +70,7 @@ class PackageManagerDownloadMixin(object):
             fd = FileDownloader(url)
             fd.set_destination(tmp_path)
             fd.start(with_progress=with_progress, silent=silent)
-        except IOError as e:
+        except IOError as exc:
             raise_error = not with_progress
             if with_progress:
                 try:

@@ -86,7 +86,7 @@ class PackageManagerDownloadMixin(object):
                        fg="red",
                    )
                )
-                raise e
+                raise exc
        if checksum:
            fd.verify(checksum)
        os.close(tmp_fd)
@@ -21,12 +21,12 @@ import click

 from platformio import app, compat, fs, util
 from platformio.package.exception import PackageException, UnknownPackageError
-from platformio.package.meta import PackageItem
+from platformio.package.meta import PackageCompatibility, PackageItem
 from platformio.package.unpack import FileUnpacker
 from platformio.package.vcsclient import VCSClientFactory


-class PackageManagerInstallMixin(object):
+class PackageManagerInstallMixin:

     _INSTALL_HISTORY = None  # avoid circle dependencies

@@ -36,9 +36,9 @@ class PackageManagerInstallMixin(object):
         try:
             with FileUnpacker(src) as fu:
                 return fu.unpack(dst, with_progress=with_progress)
-        except IOError as e:
+        except IOError as exc:
             if not with_progress:
-                raise e
+                raise exc
             with FileUnpacker(src) as fu:
                 return fu.unpack(dst, with_progress=False)

@@ -55,9 +55,9 @@ class PackageManagerInstallMixin(object):
     def _install(
         self,
         spec,
-        search_qualifiers=None,
         skip_dependencies=False,
         force=False,
+        compatibility: PackageCompatibility = None,
     ):
         spec = self.ensure_spec(spec)

@@ -97,7 +97,12 @@ class PackageManagerInstallMixin(object):
         if spec.external:
             pkg = self.install_from_uri(spec.uri, spec)
         else:
-            pkg = self.install_from_registry(spec, search_qualifiers)
+            pkg = self.install_from_registry(
+                spec,
+                search_qualifiers=compatibility.to_search_qualifiers()
+                if compatibility
+                else None,
+            )

         if not pkg or not pkg.metadata:
             raise PackageException(

@@ -137,20 +142,29 @@ class PackageManagerInstallMixin(object):
             if dependency.get("owner"):
                 self.log.warning(
                     click.style(
-                        "Warning! Could not install dependency %s for package '%s'"
-                        % (dependency, pkg.metadata.name),
+                        "Warning! Could not install `%s` dependency "
+                        "for the`%s` package" % (dependency, pkg.metadata.name),
                         fg="yellow",
                     )
                 )

     def install_dependency(self, dependency):
-        spec = self.dependency_to_spec(dependency)
-        search_qualifiers = {
-            key: value
-            for key, value in dependency.items()
-            if key in ("authors", "platforms", "frameworks")
-        }
-        return self._install(spec, search_qualifiers=search_qualifiers or None)
+        dependency_compatibility = PackageCompatibility.from_dependency(dependency)
+        if self.compatibility and not dependency_compatibility.is_compatible(
+            self.compatibility
+        ):
+            self.log.debug(
+                click.style(
+                    "Skip incompatible `%s` dependency with `%s`"
+                    % (dependency, self.compatibility),
+                    fg="yellow",
+                )
+            )
+            return None
+        return self._install(
+            spec=self.dependency_to_spec(dependency),
+            compatibility=dependency_compatibility,
+        )

     def install_from_uri(self, uri, spec, checksum=None):
         spec = self.ensure_spec(spec)
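`install_dependency()` now skips dependencies whose declared platforms or frameworks cannot match the manager's own compatibility. A simplified, self-contained illustration of that filtering idea (names and structures are illustrative only):

```python
def is_compatible(dependency, env_platforms, env_frameworks):
    """Return True if the dependency's declared qualifiers overlap the environment's."""

    def overlaps(declared, wanted):
        # A missing declaration or "*" acts as a wildcard
        if not declared or "*" in declared:
            return True
        return bool(set(declared) & set(wanted))

    return overlaps(dependency.get("platforms"), env_platforms) and overlaps(
        dependency.get("frameworks"), env_frameworks
    )


dependencies = [
    {"name": "ArduinoJson", "frameworks": ["arduino"]},
    {"name": "some-zephyr-lib", "frameworks": ["zephyr"]},
    {"name": "portable-lib"},  # no qualifiers -> always compatible
]

for dep in dependencies:
    if not is_compatible(dep, ["espressif32"], ["arduino"]):
        print("Skip incompatible dependency:", dep["name"])
        continue
    print("Install:", dep["name"])
```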
@@ -18,7 +18,7 @@ from platformio import fs
 from platformio.package.meta import PackageItem, PackageSpec


-class PackageManagerLegacyMixin(object):
+class PackageManagerLegacyMixin:
     def build_legacy_spec(self, pkg_dir):
         # find src manifest
         src_manifest_name = ".piopkgmanager.json"
@@ -23,7 +23,7 @@ from platformio.registry.client import RegistryClient
 from platformio.registry.mirror import RegistryFileMirrorIterator


-class PackageManagerRegistryMixin(object):
+class PackageManagerRegistryMixin:
     def install_from_registry(self, spec, search_qualifiers=None):
         if spec.owner and spec.name and not search_qualifiers:
             package = self.fetch_registry_package(spec)

@@ -41,7 +41,7 @@ class PackageManagerRegistryMixin(object):
         if not package or not version:
             raise UnknownPackageError(spec.humanize())

-        pkgfile = self._pick_compatible_pkg_file(version["files"]) if version else None
+        pkgfile = self.pick_compatible_pkg_file(version["files"]) if version else None
         if not pkgfile:
             raise UnknownPackageError(spec.humanize())

@@ -56,9 +56,9 @@ class PackageManagerRegistryMixin(object):
                 ),
                 checksum or pkgfile["checksum"]["sha256"],
             )
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as exc:  # pylint: disable=broad-except
             self.log.warning(
-                click.style("Warning! Package Mirror: %s" % e, fg="yellow")
+                click.style("Warning! Package Mirror: %s" % exc, fg="yellow")
             )
             self.log.warning(
                 click.style("Looking for another mirror...", fg="yellow")

@@ -162,7 +162,7 @@ class PackageManagerRegistryMixin(object):
             time.sleep(1)
         return (None, None)

-    def filter_incompatible_registry_versions(self, versions, spec=None):
+    def get_compatible_registry_versions(self, versions, spec=None, custom_system=None):
         assert not spec or isinstance(spec, PackageSpec)
         result = []
         for version in versions:

@@ -170,22 +170,27 @@ class PackageManagerRegistryMixin(object):
             if spec and spec.requirements and semver not in spec.requirements:
                 continue
             if not any(
-                self.is_system_compatible(f.get("system")) for f in version["files"]
+                self.is_system_compatible(f.get("system"), custom_system=custom_system)
+                for f in version["files"]
             ):
                 continue
             result.append(version)
         return result

-    def pick_best_registry_version(self, versions, spec=None):
+    def pick_best_registry_version(self, versions, spec=None, custom_system=None):
         best = None
-        for version in self.filter_incompatible_registry_versions(versions, spec):
+        for version in self.get_compatible_registry_versions(
+            versions, spec, custom_system
+        ):
             semver = cast_version_to_semver(version["name"])
             if not best or (semver > cast_version_to_semver(best["name"])):
                 best = version
         return best

-    def _pick_compatible_pkg_file(self, version_files):
+    def pick_compatible_pkg_file(self, version_files, custom_system=None):
         for item in version_files:
-            if self.is_system_compatible(item.get("system")):
+            if self.is_system_compatible(
+                item.get("system"), custom_system=custom_system
+            ):
                 return item
         return None
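The renamed helpers above reduce to "drop versions whose files do not match the requested system, then keep the highest semantic version". A self-contained sketch of that logic with a deliberately naive version parser (PlatformIO itself relies on `cast_version_to_semver` from its utilities):

```python
def parse_version(name):
    # Naive parse: good enough for plain "1.2.3"-style names in this sketch
    return tuple(int(part) for part in name.split("."))


def pick_best_version(versions, system="linux_x86_64"):
    best = None
    for version in versions:
        files = version.get("files", [])
        # A version qualifies if any of its files targets our system (or "*")
        if not any(
            not f.get("system") or "*" in f["system"] or system in f["system"]
            for f in files
        ):
            continue
        if best is None or parse_version(version["name"]) > parse_version(best["name"]):
            best = version
    return best


versions = [
    {"name": "1.2.0", "files": [{"system": ["*"]}]},
    {"name": "1.3.0", "files": [{"system": ["windows_amd64"]}]},
    {"name": "1.2.5", "files": [{"system": ["linux_x86_64"]}]},
]
print(pick_best_version(versions)["name"])  # -> 1.2.5 (1.3.0 is incompatible)
```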
@@ -20,7 +20,7 @@ from platformio.package.exception import PackageException
 from platformio.package.meta import PackageItem, PackageSpec


-class PackageManagerSymlinkMixin(object):
+class PackageManagerSymlinkMixin:
     @staticmethod
     def is_symlink(path):
         return path and path.endswith(".pio-link") and os.path.isfile(path)
@@ -22,7 +22,7 @@ from platformio.package.exception import UnknownPackageError
 from platformio.package.meta import PackageItem, PackageSpec


-class PackageManagerUninstallMixin(object):
+class PackageManagerUninstallMixin:
     def uninstall(self, spec, skip_dependencies=False):
         try:
             self.lock()
@@ -21,7 +21,7 @@ from platformio.package.meta import PackageItem, PackageOutdatedResult, PackageS
 from platformio.package.vcsclient import VCSBaseException, VCSClientFactory


-class PackageManagerUpdateMixin(object):
+class PackageManagerUpdateMixin:
     def outdated(self, pkg, spec=None):
         assert isinstance(pkg, PackageItem)
         assert pkg.metadata
@@ -59,9 +59,10 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods,too-many-in
 ):
     _MEMORY_CACHE = {}

-    def __init__(self, pkg_type, package_dir):
+    def __init__(self, pkg_type, package_dir, compatibility=None):
         self.pkg_type = pkg_type
         self.package_dir = package_dir
+        self.compatibility = compatibility
         self.log = self._setup_logger()

         self._MEMORY_CACHE = {}

@@ -115,10 +116,10 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods,too-many-in
         self._MEMORY_CACHE.clear()

     @staticmethod
-    def is_system_compatible(value):
+    def is_system_compatible(value, custom_system=None):
         if not value or "*" in value:
             return True
-        return util.items_in_list(value, util.get_systype())
+        return util.items_in_list(value, custom_system or util.get_systype())

     @staticmethod
     def ensure_dir_exists(path):

@@ -187,9 +188,9 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods,too-many-in
                 result = ManifestParserFactory.new_from_file(item).as_dict()
                 self.memcache_set(cache_key, result)
                 return result
-            except ManifestException as e:
+            except ManifestException as exc:
                 if not PlatformioCLI.in_silence():
-                    self.log.warning(click.style(str(e), fg="yellow"))
+                    self.log.warning(click.style(str(exc), fg="yellow"))
         raise MissingPackageManifestError(", ".join(self.manifest_names))

     @staticmethod
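`is_system_compatible()` is a wildcard-aware membership test against a system type string such as `linux_x86_64`, and the new `custom_system` argument lets callers test against a system other than the one currently running. A simplified, self-contained sketch of that behaviour:

```python
import platform


def get_systype():
    # Rough approximation of PlatformIO's util.get_systype()
    return "%s_%s" % (platform.system().lower(), platform.machine().lower())


def is_system_compatible(value, custom_system=None):
    # No declaration, or an explicit "*", means "any system"
    if not value or "*" in value:
        return True
    if not isinstance(value, list):
        value = [value]
    return (custom_system or get_systype()) in value


assert is_system_compatible(None)
assert is_system_compatible(["*"])
assert is_system_compatible(["linux_x86_64"], custom_system="linux_x86_64")
assert not is_system_compatible(["windows_amd64"], custom_system="linux_x86_64")
```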
@@ -156,7 +156,7 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
             raise UserSideException(
                 "\n\nPlease ensure that the next packages are installed:\n\n"
                 "sudo apt install python3-dev libffi-dev libssl-dev\n"
-            )
+            ) from exc
         raise exc

     # build manifests
@@ -15,19 +15,21 @@
 import json
 import os

-from platformio.commands.lib.helpers import is_builtin_lib
+from platformio import util
 from platformio.package.exception import MissingPackageManifestError
 from platformio.package.manager.base import BasePackageManager
 from platformio.package.meta import PackageSpec, PackageType
+from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig


 class LibraryPackageManager(BasePackageManager):  # pylint: disable=too-many-ancestors
-    def __init__(self, package_dir=None):
+    def __init__(self, package_dir=None, **kwargs):
         super().__init__(
             PackageType.LIBRARY,
             package_dir
             or ProjectConfig.get_instance().get("platformio", "globallib_dir"),
+            **kwargs
         )

     @property

@@ -84,7 +86,39 @@ class LibraryPackageManager(BasePackageManager):  # pylint: disable=too-many-anc
         # skip built-in dependencies
         not_builtin_conds = [spec.external, spec.owner]
         if not any(not_builtin_conds):
-            not_builtin_conds.append(not is_builtin_lib(spec.name))
+            not_builtin_conds.append(not self.is_builtin_lib(spec.name))
         if any(not_builtin_conds):
             return super().install_dependency(dependency)
         return None
+
+    @staticmethod
+    @util.memoized(expire="60s")
+    def get_builtin_libs(storage_names=None):
+        # pylint: disable=import-outside-toplevel
+        from platformio.package.manager.platform import PlatformPackageManager
+
+        items = []
+        storage_names = storage_names or []
+        pm = PlatformPackageManager()
+        for pkg in pm.get_installed():
+            p = PlatformFactory.new(pkg)
+            for storage in p.get_lib_storages():
+                if storage_names and storage["name"] not in storage_names:
+                    continue
+                lm = LibraryPackageManager(storage["path"])
+                items.append(
+                    {
+                        "name": storage["name"],
+                        "path": storage["path"],
+                        "items": lm.legacy_get_installed(),
+                    }
+                )
+        return items
+
+    @classmethod
+    def is_builtin_lib(cls, name):
+        for storage in cls.get_builtin_libs():
+            for lib in storage["items"]:
+                if lib.get("name") == name:
+                    return True
+        return False
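The `@util.memoized(expire="60s")` decorator above caches the result of the relatively expensive built-in-library scan for a short time. Below is a simplified, self-contained sketch of such a time-limited memoization decorator; PlatformIO's own helper differs in details such as expiry-string parsing and cache invalidation:

```python
import functools
import time


def memoized(expire_seconds=60):
    """Cache a function's results, keyed by its arguments, for a limited time."""

    def decorator(func):
        cache = {}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            now = time.monotonic()
            if key in cache:
                timestamp, value = cache[key]
                if now - timestamp < expire_seconds:
                    return value
            value = func(*args, **kwargs)
            cache[key] = (now, value)
            return value

        return wrapper

    return decorator


@memoized(expire_seconds=60)
def scan_builtin_libs():
    print("expensive scan runs")
    return ["SPI", "Wire"]


scan_builtin_libs()  # prints "expensive scan runs"
scan_builtin_libs()  # served from the cache within 60 seconds
```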
Some files were not shown because too many files have changed in this diff.