Mirror of https://github.com/platformio/platformio-core.git (synced 2025-07-29 17:47:14 +02:00)
Compare commits
177 Commits
Commit range: fc12dda765 … ba5f61f92b
11
.github/workflows/core.yml
vendored
@@ -7,13 +7,8 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
|
||||
exclude:
|
||||
- os: macos-latest
|
||||
python-version: "3.6"
|
||||
- os: windows-latest
|
||||
python-version: "3.10"
|
||||
os: [ubuntu-20.04, windows-latest, macos-latest]
|
||||
python-version: ["3.6", "3.9", "3.11"]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -23,7 +18,7 @@ jobs:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
|
2
.github/workflows/deployment.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.9"
|
||||
|
||||
|
14
.github/workflows/docs.yml
vendored
@@ -7,13 +7,13 @@ jobs:
|
||||
name: Build Docs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: "recursive"
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.7
|
||||
python-version: 3.9
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
@@ -40,7 +40,7 @@ jobs:
|
||||
|
||||
- name: Save artifact
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: docs
|
||||
path: ./docs.tar.gz
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
steps:
|
||||
- name: Download artifact
|
||||
uses: actions/download-artifact@v2
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: docs
|
||||
- name: Unpack artifact
|
||||
@@ -65,7 +65,7 @@ jobs:
|
||||
mkdir ./${{ env.LATEST_DOCS_DIR }}
|
||||
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
|
||||
- name: Delete Artifact
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
uses: geekyeggo/delete-artifact@v2
|
||||
with:
|
||||
name: docs
|
||||
- name: Select Docs type
|
||||
@@ -78,7 +78,7 @@ jobs:
|
||||
fi
|
||||
- name: Checkout latest Docs
|
||||
continue-on-error: true
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: ${{ env.DOCS_REPO }}
|
||||
path: ${{ env.DOCS_DIR }}
|
||||
|
2
.github/workflows/examples.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.9"
|
||||
|
||||
|
8
.github/workflows/projects.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
- smartknob:
|
||||
repository: "scottbez1/smartknob"
|
||||
folder: "smartknob"
|
||||
config_dir: "smartknob/firmware"
|
||||
config_dir: "smartknob"
|
||||
env_name: "view"
|
||||
- espurna:
|
||||
repository: "xoseperez/espurna"
|
||||
@@ -32,7 +32,7 @@ jobs:
|
||||
repository: "1technophile/OpenMQTTGateway"
|
||||
folder: "OpenMQTTGateway"
|
||||
config_dir: "OpenMQTTGateway"
|
||||
env_name: "esp32-m5atom"
|
||||
env_name: "esp32-m5atom-lite"
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v3
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
run: pip install -U .
|
||||
|
||||
- name: Check out ${{ matrix.project.repository }}
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: "recursive"
|
||||
repository: ${{ matrix.project.repository }}
|
||||
|
@@ -8,4 +8,5 @@ disable=
|
||||
invalid-name,
|
||||
too-few-public-methods,
|
||||
consider-using-f-string,
|
||||
cyclic-import
|
||||
cyclic-import,
|
||||
use-dict-literal
|
||||
|
3
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# Code of Conduct
|
||||
|
||||
See https://piolabs.com/legal/code-of-conduct.html
|
48
HISTORY.rst
@@ -2,9 +2,11 @@ Release Notes
|
||||
=============
|
||||
|
||||
.. |PIOCONF| replace:: `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ configuration file
|
||||
.. |LIBRARYJSON| replace:: `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__
|
||||
.. |LDF| replace:: `LDF <https://docs.platformio.org/en/latest/librarymanager/ldf.html>`__
|
||||
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
|
||||
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
|
||||
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
|
||||
|
||||
.. _release_notes_6:
|
||||
|
||||
@@ -13,6 +15,50 @@ PlatformIO Core 6
|
||||
|
||||
**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**
|
||||
|
||||
6.1.7 (2023-05-08)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced a new ``--sample-code`` option to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command, which allows users to include sample code in the newly created project
|
||||
* Added validation for `project working environment names <https://docs.platformio.org/en/latest/projectconf/sections/env/index.html#working-env-name>`__ to ensure that they only contain lowercase letters ``a-z``, numbers ``0-9``, and special characters ``_`` (underscore) and ``-`` (hyphen)
|
||||
* Added the ability to show a detailed library dependency tree only in `verbose mode <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-v>`__, which can help you understand the relationship between libraries and troubleshoot issues more effectively (`issue #4517 <https://github.com/platformio/platformio-core/issues/4517>`_)
|
||||
* Added the ability to run only the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ when using the `pio run -t monitor <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, saving you time and resources by skipping the build process
|
||||
* Implemented a new feature to store device monitor logs in the project's ``logs`` folder, making it easier to access and review device monitor logs for your projects (`issue #4596 <https://github.com/platformio/platformio-core/issues/4596>`_)
|
||||
* Improved support for projects located on Windows network drives, including Network Shared Folder, Dropbox, OneDrive, Google Drive, and other similar services (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
|
||||
* Improved source file filtering functionality for the `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__ feature, making it easier to analyze only the code you need to
|
||||
* Upgraded the build engine to the latest version of SCons (4.5.2) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.5.2>`__)
|
||||
* Implemented a fix for shell injection vulnerabilities when converting INO files to CPP, ensuring your code is safe and secure (`issue #4532 <https://github.com/platformio/platformio-core/issues/4532>`_)
|
||||
* Restored the project generator for the `NetBeans IDE <https://docs.platformio.org/en/latest/integration/ide/netbeans.html>`__, providing you with more flexibility and options for your development workflow
|
||||
* Resolved installation issues with PIO Remote on Raspberry Pi and other small form-factor PCs (`issue #4425 <https://github.com/platformio/platformio-core/issues/4425>`_, `issue #4493 <https://github.com/platformio/platformio-core/issues/4493>`_, `issue #4607 <https://github.com/platformio/platformio-core/issues/4607>`_)
|
||||
* Resolved an issue where the `build_cache_dir <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/directory/build_cache_dir.html>`__ setting was not being recognized consistently across multiple environments (`issue #4574 <https://github.com/platformio/platformio-core/issues/4574>`_)
|
||||
* Resolved an issue where organization details could not be updated using the `pio org update <https://docs.platformio.org/en/latest/core/userguide/org/cmd_update.html>`__ command
|
||||
* Resolved an issue where the incorrect debugging environment was generated for VSCode in "Auto" mode (`issue #4597 <https://github.com/platformio/platformio-core/issues/4597>`_)
|
||||
* Resolved an issue where native tests would fail if a custom program name was specified (`issue #4546 <https://github.com/platformio/platformio-core/issues/4546>`_)
|
||||
* Resolved an issue where the PlatformIO |DEBUGGING| solution was not escaping the tool installation process into MI2 correctly (`issue #4565 <https://github.com/platformio/platformio-core/issues/4565>`_)
|
||||
* Resolved an issue where multiple targets were not executed sequentially (`issue #4604 <https://github.com/platformio/platformio-core/issues/4604>`_)
|
||||
* Resolved an issue where upgrading PlatformIO Core fails on Windows with Python 3.11 (`issue #4540 <https://github.com/platformio/platformio-core/issues/4540>`_)
|
||||
|
||||
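The 6.1.7 note above about validating project working environment names can be illustrated with a small standalone sketch. The regex below is an assumption written only for demonstration, based on the stated rule (lowercase letters a-z, digits 0-9, underscore, hyphen); it is not PlatformIO's actual validation code.

import re

# Hypothetical pattern matching the rule quoted in the 6.1.7 note above:
# lowercase letters, digits, "_" and "-" only. Not the real PlatformIO code.
ENV_NAME_RE = re.compile(r"^[a-z0-9_\-]+$")

for name in ("esp32-m5atom-lite", "native_test", "MyEnv", "env 1"):
    print(name, "->", bool(ENV_NAME_RE.match(name)))
# esp32-m5atom-lite -> True, native_test -> True, MyEnv -> False, env 1 -> False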
6.1.6 (2023-01-23)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.11
|
||||
* Added a new `name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/description.html>`__ configuration option to customize a project name (`pull #4498 <https://github.com/platformio/platformio-core/pull/4498>`_)
|
||||
* Made assets (templates, ``99-platformio-udev.rules``) part of Python's module (`issue #4458 <https://github.com/platformio/platformio-core/issues/4458>`_)
|
||||
* Updated `Clang-Tidy <https://docs.platformio.org/en/latest/plus/check-tools/clang-tidy.html>`__ check tool to v15.0.5 with new diagnostics and bugfixes
|
||||
* Removed dependency on the "zeroconf" package and install it only when a user lists mDNS devices (issue with zeroconf's LGPL license)
|
||||
* Show the real error message instead of "Can not remove temporary directory" when |PIOCONF| is broken (`issue #4480 <https://github.com/platformio/platformio-core/issues/4480>`_)
|
||||
* Fixed an issue with an incorrect test summary when a testcase name includes a colon (`issue #4508 <https://github.com/platformio/platformio-core/issues/4508>`_)
|
||||
* Fixed an issue when `extends <https://docs.platformio.org/en/latest/projectconf/sections/env/options/advanced/extends.html>`__ did not override options in the right order (`issue #4462 <https://github.com/platformio/platformio-core/issues/4462>`_)
|
||||
* Fixed an issue when `pio pkg list <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_list.html>`__ and `pio pkg uninstall <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_uninstall.html>`__ commands fail if there are circular dependencies in the |LIBRARYJSON| manifests (`issue #4475 <https://github.com/platformio/platformio-core/issues/4475>`_)
|
||||
|
||||
6.1.5 (2022-11-01)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added a new `enable_proxy_strict_ssl <https://docs.platformio.org/en/latest/core/userguide/cmd_settings.html>`__ setting to disable the proxy server certificate verification (`issue #4432 <https://github.com/platformio/platformio-core/issues/4432>`_)
|
||||
* Documented `PlatformIO Core Proxy Configuration <https://docs.platformio.org/en/latest/core/installation/proxy-configuration.html>`__
|
||||
* Speeded up device port finder by avoiding loading board HWIDs from development platforms
|
||||
* Improved caching of build metadata in debug mode
|
||||
* Fixed an issue when `pio pkg install --storage-dir <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html>`__ command requires PlatformIO project (`issue #4410 <https://github.com/platformio/platformio-core/issues/4410>`_)
|
||||
|
||||
6.1.4 (2022-08-12)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
@@ -37,7 +83,7 @@ PlatformIO Core 6
|
||||
* Export a ``PIO_UNIT_TESTING`` macro to the project source files and dependent libraries in the |UNITTESTING| mode
|
||||
* Improved detection of Windows architecture (`issue #4353 <https://github.com/platformio/platformio-core/issues/4353>`_)
|
||||
* Warn about unknown `device monitor filters <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #4362 <https://github.com/platformio/platformio-core/issues/4362>`_)
|
||||
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__ manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
|
||||
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the |LIBRARYJSON| manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
|
||||
* Fixed an issue when the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command didn't work with Python 3.6 (`issue #4352 <https://github.com/platformio/platformio-core/issues/4352>`_)
|
||||
|
||||
6.1.1 (2022-07-11)
|
||||
|
@@ -1 +0,0 @@
|
||||
include LICENSE
|
34
SECURITY.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We are committed to ensuring the security and protection of PlatformIO Core.
|
||||
To this end, we support only the following versions:
|
||||
|
||||
| Version | Supported |
|
||||
| ------- | ------------------ |
|
||||
| 6.1.x | :white_check_mark: |
|
||||
| < 6.1 | :x: |
|
||||
|
||||
Unsupported versions of the PlatformIO Core may have known vulnerabilities or security issues that could compromise the security of our organization's systems and data.
|
||||
Therefore, it is important that all developers use only supported versions of the PlatformIO Core.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We take the security of our systems and data very seriously. We encourage responsible disclosure of any vulnerabilities or security issues that you may find in our systems or applications. If you believe you have discovered a vulnerability, please report it to us immediately.
|
||||
|
||||
To report a vulnerability, please send an email to our security team at contact@piolabs.com. Please include as much information as possible, including:
|
||||
|
||||
- A description of the vulnerability and how it can be exploited
|
||||
- Steps to reproduce the vulnerability
|
||||
- Any additional information that can help us understand and reproduce the vulnerability
|
||||
|
||||
Once we receive your report, our security team will acknowledge receipt within 24 hours and will work to validate the reported vulnerability. We will provide periodic updates on the progress of the vulnerability assessment, and will notify you once a fix has been deployed.
|
||||
|
||||
If the vulnerability is accepted, we will work to remediate the issue as quickly as possible. We may also provide credit or recognition to the individual who reported the vulnerability, at our discretion.
|
||||
|
||||
If the vulnerability is declined, we will provide a justification for our decision and may offer guidance on how to improve the report or how to test the system more effectively.
|
||||
|
||||
Please note that we will not take any legal action against individuals who report vulnerabilities in good faith and in accordance with this policy.
|
||||
|
||||
Thank you for helping us keep our systems and data secure.
|
2
docs
Submodule docs updated: b38923e39b...98609771ba
2
examples
Submodule examples updated: f98cb5a9be...3e23b5ac43
@@ -12,9 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
VERSION = (6, 1, 4)
|
||||
VERSION = (6, 1, 7)
|
||||
__version__ = ".".join([str(s) for s in VERSION])
|
||||
|
||||
__title__ = "platformio"
|
||||
@@ -44,14 +42,12 @@ __registry_mirror_hosts__ = [
|
||||
]
|
||||
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
|
||||
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
__core_packages__ = {
|
||||
"contrib-piohome": "~3.4.2",
|
||||
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
|
||||
"tool-scons": "~4.40400.0",
|
||||
"contrib-pioremote": "~1.0.0",
|
||||
"tool-scons": "~4.40502.0",
|
||||
"tool-cppcheck": "~1.270.0",
|
||||
"tool-clangtidy": "~1.120001.0",
|
||||
"tool-clangtidy": "~1.150005.0",
|
||||
"tool-pvs-studio": "~7.18.0",
|
||||
}
|
||||
|
||||
|
@@ -21,22 +21,18 @@ from platformio.http import HTTPClient, HTTPClientError
|
||||
|
||||
|
||||
class AccountError(PlatformioException):
|
||||
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class AccountNotAuthorized(AccountError):
|
||||
|
||||
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
|
||||
|
||||
|
||||
class AccountAlreadyAuthorized(AccountError):
|
||||
|
||||
MESSAGE = "You are already authorized with {0} account."
|
||||
|
||||
|
||||
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
|
||||
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
|
||||
|
||||
def __init__(self):
|
||||
@@ -298,7 +294,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
@@ -351,6 +347,6 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
data={"username": username},
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
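The hunks above move the username argument of the two DELETE endpoints from the request body (data=) to the query string (params=). A minimal sketch with the requests library, against a hypothetical URL, shows the difference; it is not PlatformIO's HTTPClient code.

import requests

# Hypothetical endpoint used only for illustration.
url = "https://api.example.com/v1/orgs/acme/owners"

# Old behaviour: the username travels in the body of the DELETE request.
requests.delete(url, data={"username": "jdoe"})

# New behaviour: the username travels as a query parameter
# (DELETE .../owners?username=jdoe).
requests.delete(url, params={"username": "jdoe"})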
@@ -22,29 +22,27 @@ from platformio.account.validate import validate_email, validate_orgname
|
||||
@click.argument("cur_orgname")
|
||||
@click.option(
|
||||
"--orgname",
|
||||
callback=lambda _, __, value: validate_orgname(value),
|
||||
callback=lambda _, __, value: validate_orgname(value) if value else value,
|
||||
help="A new orgname",
|
||||
)
|
||||
@click.option("--email")
|
||||
@click.option(
|
||||
"--email",
|
||||
callback=lambda _, __, value: validate_email(value) if value else value,
|
||||
)
|
||||
@click.option("--displayname")
|
||||
def org_update_cmd(cur_orgname, **kwargs):
|
||||
client = AccountClient()
|
||||
org = client.get_org(cur_orgname)
|
||||
del org["owners"]
|
||||
new_org = org.copy()
|
||||
new_org = {
|
||||
key: value if value is not None else org[key] for key, value in kwargs.items()
|
||||
}
|
||||
if not any(kwargs.values()):
|
||||
for field in org:
|
||||
new_org[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=org[field]
|
||||
)
|
||||
if field == "email":
|
||||
validate_email(new_org[field])
|
||||
if field == "orgname":
|
||||
validate_orgname(new_org[field])
|
||||
else:
|
||||
new_org.update(
|
||||
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
|
||||
)
|
||||
for key in kwargs:
|
||||
new_org[key] = click.prompt(key.capitalize(), default=org[key])
|
||||
if key == "email":
|
||||
validate_email(new_org[key])
|
||||
if key == "orgname":
|
||||
validate_orgname(new_org[key])
|
||||
client.update_org(cur_orgname, new_org)
|
||||
return click.secho(
|
||||
"The organization `%s` has been successfully updated." % cur_orgname,
|
||||
|
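The rewritten org_update_cmd above merges the CLI options into the stored organization record with a single dict comprehension. A standalone sketch of that merge pattern, using hypothetical field values:

org = {"orgname": "acme", "email": "old@example.com", "displayname": "ACME Corp"}
kwargs = {"orgname": None, "email": "new@example.com", "displayname": None}

# Options the user actually passed (not None) override the stored fields;
# everything else keeps its current value.
new_org = {
    key: value if value is not None else org[key] for key, value in kwargs.items()
}
print(new_org)
# {'orgname': 'acme', 'email': 'new@example.com', 'displayname': 'ACME Corp'}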
@@ -22,9 +22,7 @@ from platformio.account.validate import validate_orgname_teamname
|
||||
@click.argument(
|
||||
"orgname_teamname",
|
||||
metavar="ORGNAME:TEAMNAME",
|
||||
callback=lambda _, __, value: validate_orgname_teamname(
|
||||
value, teamname_validate=True
|
||||
),
|
||||
callback=lambda _, __, value: validate_orgname_teamname(value),
|
||||
)
|
||||
@click.option(
|
||||
"--description",
|
||||
|
@@ -26,7 +26,7 @@ from platformio.account.validate import validate_orgname_teamname, validate_team
|
||||
)
|
||||
@click.option(
|
||||
"--name",
|
||||
callback=lambda _, __, value: validate_teamname(value),
|
||||
callback=lambda _, __, value: validate_teamname(value) if value else value,
|
||||
help="A new team name",
|
||||
)
|
||||
@click.option(
|
||||
@@ -36,18 +36,14 @@ def team_update_cmd(orgname_teamname, **kwargs):
|
||||
orgname, teamname = orgname_teamname.split(":", 1)
|
||||
client = AccountClient()
|
||||
team = client.get_team(orgname, teamname)
|
||||
del team["id"]
|
||||
del team["members"]
|
||||
new_team = team.copy()
|
||||
new_team = {
|
||||
key: value if value is not None else team[key] for key, value in kwargs.items()
|
||||
}
|
||||
if not any(kwargs.values()):
|
||||
for field in team:
|
||||
new_team[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=team[field]
|
||||
)
|
||||
if field == "name":
|
||||
validate_teamname(new_team[field])
|
||||
else:
|
||||
new_team.update({key: value for key, value in kwargs.items() if value})
|
||||
for key in kwargs:
|
||||
new_team[key] = click.prompt(key.capitalize(), default=team[key])
|
||||
if key == "name":
|
||||
validate_teamname(new_team[key])
|
||||
client.update_team(orgname, teamname, new_team)
|
||||
return click.secho(
|
||||
"The team %s has been successfully updated." % teamname,
|
||||
|
@@ -18,8 +18,10 @@ import click
|
||||
|
||||
|
||||
def validate_username(value, field="username"):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I):
|
||||
value = str(value).strip() if value else None
|
||||
if not value or not re.match(
|
||||
r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I
|
||||
):
|
||||
raise click.BadParameter(
|
||||
"Invalid %s format. "
|
||||
"%s must contain only alphanumeric characters "
|
||||
@@ -30,16 +32,22 @@ def validate_username(value, field="username"):
|
||||
return value
|
||||
|
||||
|
||||
def validate_orgname(value):
|
||||
return validate_username(value, "Organization name")
|
||||
|
||||
|
||||
def validate_email(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I):
|
||||
value = str(value).strip() if value else None
|
||||
if not value or not re.match(
|
||||
r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I
|
||||
):
|
||||
raise click.BadParameter("Invalid email address")
|
||||
return value
|
||||
|
||||
|
||||
def validate_password(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
|
||||
value = str(value).strip() if value else None
|
||||
if not value or not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
|
||||
raise click.BadParameter(
|
||||
"Invalid password format. "
|
||||
"Password must contain at least 8 characters"
|
||||
@@ -48,27 +56,11 @@ def validate_password(value):
|
||||
return value
|
||||
|
||||
|
||||
def validate_orgname(value):
|
||||
return validate_username(value, "Organization name")
|
||||
|
||||
|
||||
def validate_orgname_teamname(value, teamname_validate=False):
|
||||
if ":" not in value:
|
||||
raise click.BadParameter(
|
||||
"Please specify organization and team name in the next"
|
||||
" format - orgname:teamname. For example, mycompany:DreamTeam"
|
||||
)
|
||||
teamname = str(value.strip().split(":", 1)[1])
|
||||
if teamname_validate:
|
||||
validate_teamname(teamname)
|
||||
return value
|
||||
|
||||
|
||||
def validate_teamname(value):
|
||||
if not value:
|
||||
return value
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I):
|
||||
value = str(value).strip() if value else None
|
||||
if not value or not re.match(
|
||||
r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
|
||||
):
|
||||
raise click.BadParameter(
|
||||
"Invalid team name format. "
|
||||
"Team name must only contain alphanumeric characters, "
|
||||
@@ -77,3 +69,16 @@ def validate_teamname(value):
|
||||
" not be longer than 20 characters."
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def validate_orgname_teamname(value):
|
||||
value = str(value).strip() if value else None
|
||||
if not value or ":" not in value:
|
||||
raise click.BadParameter(
|
||||
"Please specify organization and team name using the following"
|
||||
" format - orgname:teamname. For example, mycompany:DreamTeam"
|
||||
)
|
||||
orgname, teamname = value.split(":", 1)
|
||||
validate_orgname(orgname)
|
||||
validate_teamname(teamname)
|
||||
return value
|
||||
|
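The username/orgname pattern shown earlier in this file allows letters and digits with single hyphens only between alphanumeric characters. A standalone check using the same regex (copied from validate_username above) illustrates which values pass; the sample names are hypothetical.

import re

# Regex copied from validate_username above: up to 38 characters,
# letters/digits, hyphens allowed only between alphanumerics.
USERNAME_RE = re.compile(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", re.I)

for candidate in ("pio-user", "a", "-leading", "trailing-", "double--dash"):
    print(candidate, "->", bool(USERNAME_RE.match(candidate)))
# pio-user -> True, a -> True, -leading -> False, trailing- -> False, double--dash -> False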
@@ -58,6 +58,10 @@ DEFAULT_SETTINGS = {
|
||||
"value": get_default_projects_dir(),
|
||||
"validator": projects_dir_validate,
|
||||
},
|
||||
"enable_proxy_strict_ssl": {
|
||||
"description": "Verify the proxy server certificate against the list of supplied CAs",
|
||||
"value": True,
|
||||
},
|
||||
}
|
||||
|
||||
SESSION_VARS = {
|
||||
|
487
platformio/assets/schema/library.json
Normal file
@@ -0,0 +1,487 @@
|
||||
{
|
||||
"$id": "https://example.com/library.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "library.json schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"maxLength": 50,
|
||||
"description": "A name of a library.\nMust be unique in the PlatformIO Registry\nShould be slug style for simplicity, consistency, and compatibility. Example: HelloWorld\nCan contain a-z, digits, and dashes (but not start/end with them)\nConsecutive dashes and [:;/,@<>] chars are not allowed.",
|
||||
"required": true
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"description": "A version of a current library source code. Can contain a-z, digits, dots or dash and should be Semantic Versioning compatible.",
|
||||
"required": true
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "The field helps users to identify and search for your library with a brief description. Describe the hardware devices (sensors, boards and etc.) which are suitable with it.",
|
||||
"required": true
|
||||
},
|
||||
"keywords": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Used for search by keyword. Helps to make your library easier to discover without people needing to know its name.\nThe keyword should be lowercased, can contain a-z, digits and dash (but not start/end with them). A list from the keywords can be specified with separator , or declared as Array.",
|
||||
"required": true
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "Home page of a library (if is different from repository url).",
|
||||
"required": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"git",
|
||||
"hg",
|
||||
"svn"
|
||||
],
|
||||
"description": "only “git”, “hg” or “svn” are supported"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "if is not specified, default branch will be used. This field will be ignored if tag/release exists with the value of version."
|
||||
}
|
||||
},
|
||||
"description": "The repository in which the source code can be found.",
|
||||
"required": false
|
||||
},
|
||||
"authors": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "An author contact information\nIf authors field is not defined, PlatformIO will try to fetch data from VCS provider (Github, Gitlab, etc) if repository is declared.",
|
||||
"required": false
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"description": "A SPDX license ID or SPDX Expression. You can check the full list of SPDX license IDs (see “Identifier” column).",
|
||||
"required": false
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible frameworks. The available framework names are defined in the Frameworks section.\nIf the library is compatible with the all frameworks, then do not declare this field or you use *",
|
||||
"required": false
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible development platforms. The available platform name are defined in Development Platforms section.\nIf the library is compatible with the all platforms, then do not declare this field or use *.\nPlatformIO does not check platforms for compatibility in default mode. See Compatibility Mode for details. If you need a strict checking for compatible platforms for a library, please set libCompatMode to strict.",
|
||||
"required": false
|
||||
},
|
||||
"headers": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "MyLibrary.h"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "FooCore.h, FooFeature.h"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of header files that can be included in a project source files using #include <...> directive.",
|
||||
"required": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"base": {
|
||||
"type": "string"
|
||||
},
|
||||
"files": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "A list of example patterns.",
|
||||
"required": "false"
|
||||
},
|
||||
"dependencies": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of dependent libraries that will be automatically installed.",
|
||||
"required": false
|
||||
},
|
||||
"export": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Export only files that matched declared patterns.\n* - matches everything\n? - matches any single character\n[seq] - matches any character in seq\n[!seq] - matches any character not in seq"
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Exclude the directories and files which match with exclude patterns."
|
||||
}
|
||||
},
|
||||
"description": "This option is useful if you need to exclude extra data (test code, docs, images, PDFs, etc). It allows one to reduce the size of the final archive.\nTo check which files will be included in the final packages, please use pio pkg pack command.",
|
||||
"required": false
|
||||
},
|
||||
"scripts": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script AFTER the package has been installed.\nRun a custom Python script located in the package “scripts” folder AFTER the package is installed. Please note that you don’t need to specify a Python interpreter for Python scripts"
|
||||
},
|
||||
"preuninstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script BEFORE the package is removed.\nRun a custom Bash script BEFORE the package is uninstalled. The script is declared as a list of command arguments and is located at the root of a package"
|
||||
}
|
||||
},
|
||||
"description": "Execute custom scripts during the special Package Management CLI life cycle events",
|
||||
"required": false
|
||||
},
|
||||
"build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"flags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Extra flags to control preprocessing, compilation, assembly, and linking processes. More details build_flags.\nKeep in mind when operating with the -I flag (directories to be searched for header files). The path should be relative to the root directory where the library.json manifest is located."
|
||||
},
|
||||
"unflags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Remove base/initial flags which were set by development platform. More details build_unflags."
|
||||
},
|
||||
"includeDir": {
|
||||
"type": "string",
|
||||
"description": "Custom directory to be searched for header files. A default value is include and means that folder is located at the root of a library.\nThe Library Dependency Finder (LDF) will pick a library automatically only when a project or other dependent libraries include any header file located in includeDir or srcDir.",
|
||||
"required": false
|
||||
},
|
||||
"srcDir": {
|
||||
"type": "string",
|
||||
"description": "Custom location of library source code. A default value is src and means that folder is located in the root of a library.",
|
||||
"required": "false"
|
||||
},
|
||||
"srcFilter": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Specify which source files should be included/excluded from build process. The path in filter should be relative to the srcDir option of a library.\nSee syntax for build_src_filter.\nPlease note that you can generate source filter “on-the-fly” using extraScript",
|
||||
"required": false
|
||||
},
|
||||
"extraScript": {
|
||||
"type": "string",
|
||||
"description": "Launch extra script before a build process.",
|
||||
"required": "false"
|
||||
},
|
||||
"libArchive": {
|
||||
"type": "boolean",
|
||||
"description": "Create an archive (*.a, static library) from the object files and link it into a firmware (program). This is default behavior of PlatformIO Build System (\"libArchive\": true).\nSetting \"libArchive\": false will instruct PlatformIO Build System to link object files directly (in-line). This could be useful if you need to override weak symbols defined in framework or other libraries.\nYou can disable library archiving globally using lib_archive option in “platformio.ini” (Project Configuration File).",
|
||||
"required": "false"
|
||||
},
|
||||
"libLDFMode": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"off"
|
||||
],
|
||||
"description": "“Manual mode”, does not process source files of a project and dependencies. Builds only the libraries that are specified in manifests (library.json, module.json) or using lib_deps option."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain"
|
||||
],
|
||||
"description": "[DEFAULT] Parses ALL C/C++ source files of the project and follows only by nested includes (#include ..., chain...) from the libraries. It also parses C, CC, CPP files from libraries which have the same name as included header file. Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep"
|
||||
],
|
||||
"description": "Parses ALL C/C++ source files of the project and parses ALL C/C++ source files of the each found dependency (recursively). Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain+"
|
||||
],
|
||||
"description": "The same behavior as for the chain but evaluates C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep+"
|
||||
],
|
||||
"description": "The same behavior as for the deep but evaluates C/C++ Preprocessor conditional syntax."
|
||||
}
|
||||
],
|
||||
"description": "Specify Library Dependency Finder Mode. See Dependency Finder Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"libCompatMode": {
|
||||
"type": "string",
|
||||
"description": "Specify Library Compatibility Mode. See Compatibility Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"builder": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"PlatformIOLibBuilder"
|
||||
],
|
||||
"description": "Default Builder"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"ArduinoLibBuilder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"MbedLibBuilder"
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "Override default PlatformIOLibBuilder with another builder.",
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
|
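The schema above marks name, version, description, and keywords as required. A hypothetical minimal manifest providing just those fields, written out as library.json (all values are illustrative, not a real library):

import json

manifest = {
    "name": "HelloWorld",     # unique, slug-style library name
    "version": "1.0.0",       # Semantic Versioning compatible
    "description": "Illustrative library manifest for the schema above.",
    "keywords": ["example", "demo"],  # string or array of strings
}

with open("library.json", "w", encoding="utf-8") as fp:
    json.dump(manifest, fp, indent=2)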
@@ -76,6 +76,16 @@ ATTRS{idVendor}=="28e9", ATTRS{idProduct}=="0189", MODE="0666", ENV{ID_MM_DEVICE
|
||||
# FireBeetle-ESP32
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7522", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Wio Terminal
|
||||
ATTRS{idVendor}=="2886", ATTRS{idProduct}=="[08]02d", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
#
|
||||
@@ -87,44 +97,29 @@ SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port", MODE="0666", ENV{ID
|
||||
# opendous and estick
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232/FT245 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT2232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6010", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT4232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6011", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232H VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# Original FT232/FT245/FT2232/FT232H/FT4232
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="60[01][104]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# DISTORTEC JTAG-lock-pick Tiny 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TUMPA, TUMPA Lite
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a98", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a99", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a9[89]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# XDS100v2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca1", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris Evaluation Board FTDI (several)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd9", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris In-Circuit Debug Interface FTDI (ICDI) Board
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcda", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd[9a]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# egnite Turtelizer 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Section5 ICEbear
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c140", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c141", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c14[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey and JTAGkey-tiny
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
@@ -176,6 +171,3 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666"
|
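The consolidated FTDI rule above relies on udev's shell-style glob matching, so a single product-ID pattern such as 60[01][104] covers the IDs that previously each had their own rule. A rough check of that pattern with Python's fnmatch (only an approximation of udev's matcher):

from fnmatch import fnmatchcase

pattern = "60[01][104]"  # consolidated FT232/FT245/FT2232/FT232H/FT4232 product IDs
for product_id in ("6001", "6010", "6011", "6014"):
    print(product_id, "->", fnmatchcase(product_id, pattern))
# all four print True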
@@ -16,7 +16,7 @@
|
||||
% "request": "launch",
|
||||
% "name": "PIO Debug (skip Pre-Debug)",
|
||||
% "executable": _escape_path(prog_path),
|
||||
% "projectEnvName": env_name,
|
||||
% "projectEnvName": env_name if forced_env_name else default_debug_env_name,
|
||||
% "toolchainBinDir": _escape_path(os.path.dirname(gdb_path)),
|
||||
% "internalConsoleOptions": "openOnSessionStart",
|
||||
% }
|
||||
@@ -28,7 +28,7 @@
|
||||
% debug["name"] = "PIO Debug"
|
||||
% debug["preLaunchTask"] = {
|
||||
% "type": "PlatformIO",
|
||||
% "task": ("Pre-Debug (%s)" % env_name) if len(config.envs()) > 1 and original_env_name else "Pre-Debug",
|
||||
% "task": ("Pre-Debug (%s)" % env_name) if len(config.envs()) > 1 and forced_env_name else "Pre-Debug",
|
||||
% }
|
||||
% noloading = predebug.copy()
|
||||
% noloading["name"] = "PIO Debug (without uploading)"
|
@@ -28,9 +28,9 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import app, compat, fs
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.proc import get_pythonexe_path, where_is_program
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
@@ -99,6 +99,7 @@ if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
env.SConscriptChdir(False)
|
||||
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
@@ -139,29 +140,11 @@ if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if compat.IS_WINDOWS and sys.version_info >= (3, 8) and os.getcwd().startswith("\\\\"):
|
||||
click.secho("!!! WARNING !!!\t\t" * 3, fg="red")
|
||||
click.secho(
|
||||
"Your project is located on a mapped network drive but the "
|
||||
"current command-line shell does not support the UNC paths.",
|
||||
fg="yellow",
|
||||
)
|
||||
click.secho(
|
||||
"Please move your project to a physical drive or check this workaround: "
|
||||
"https://bit.ly/3kuU5mP\n",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
|
||||
if env.GetOption("clean") or is_clean_all:
|
||||
env.PioClean(is_clean_all)
|
||||
env.Exit(0)
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
@@ -175,15 +158,18 @@ if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(
|
||||
os.path.join(
|
||||
"$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
|
||||
"$BUILD_CACHE_DIR" if env.subst("$BUILD_CACHE_DIR") else "$BUILD_DIR",
|
||||
".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]),
|
||||
)
|
||||
)
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("pre"), exports="env")
|
||||
|
||||
if env.IsCleanTarget():
|
||||
env.CleanProject(fullclean=int(ARGUMENTS.get("FULLCLEAN", 0)))
|
||||
env.Exit(0)
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
@@ -192,8 +178,7 @@ if "UPLOAD_FLAGS" in env:
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("post"), exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
@@ -209,6 +194,13 @@ if env.get("SIZETOOL") and not (
|
||||
Default("checkprogsize")
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
env.Alias("compiledb", env.CompilationDatabase("$COMPILATIONDB_PATH"))
|
||||
|
||||
# Print configured protocols
|
||||
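The compiledb hunk above resolves relative compiler commands (CC, CXX, AS) to absolute paths before generating the compilation database. A rough standalone analogue, using shutil.which in place of PlatformIO's where_is_program helper:

import os
import shutil

def resolve_tool(command):
    # Keep absolute paths as-is; otherwise look the command up on PATH.
    if os.path.isabs(command):
        return command
    return shutil.which(command) or command

print(resolve_tool("gcc"))  # e.g. /usr/bin/gcc, or "gcc" if not on PATH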
@@ -258,3 +250,9 @@ if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
)
|
||||
|
||||
Default("sizedata")
|
||||
|
||||
# issue #4604: process targets sequentially
|
||||
for index, target in enumerate(
|
||||
[t for t in COMMAND_LINE_TARGETS if not t.startswith("__")][1:]
|
||||
):
|
||||
env.Depends(target, COMMAND_LINE_TARGETS[index])
|
||||
|
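The final hunk above (issue #4604) chains each command-line target onto its predecessor with env.Depends so SCons processes them sequentially. A plain-Python sketch of the indexing trick, with hypothetical target names, shows the pairs it produces:

targets = ["clean", "buildprog", "upload"]

# Each target after the first is paired with the one before it; in the real
# build script each pair is passed to env.Depends(target, previous_target).
dependency_pairs = [
    (target, targets[index]) for index, target in enumerate(targets[1:])
]
print(dependency_pairs)  # [('buildprog', 'clean'), ('upload', 'buildprog')]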
@@ -1,224 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright 2020 MongoDB Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included
|
||||
# in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
|
||||
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
|
||||
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
|
||||
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
|
||||
import SCons
|
||||
|
||||
from platformio.builder.tools.piobuild import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
|
||||
# http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation
|
||||
# database is, and why you might want one. The only user visible entry point here is
|
||||
# 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that
|
||||
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
|
||||
# which is the name that most clang tools search for by default.
|
||||
|
||||
# Is there a better way to do this than this global? Right now this exists so that the
|
||||
# emitter we add can record all of the things it emits, so that the scanner for the top level
|
||||
# compilation database can access the complete list, and also so that the writer has easy
|
||||
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
|
||||
# communicate more gracefully?
|
||||
__COMPILATION_DB_ENTRIES = []
|
||||
|
||||
|
||||
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
|
||||
# integrate with the cache, but there doesn't seem to be much call for it.
|
||||
class __CompilationDbNode(SCons.Node.Python.Value):
|
||||
def __init__(self, value):
|
||||
SCons.Node.Python.Value.__init__(self, value)
|
||||
self.Decider(changed_since_last_build_node)
|
||||
|
||||
|
||||
def changed_since_last_build_node(*args, **kwargs):
|
||||
"""Dummy decider to force always building"""
|
||||
return True
|
||||
|
||||
|
||||
def makeEmitCompilationDbEntry(comstr):
|
||||
"""
|
||||
Effectively this creates a lambda function to capture:
|
||||
* command line
|
||||
* source
|
||||
* target
|
||||
:param comstr: unevaluated command line
|
||||
:return: an emitter which has captured the above
|
||||
"""
|
||||
user_action = SCons.Action.Action(comstr)
|
||||
|
||||
def EmitCompilationDbEntry(target, source, env):
|
||||
"""
|
||||
This emitter will be added to each c/c++ object build to capture the info needed
|
||||
for clang tools
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:return: target(s), source(s)
|
||||
"""
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(
|
||||
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
|
||||
)
|
||||
|
||||
dbtarget = __CompilationDbNode(source)
|
||||
|
||||
entry = env.__COMPILATIONDB_Entry(
|
||||
target=dbtarget,
|
||||
source=[],
|
||||
__COMPILATIONDB_UTARGET=target,
|
||||
__COMPILATIONDB_USOURCE=source,
|
||||
__COMPILATIONDB_UACTION=user_action,
|
||||
__COMPILATIONDB_ENV=env,
|
||||
)
|
||||
|
||||
# Technically, these next two lines should not be required: it should be fine to
|
||||
# cache the entries. However, they don't seem to update properly. Since they are quick
|
||||
# to re-generate disable caching and sidestep this problem.
|
||||
env.AlwaysBuild(entry)
|
||||
env.NoCache(entry)
|
||||
|
||||
__COMPILATION_DB_ENTRIES.append(dbtarget)
|
||||
|
||||
return target, source
|
||||
|
||||
return EmitCompilationDbEntry
|
||||
|
||||
|
||||
def CompilationDbEntryAction(target, source, env, **kw):
|
||||
"""
|
||||
Create a dictionary with evaluated command line, target, source
|
||||
and store that info as an attribute on the target
|
||||
(Which has been stored in __COMPILATION_DB_ENTRIES array
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:param kw:
|
||||
:return: None
|
||||
"""
|
||||
|
||||
command = env["__COMPILATIONDB_UACTION"].strfunction(
|
||||
target=env["__COMPILATIONDB_UTARGET"],
|
||||
source=env["__COMPILATIONDB_USOURCE"],
|
||||
env=env["__COMPILATIONDB_ENV"],
|
||||
)
|
||||
|
||||
entry = {
|
||||
"directory": env.Dir("#").abspath,
|
||||
"command": command,
|
||||
"file": str(env["__COMPILATIONDB_USOURCE"][0]),
|
||||
}
|
||||
|
||||
target[0].write(entry)
|
||||
|
||||
|
||||
def WriteCompilationDb(target, source, env):
|
||||
entries = []
|
||||
|
||||
for s in __COMPILATION_DB_ENTRIES:
|
||||
item = s.read()
|
||||
item["file"] = os.path.abspath(item["file"])
|
||||
entries.append(item)
|
||||
|
||||
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
|
||||
json.dump(
|
||||
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
|
||||
)
|
||||
|
||||
|
||||
def ScanCompilationDb(node, env, path):
|
||||
return __COMPILATION_DB_ENTRIES
|
||||
|
||||
|
||||
def generate(env, **kwargs):
|
||||
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
|
||||
|
||||
env["COMPILATIONDB_COMSTR"] = kwargs.get(
|
||||
"COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
|
||||
)
|
||||
|
||||
components_by_suffix = itertools.chain(
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_C_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_CXX_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_ASM_EXT],
|
||||
[(static_obj, SCons.Defaults.StaticObjectEmitter, "$ASCOM")],
|
||||
),
|
||||
)
|
||||
|
||||
for entry in components_by_suffix:
|
||||
suffix = entry[0]
|
||||
builder, base_emitter, command = entry[1]
|
||||
|
||||
# Assumes a dictionary emitter
|
||||
emitter = builder.emitter[suffix]
|
||||
builder.emitter[suffix] = SCons.Builder.ListEmitter(
|
||||
[emitter, makeEmitCompilationDbEntry(command)]
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(CompilationDbEntryAction, None),
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
|
||||
target_scanner=SCons.Scanner.Scanner(
|
||||
function=ScanCompilationDb, node_class=None
|
||||
),
|
||||
)
|
||||
|
||||
def CompilationDatabase(env, target):
|
||||
result = env.__COMPILATIONDB_Database(target=target, source=[])
|
||||
|
||||
env.AlwaysBuild(result)
|
||||
env.NoCache(result)
|
||||
|
||||
return result
|
||||
|
||||
env.AddMethod(CompilationDatabase, "CompilationDatabase")
|
||||
|
||||
|
||||
def exists(env):
|
||||
return True
|
@@ -239,7 +239,7 @@ def ProcessUnFlags(env, flags):
    for scope in unflag_scopes:
        for unflags in parsed.values():
            for unflag in unflags:
                for current in env.get(scope, []):
                for current in list(env.get(scope, [])):
                    conditions = [
                        unflag == current,
                        not isinstance(unflag, (tuple, list))
@@ -25,7 +25,6 @@ from platformio.compat import get_filesystem_encoding, get_locale_encoding


class InoToCPPConverter:

    PROTOTYPE_RE = re.compile(
        r"""^(
        (?:template\<.*\>\s*)?  # template
@@ -103,7 +102,7 @@ class InoToCPPConverter:
        return "\n".join(["#include <Arduino.h>"] + lines) if lines else None

    def process(self, contents):
        out_file = self._main_ino + ".cpp"
        out_file = re.sub(r"[\"\'\;]+", "", self._main_ino) + ".cpp"
        assert self._gcc_preprocess(contents, out_file)
        contents = self.read_safe_contents(out_file)
        contents = self._join_multiline_strings(contents)
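The rewritten line above strips quote and semicolon characters from the sketch file name before deriving the preprocessed output path. A quick illustration of what that substitution does; the file name below is a made-up, deliberately malformed example:

    # Illustration only; main_ino is hypothetical.
    import re
    main_ino = 'blink";x".ino'
    print(re.sub(r"[\"\'\;]+", "", main_ino) + ".cpp")  # -> blinkx.ino.cpp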
@@ -24,7 +24,7 @@ from platformio.proc import exec_command, where_is_program


def IsIntegrationDump(_):
    return set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
    return set(["__idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)


def DumpIntegrationIncludes(env):
@@ -147,6 +147,7 @@ def _subst_cmd(env, cmd):
def DumpIntegrationData(*args):
    projenv, globalenv = args[0:2]  # pylint: disable=unbalanced-tuple-unpacking
    data = {
        "build_type": globalenv.GetBuildType(),
        "env_name": globalenv["PIOENV"],
        "libsource_dirs": [
            globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
@@ -29,7 +29,7 @@ from SCons.Script import DefaultEnvironment  # pylint: disable=import-error
from platformio import exception, fs
from platformio.builder.tools import piobuild
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
from platformio.http import HTTPClientError, InternetIsOffline
from platformio.http import HTTPClientError, InternetConnectionError
from platformio.package.exception import (
    MissingPackageManifestError,
    UnknownPackageError,
@@ -109,7 +109,6 @@ class LibBuilderFactory:


class LibBuilderBase:

    CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
    CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
    # Max depth of nested includes:
@@ -298,11 +297,12 @@ class LibBuilderBase:
        with fs.cd(self.path):
            self.env.ProcessFlags(self.build_flags)
            if self.extra_script:
                self.env.SConscriptChdir(1)
                self.env.SConscriptChdir(True)
                self.env.SConscript(
                    os.path.abspath(self.extra_script),
                    exports={"env": self.env, "pio_lib_builder": self},
                )
                self.env.SConscriptChdir(False)
            self.env.ProcessUnFlags(self.build_unflags)

    def process_dependencies(self):
@@ -982,7 +982,11 @@ class ProjectAsLibBuilder(LibBuilderBase):
        try:
            lm.install(spec)
            did_install = True
        except (HTTPClientError, UnknownPackageError, InternetIsOffline) as exc:
        except (
            HTTPClientError,
            UnknownPackageError,
            InternetConnectionError,
        ) as exc:
            click.secho("Warning! %s" % exc, fg="yellow")

        # reset cache
@@ -1157,7 +1161,7 @@ def ConfigureProjectLibBuilder(env):
        click.echo("Path: %s" % lb.path, nl=False)
        click.echo(")", nl=False)
        click.echo("")
        if lb.depbuilders:
        if lb.verbose and lb.depbuilders:
            _print_deps_tree(lb, level + 1)

    project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
@@ -27,7 +27,11 @@ def VerboseAction(_, act, actstr):
    return Action(act, actstr)


def PioClean(env, clean_all=False):
def IsCleanTarget(env):
    return env.GetOption("clean")


def CleanProject(env, fullclean=False):
    def _relpath(path):
        if compat.IS_WINDOWS:
            prefix = os.getcwd()[:2].lower()
@@ -51,7 +55,7 @@ def PioClean(env, clean_all=False):
    else:
        print("Build environment is clean")

    if clean_all and os.path.isdir(libdeps_dir):
    if fullclean and os.path.isdir(libdeps_dir):
        _clean_dir(libdeps_dir)

    print("Done cleaning")
@@ -103,7 +107,8 @@ def exists(_):

def generate(env):
    env.AddMethod(VerboseAction)
    env.AddMethod(PioClean)
    env.AddMethod(IsCleanTarget)
    env.AddMethod(CleanProject)
    env.AddMethod(AddTarget)
    env.AddMethod(AddPlatformTarget)
    env.AddMethod(AddCustomTarget)
@ -38,18 +38,15 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--pattern", multiple=True)
|
||||
@click.option("--pattern", multiple=True, hidden=True)
|
||||
@click.option("-f", "--src-filters", multiple=True)
|
||||
@click.option("--flags", multiple=True)
|
||||
@click.option(
|
||||
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
|
||||
@ -67,6 +64,7 @@ def cli(
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
src_filters,
|
||||
pattern,
|
||||
flags,
|
||||
severity,
|
||||
@ -105,14 +103,24 @@ def cli(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_patterns = [
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
default_src_filters = [
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "src_dir")),
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
|
||||
]
|
||||
|
||||
src_filters = (
|
||||
src_filters
|
||||
or pattern
|
||||
or env_options.get(
|
||||
"check_src_filters",
|
||||
env_options.get("check_patterns", default_src_filters),
|
||||
)
|
||||
)
|
||||
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
silent=silent,
|
||||
patterns=pattern or env_options.get("check_patterns", default_patterns),
|
||||
src_filters=src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
@ -265,7 +273,7 @@ def print_defects_stats(results):
|
||||
tabular_data.append(total)
|
||||
|
||||
headers = ["Component"]
|
||||
headers.extend([l.upper() for l in severity_labels])
|
||||
headers.extend([label.upper() for label in severity_labels])
|
||||
headers = [click.style(h, bold=True) for h in headers]
|
||||
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
|
||||
click.echo()
|
||||
|
@ -16,6 +16,7 @@ import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
# pylint: disable=too-many-instance-attributes, redefined-builtin
|
||||
@ -23,7 +24,6 @@ from platformio.project.helpers import get_project_dir
|
||||
|
||||
|
||||
class DefectItem:
|
||||
|
||||
SEVERITY_HIGH = 1
|
||||
SEVERITY_MEDIUM = 2
|
||||
SEVERITY_LOW = 4
|
||||
@ -79,7 +79,7 @@ class DefectItem:
|
||||
for key, value in DefectItem.SEVERITY_LABELS.items():
|
||||
if label == value:
|
||||
return key
|
||||
raise Exception("Unknown severity label -> %s" % label)
|
||||
raise PlatformioException("Unknown severity label -> %s" % label)
|
||||
|
||||
def as_dict(self):
|
||||
return {
|
||||
|
@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
@ -30,6 +29,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
self.options = options
|
||||
self.project_dir = project_dir
|
||||
self.cc_flags = []
|
||||
self.cxx_flags = []
|
||||
self.cpp_includes = []
|
||||
@ -41,7 +41,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data(project_dir)
|
||||
self._load_cpp_data()
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
@ -56,8 +56,8 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self, project_dir):
|
||||
data = load_build_metadata(project_dir, self.envname)
|
||||
def _load_cpp_data(self):
|
||||
data = load_build_metadata(self.project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
|
||||
@ -99,6 +99,13 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
|
||||
if result["returncode"] != 0:
|
||||
click.echo("Warning: Failed to extract toolchain defines!")
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
@ -201,7 +208,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(patterns):
|
||||
def get_project_target_files(project_dir, src_filters):
|
||||
c_extension = (".c",)
|
||||
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
|
||||
header_extensions = (".h", ".hh", ".hpp", ".hxx")
|
||||
@ -216,13 +223,9 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.abspath(path))
|
||||
|
||||
for pattern in patterns:
|
||||
for item in glob.glob(pattern, recursive=True):
|
||||
if not os.path.isdir(item):
|
||||
_add_file(item)
|
||||
for root, _, files in os.walk(item, followlinks=True):
|
||||
for f in files:
|
||||
_add_file(os.path.join(root, f))
|
||||
src_filters = normalize_src_filters(src_filters)
|
||||
for f in fs.match_src_files(project_dir, src_filters):
|
||||
_add_file(f)
|
||||
|
||||
return result
|
||||
|
||||
@@ -243,3 +246,22 @@ class CheckToolBase:  # pylint: disable=too-many-instance-attributes
        self.clean_up()

        return self._bad_input


#
# Helpers
#


def normalize_src_filters(src_filters):
    def _normalize(src_filters):
        return (
            src_filters
            if src_filters.startswith(("+<", "-<"))
            else "+<%s>" % src_filters
        )

    if isinstance(src_filters, (list, tuple)):
        return " ".join([_normalize(f) for f in src_filters])

    return _normalize(src_filters)
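A short usage sketch for the normalize_src_filters helper above (values are illustrative): each plain path is wrapped in +<...>, while entries that already start with +< or -< pass through unchanged.

    print(normalize_src_filters("src"))                  # +<src>
    print(normalize_src_filters(["+<src>", "include"]))  # +<src> +<include>
    print(normalize_src_filters("-<tests>"))             # -<tests>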
@@ -64,7 +64,9 @@ class ClangtidyCheckTool(CheckToolBase):
        ):
            cmd.append("--checks=*")

        project_files = self.get_project_target_files(self.options["patterns"])
        project_files = self.get_project_target_files(
            self.project_dir, self.options["src_filters"]
        )

        src_files = []
        for items in project_files.values():
@ -96,7 +96,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
@ -214,7 +214,9 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
|
||||
return
|
||||
|
||||
for files in self.get_project_target_files(self.options["patterns"]).values():
|
||||
for files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).values():
|
||||
for f in files:
|
||||
dump_file = f + ".dump"
|
||||
if os.path.isfile(dump_file):
|
||||
@ -243,7 +245,9 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
src_files_scope = ("c", "c++")
|
||||
if not any(project_files[t] for t in src_files_scope):
|
||||
click.echo("Error: Nothing to check.")
|
||||
|
@ -227,7 +227,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.options["patterns"]
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
|
@ -19,7 +19,6 @@ import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -42,7 +42,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
grpboards[board["platform"]].append(board)
|
||||
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
for platform, boards in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
|
@ -51,15 +51,13 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=tempfile.mkdtemp,
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@ -109,8 +107,8 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
ctx.invoke(
|
||||
project_init_cmd,
|
||||
project_dir=build_dir,
|
||||
board=board,
|
||||
project_option=project_option,
|
||||
boards=board,
|
||||
project_options=project_option,
|
||||
)
|
||||
|
||||
# process project
|
||||
|
@ -65,9 +65,7 @@ def invoke_command(ctx, cmd, **kwargs):
|
||||
"--storage-dir",
|
||||
multiple=True,
|
||||
default=None,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
help="Manage custom library storage",
|
||||
)
|
||||
@click.option(
|
||||
|
@ -13,23 +13,28 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from zipfile import ZipFile
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.http import fetch_remote_content
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.proc import exec_command, get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
from platformio.proc import get_pythonexe_path
|
||||
|
||||
PYPI_JSON_URL = "https://pypi.org/pypi/platformio/json"
|
||||
DEVELOP_ZIP_URL = "https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
DEVELOP_INIT_SCRIPT_URL = (
|
||||
"https://raw.githubusercontent.com/platformio/platformio-core"
|
||||
"/develop/platformio/__init__.py"
|
||||
)
|
||||
|
||||
|
||||
@click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version")
|
||||
@click.option("--dev", is_flag=True, help="Use development branch")
|
||||
def cli(dev):
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def cli(dev, verbose):
|
||||
update_core_packages()
|
||||
if not dev and __version__ == get_latest_version():
|
||||
return click.secho(
|
||||
@ -38,29 +43,26 @@ def cli(dev):
|
||||
fg="green",
|
||||
)
|
||||
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
click.secho("Please wait while upgrading PlatformIO Core ...", fg="yellow")
|
||||
|
||||
python_exe = get_pythonexe_path()
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (
|
||||
["pip", "install", "--upgrade", download_dist_package(to_develop)],
|
||||
["platformio", "--version"],
|
||||
)
|
||||
pkg_spec = DEVELOP_ZIP_URL if to_develop else "platformio"
|
||||
|
||||
cmd = None
|
||||
r = {}
|
||||
try:
|
||||
for cmd in cmds:
|
||||
cmd = [get_pythonexe_path(), "-m"] + cmd
|
||||
r = exec_command(cmd)
|
||||
|
||||
# try pip with disabled cache
|
||||
if r["returncode"] != 0 and cmd[2] == "pip":
|
||||
cmd.insert(3, "--no-cache-dir")
|
||||
r = exec_command(cmd)
|
||||
|
||||
assert r["returncode"] == 0
|
||||
assert "version" in r["out"]
|
||||
actual_version = r["out"].strip().split("version", 1)[1].strip()
|
||||
subprocess.run(
|
||||
[python_exe, "-m", "pip", "install", "--upgrade", pkg_spec],
|
||||
check=True,
|
||||
capture_output=not verbose,
|
||||
)
|
||||
r = subprocess.run(
|
||||
[python_exe, "-m", "platformio", "--version"],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
assert "version" in r.stdout
|
||||
actual_version = r.stdout.split("version", 1)[1].strip()
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s" % actual_version,
|
||||
fg="green",
|
||||
@ -71,52 +73,24 @@ def cli(dev):
|
||||
click.secho(
|
||||
"Warning! Please restart IDE to affect PIO Home changes", fg="yellow"
|
||||
)
|
||||
except Exception as exc:
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(exc)])) from exc
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
|
||||
click.secho(
|
||||
"""
|
||||
-----------------
|
||||
Permission denied
|
||||
-----------------
|
||||
You need the `sudo` permission to install Python packages. Try
|
||||
|
||||
> sudo pip install -U platformio
|
||||
|
||||
WARNING! Don't use `sudo` for the rest PlatformIO commands.
|
||||
""",
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
raise exception.ReturnErrorCode(1)
|
||||
raise exception.UpgradeError("\n".join([str(cmd), r["out"], r["err"]]))
|
||||
except (AssertionError, subprocess.CalledProcessError) as exc:
|
||||
click.secho(
|
||||
"\nWarning!!! Could not automatically upgrade the PlatformIO Core.",
|
||||
fg="red",
|
||||
)
|
||||
click.secho(
|
||||
"Please upgrade it manually using the following command:\n",
|
||||
fg="red",
|
||||
)
|
||||
click.secho(f'"{python_exe}" -m pip install -U {pkg_spec}\n', fg="cyan")
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def download_dist_package(to_develop):
|
||||
if not to_develop:
|
||||
return "platformio"
|
||||
dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
cache_dir = get_project_cache_dir()
|
||||
if not os.path.isdir(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "wb") as fp:
|
||||
r = exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
|
||||
)
|
||||
assert r["returncode"] == 0
|
||||
# check ZIP structure
|
||||
with ZipFile(pkg_name) as zp:
|
||||
assert zp.testzip() is None
|
||||
return pkg_name
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return dl_url
|
||||
def get_pkg_spec(to_develop):
|
||||
if to_develop:
|
||||
return
|
||||
|
||||
|
||||
def get_latest_version():
|
||||
@ -133,10 +107,7 @@ def get_latest_version():
|
||||
|
||||
def get_develop_latest_version():
|
||||
version = None
|
||||
content = fetch_remote_content(
|
||||
"https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/platformio/__init__.py"
|
||||
)
|
||||
content = fetch_remote_content(DEVELOP_INIT_SCRIPT_URL)
|
||||
for line in content.split("\n"):
|
||||
line = line.strip()
|
||||
if not line.startswith("VERSION"):
|
||||
@ -153,5 +124,5 @@ def get_develop_latest_version():
|
||||
|
||||
|
||||
def get_pypi_latest_version():
|
||||
content = fetch_remote_content("https://pypi.org/pypi/platformio/json")
|
||||
content = fetch_remote_content(PYPI_JSON_URL)
|
||||
return json.loads(content)["info"]["version"]
|
||||
|
@@ -85,10 +85,7 @@ def get_filesystem_encoding():


def get_locale_encoding():
    try:
        return locale.getdefaultlocale()[1]
    except ValueError:
        return None
    return locale.getpreferredencoding()


def get_object_members(obj, ignore_private=True):
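The replacement call is the standard-library locale.getpreferredencoding(); a quick check of what it returns (output varies by system):

    import locale
    print(locale.getpreferredencoding())  # e.g. "UTF-8"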
@ -28,9 +28,9 @@ from platformio.debug import helpers
|
||||
from platformio.debug.config.factory import DebugConfigFactory
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.process.gdb import GDBClientProcess
|
||||
from platformio.exception import ReturnErrorCode
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
@ -44,16 +44,12 @@ from platformio.project.options import ProjectOptions
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
|
||||
@ -81,61 +77,57 @@ def cli(
|
||||
project_dir = os.getenv(name)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
return _debug_in_project_dir(
|
||||
project_config = ProjectConfig.get_instance(project_conf)
|
||||
project_config.validate(envs=[environment] if environment else None)
|
||||
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
configure_args = (
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
environment,
|
||||
project_config,
|
||||
env_name,
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
)
|
||||
if helpers.is_gdbmi_mode():
|
||||
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
debug_config = _configure(*configure_args)
|
||||
stream.close()
|
||||
else:
|
||||
debug_config = _configure(*configure_args)
|
||||
|
||||
_run(project_dir, debug_config, __unprocessed)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _debug_in_project_dir(
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
environment,
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
):
|
||||
project_config = ProjectConfig.get_instance(project_conf)
|
||||
project_config.validate(envs=[environment] if environment else None)
|
||||
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
env_options = project_config.items(env=env_name, as_dict=True)
|
||||
if "platform" not in env_options:
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
|
||||
def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed):
|
||||
platform = PlatformFactory.new(
|
||||
project_config.get(f"env:{env_name}", "platform"), autoinstall=True
|
||||
)
|
||||
debug_config = DebugConfigFactory.new(
|
||||
PlatformFactory.new(env_options["platform"], autoinstall=True),
|
||||
platform,
|
||||
project_config,
|
||||
env_name,
|
||||
)
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
return subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
raise ReturnErrorCode(
|
||||
subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
).returncode
|
||||
)
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as exc:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(exc) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(exc) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
click.echo(str(exc))
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_config.load_cmds == ["preload"]
|
||||
@ -157,25 +149,10 @@ def _debug_in_project_dir(
|
||||
debug_config.load_cmds = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_gdbmi_mode():
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream(
|
||||
"~", "Preparing firmware for debugging...\n"
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
stream.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(
|
||||
ctx, os.getcwd(), project_config, env_name, preload, verbose
|
||||
)
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(debug_config.program_path)
|
||||
@ -183,6 +160,10 @@ def _debug_in_project_dir(
|
||||
if not os.path.isfile(debug_config.program_path):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
return debug_config
|
||||
|
||||
|
||||
def _run(project_dir, debug_config, __unprocessed):
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
@ -199,5 +180,3 @@ def _debug_in_project_dir(
|
||||
finally:
|
||||
client.close()
|
||||
loop.close()
|
||||
|
||||
return True
|
||||
|
@ -145,10 +145,10 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True)
|
||||
if data:
|
||||
return data
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
|
||||
if not data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
return data
|
||||
|
||||
def _configure_server(self):
|
||||
# user disabled server in platformio.ini
|
||||
|
@ -18,7 +18,6 @@ from platformio.device.finder import SerialPortFinder, is_pattern_port
|
||||
|
||||
|
||||
class BlackmagicDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
|
@ -16,7 +16,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class GenericDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
|
@ -16,7 +16,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class JlinkDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
|
@ -16,7 +16,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class MspdebugDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
@ -17,7 +17,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class NativeDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
@ -16,7 +16,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class QemuDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
|
@ -16,7 +16,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class RenodeDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
|
@ -20,7 +20,6 @@ class DebugError(PlatformioException):
|
||||
|
||||
|
||||
class DebugSupportError(DebugError, UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please request support at https://github.com/platformio/"
|
||||
|
@ -31,7 +31,6 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
|
||||
|
||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
@ -91,7 +90,7 @@ def predebug_project(
|
||||
TestSuite(env_name, debug_testname),
|
||||
project_config,
|
||||
TestRunnerOptions(
|
||||
verbose=verbose,
|
||||
verbose=3 if verbose else 0,
|
||||
without_building=False,
|
||||
without_debugging=False,
|
||||
without_uploading=not preload,
|
||||
|
@ -53,7 +53,6 @@ class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
|
||||
|
||||
|
||||
class DebugBaseProcess:
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
LOG_FILE = None
|
||||
|
||||
|
@ -24,7 +24,6 @@ from platformio.debug.process.client import DebugClientProcess
|
||||
|
||||
|
||||
class GDBClientProcess(DebugClientProcess):
|
||||
|
||||
PIO_SRC_NAME = ".pioinit"
|
||||
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
|
||||
|
||||
|
@ -26,7 +26,6 @@ from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class DebugServerProcess(DebugBaseProcess):
|
||||
|
||||
STD_BUFFER_SIZE = 1024
|
||||
|
||||
def __init__(self, debug_config):
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
import os
|
||||
from fnmatch import fnmatch
|
||||
from functools import lru_cache
|
||||
|
||||
import click
|
||||
import serial
|
||||
@ -119,6 +120,8 @@ class SerialPortFinder:
|
||||
|
||||
@staticmethod
|
||||
def match_device_hwid(patterns):
|
||||
if not patterns:
|
||||
return None
|
||||
for item in list_serial_ports(as_objects=True):
|
||||
if not item.vid or not item.pid:
|
||||
continue
|
||||
@ -143,10 +146,10 @@ class SerialPortFinder:
|
||||
if not device:
|
||||
device = self._find_known_device()
|
||||
if device:
|
||||
port = self._reveal_device_port(device)
|
||||
return self._reveal_device_port(device)
|
||||
|
||||
# pick the best PID:VID USB device
|
||||
best_port = None
|
||||
port = best_port = None
|
||||
for item in list_serial_ports():
|
||||
if self.ensure_ready and not is_serial_port_ready(item["port"]):
|
||||
continue
|
||||
@ -215,20 +218,26 @@ class SerialPortFinder:
|
||||
if os.path.isfile(udev_rules_path):
|
||||
hwids.extend(parse_udev_rules_hwids(udev_rules_path))
|
||||
|
||||
# load from installed dev-platforms
|
||||
for platform in PlatformPackageManager().get_installed():
|
||||
p = PlatformFactory.new(platform)
|
||||
for board_config in p.get_boards().values():
|
||||
for board_hwid in board_config.get("build.hwids", []):
|
||||
board_hwid = self.normalize_board_hwid(board_hwid)
|
||||
if board_hwid not in hwids:
|
||||
hwids.append(board_hwid)
|
||||
@lru_cache(maxsize=1)
|
||||
def _fetch_hwids_from_platforms():
|
||||
"""load from installed dev-platforms"""
|
||||
result = []
|
||||
for platform in PlatformPackageManager().get_installed():
|
||||
p = PlatformFactory.new(platform)
|
||||
for board_config in p.get_boards().values():
|
||||
for board_hwid in board_config.get("build.hwids", []):
|
||||
board_hwid = self.normalize_board_hwid(board_hwid)
|
||||
if board_hwid not in result:
|
||||
result.append(board_hwid)
|
||||
return result
|
||||
|
||||
try:
|
||||
|
||||
@retry(timeout=self.timeout)
|
||||
def wrapper():
|
||||
device = self.match_device_hwid(hwids)
|
||||
if not device:
|
||||
device = self.match_device_hwid(_fetch_hwids_from_platforms())
|
||||
if device:
|
||||
return device
|
||||
raise retry.RetryNextException()
|
||||
|
@ -18,8 +18,6 @@ import re
|
||||
import time
|
||||
from glob import glob
|
||||
|
||||
import zeroconf
|
||||
|
||||
from platformio import __version__, exception, proc
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
|
||||
@ -84,6 +82,16 @@ def list_logical_devices():
|
||||
|
||||
|
||||
def list_mdns_services():
|
||||
try:
|
||||
import zeroconf # pylint: disable=import-outside-toplevel
|
||||
except ImportError:
|
||||
result = proc.exec_command(
|
||||
[proc.get_pythonexe_path(), "-m", "pip", "install", "zeroconf"]
|
||||
)
|
||||
if result.get("returncode") != 0:
|
||||
print(result.get("err"))
|
||||
import zeroconf # pylint: disable=import-outside-toplevel
|
||||
|
||||
class mDNSListener:
|
||||
def __init__(self):
|
||||
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
|
||||
|
@ -104,7 +104,7 @@ from platformio.project.options import ProjectOptions
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True),
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
@ -132,24 +132,24 @@ def device_monitor_cmd(**options):
|
||||
ensure_ready=True,
|
||||
).find(initial_port=options["port"])
|
||||
|
||||
if options["menu_char"] == options["exit_char"]:
|
||||
raise exception.UserSideException(
|
||||
"--exit-char can not be the same as --menu-char"
|
||||
)
|
||||
|
||||
# check for unknown filters
|
||||
if options["filters"]:
|
||||
known_filters = set(get_available_filters())
|
||||
unknown_filters = set(options["filters"]) - known_filters
|
||||
if unknown_filters:
|
||||
options["filters"] = list(known_filters & set(options["filters"]))
|
||||
click.secho(
|
||||
("Warning! Skipping unknown filters `%s`. Known filters are `%s`")
|
||||
% (", ".join(unknown_filters), ", ".join(sorted(known_filters))),
|
||||
fg="yellow",
|
||||
if options["menu_char"] == options["exit_char"]:
|
||||
raise exception.UserSideException(
|
||||
"--exit-char can not be the same as --menu-char"
|
||||
)
|
||||
|
||||
start_terminal(options)
|
||||
# check for unknown filters
|
||||
if options["filters"]:
|
||||
known_filters = set(get_available_filters())
|
||||
unknown_filters = set(options["filters"]) - known_filters
|
||||
if unknown_filters:
|
||||
options["filters"] = list(known_filters & set(options["filters"]))
|
||||
click.secho(
|
||||
("Warning! Skipping unknown filters `%s`. Known filters are `%s`")
|
||||
% (", ".join(unknown_filters), ", ".join(sorted(known_filters))),
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
start_terminal(options)
|
||||
|
||||
|
||||
def get_project_options(environment=None):
|
||||
|
@@ -13,7 +13,7 @@
# limitations under the License.

import io
import os.path
import os
from datetime import datetime

from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
@@ -27,8 +27,10 @@ class LogToFile(DeviceMonitorFilterBase):
        self._log_fp = None

    def __call__(self):
        log_file_name = "platformio-device-monitor-%s.log" % datetime.now().strftime(
            "%y%m%d-%H%M%S"
        if not os.path.isdir("logs"):
            os.makedirs("logs")
        log_file_name = os.path.join(
            "logs", "device-monitor-%s.log" % datetime.now().strftime("%y%m%d-%H%M%S")
        )
        print("--- Logging an output to %s" % os.path.abspath(log_file_name))
        # pylint: disable=consider-using-with
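The new code writes monitor logs into a local logs/ directory with a shorter file-name prefix; a sketch of the resulting path (the timestamp is only an example):

    import os
    from datetime import datetime
    log_file_name = os.path.join(
        "logs", "device-monitor-%s.log" % datetime.now().strftime("%y%m%d-%H%M%S")
    )
    print(log_file_name)  # e.g. logs/device-monitor-240131-154210.log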
@ -144,9 +144,8 @@ def new_serial_instance(options): # pylint: disable=too-many-branches
|
||||
except KeyboardInterrupt as exc:
|
||||
click.echo("", err=True)
|
||||
raise UserSideException("User aborted and port is not given") from exc
|
||||
else:
|
||||
if not port:
|
||||
raise UserSideException("Port is not given")
|
||||
if not port:
|
||||
raise UserSideException("Port is not given")
|
||||
try:
|
||||
serial_instance = serial.serial_for_url(
|
||||
port,
|
||||
|
@ -14,7 +14,6 @@
|
||||
|
||||
|
||||
class PlatformioException(Exception):
|
||||
|
||||
MESSAGE = None
|
||||
|
||||
def __str__(self): # pragma: no cover
|
||||
@ -26,7 +25,6 @@ class PlatformioException(Exception):
|
||||
|
||||
|
||||
class ReturnErrorCode(PlatformioException):
|
||||
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
@ -35,7 +33,6 @@ class UserSideException(PlatformioException):
|
||||
|
||||
|
||||
class AbortedByUser(UserSideException):
|
||||
|
||||
MESSAGE = "Aborted by user"
|
||||
|
||||
|
||||
@ -49,7 +46,6 @@ class InvalidUdevRules(UserSideException):
|
||||
|
||||
|
||||
class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Please install `99-platformio-udev.rules`. \nMore details: "
|
||||
"https://docs.platformio.org/en/latest/core/installation/udev-rules.html"
|
||||
@ -57,7 +53,6 @@ class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
|
||||
class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Your `{0}` are outdated. Please update or reinstall them."
|
||||
"\nMore details: "
|
||||
@ -71,32 +66,26 @@ class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
|
||||
class GetSerialPortsError(PlatformioException):
|
||||
|
||||
MESSAGE = "No implementation for your platform ('{0}') available"
|
||||
|
||||
|
||||
class GetLatestVersionError(PlatformioException):
|
||||
|
||||
MESSAGE = "Can not retrieve the latest PlatformIO version"
|
||||
|
||||
|
||||
class InvalidSettingName(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid setting with the name '{0}'"
|
||||
|
||||
|
||||
class InvalidSettingValue(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
|
||||
|
||||
|
||||
class InvalidJSONFile(PlatformioException):
|
||||
|
||||
MESSAGE = "Could not load broken JSON: {0}"
|
||||
|
||||
|
||||
class CIBuildEnvsEmpty(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Can't find PlatformIO build environments.\n"
|
||||
"Please specify `--board` or path to `platformio.ini` with "
|
||||
@ -104,18 +93,7 @@ class CIBuildEnvsEmpty(UserSideException):
|
||||
)
|
||||
|
||||
|
||||
class UpgradeError(PlatformioException):
|
||||
|
||||
MESSAGE = """{0}
|
||||
|
||||
* Upgrade using `pip install -U platformio`
|
||||
* Try different installation/upgrading steps:
|
||||
https://docs.platformio.org/page/installation.html
|
||||
"""
|
||||
|
||||
|
||||
class HomeDirPermissionsError(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"The directory `{0}` or its parent directory is not owned by the "
|
||||
"current user and PlatformIO can not store configuration data.\n"
|
||||
@ -126,7 +104,6 @@ class HomeDirPermissionsError(UserSideException):
|
||||
|
||||
|
||||
class CygwinEnvDetected(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"PlatformIO does not work within Cygwin environment. "
|
||||
"Use native Terminal instead."
|
||||
|
@ -24,7 +24,7 @@ import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, proc
|
||||
from platformio import exception
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
@ -50,6 +50,10 @@ def get_source_dir():
|
||||
return os.path.dirname(curpath)
|
||||
|
||||
|
||||
def get_assets_dir():
|
||||
return os.path.join(get_source_dir(), "assets")
|
||||
|
||||
|
||||
def load_json(file_path):
|
||||
try:
|
||||
with open(file_path, mode="r", encoding="utf8") as f:
|
||||
@ -99,7 +103,7 @@ def calculate_folder_size(path):
|
||||
|
||||
def get_platformio_udev_rules_path():
|
||||
return os.path.abspath(
|
||||
os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
|
||||
os.path.join(get_assets_dir(), "system", "99-platformio-udev.rules")
|
||||
)
|
||||
|
||||
|
||||
@ -177,7 +181,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
|
||||
result = set()
|
||||
# correct fs directory separator
|
||||
src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
|
||||
for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
|
||||
for action, pattern in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
|
||||
candidates = _find_candidates(pattern)
|
||||
if action == "+":
|
||||
result |= candidates
|
||||
@ -189,26 +193,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
|
||||
def to_unix_path(path):
|
||||
if not IS_WINDOWS or not path:
|
||||
return path
|
||||
return re.sub(r"[\\]+", "/", path)
|
||||
|
||||
|
||||
def normalize_path(path):
|
||||
path = os.path.abspath(path)
|
||||
if not IS_WINDOWS or not path.startswith("\\\\"):
|
||||
return path
|
||||
try:
|
||||
result = proc.exec_command(["net", "use"])
|
||||
if result["returncode"] != 0:
|
||||
return path
|
||||
share_re = re.compile(r"\s([A-Z]\:)\s+(\\\\[^\s]+)")
|
||||
for line in result["out"].split("\n"):
|
||||
share = share_re.search(line)
|
||||
if not share:
|
||||
continue
|
||||
path = path.replace(share.group(2), share.group(1))
|
||||
except OSError:
|
||||
pass
|
||||
return path
|
||||
return path.replace("\\", "/")
|
||||
|
||||
|
||||
def expanduser(path):
|
||||
|
@ -13,11 +13,13 @@
|
||||
# limitations under the License.
|
||||
|
||||
import mimetypes
|
||||
import socket
|
||||
|
||||
import click
|
||||
|
||||
from platformio.home.helpers import is_port_used
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.home.run import run_server
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
|
||||
|
||||
@click.command("home", short_help="GUI to manage PlatformIO")
|
||||
@ -48,15 +50,17 @@ from platformio.home.run import run_server
|
||||
),
|
||||
)
|
||||
def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
# hook for `platformio-node-helpers`
|
||||
if host == "__do_not_start__":
|
||||
# download all dependent packages
|
||||
get_core_package_dir("contrib-piohome")
|
||||
return
|
||||
|
||||
# Ensure PIO Home mimetypes are known
|
||||
mimetypes.add_type("text/html", ".html")
|
||||
mimetypes.add_type("text/css", ".css")
|
||||
mimetypes.add_type("application/javascript", ".js")
|
||||
|
||||
# hook for `platformio-node-helpers`
|
||||
if host == "__do_not_start__":
|
||||
return
|
||||
|
||||
home_url = "http://%s:%d%s" % (
|
||||
host,
|
||||
port,
|
||||
@ -92,3 +96,23 @@ def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
shutdown_timeout=shutdown_timeout,
|
||||
home_url=home_url,
|
||||
)
|
||||
|
||||
|
||||
def is_port_used(host, port):
|
||||
socket.setdefaulttimeout(1)
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if IS_WINDOWS:
|
||||
try:
|
||||
s.bind((host, port))
|
||||
s.close()
|
||||
return False
|
||||
except (OSError, socket.error):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
s.connect((host, port))
|
||||
s.close()
|
||||
except socket.error:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -1,60 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import socket
|
||||
|
||||
import requests
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class AsyncSession(requests.Session):
|
||||
async def request( # pylint: disable=signature-differs,invalid-overridden-method
|
||||
self, *args, **kwargs
|
||||
):
|
||||
func = super().request
|
||||
return await run_in_threadpool(func, *args, **kwargs)
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def requests_session():
|
||||
return AsyncSession()
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def get_core_fullpath():
|
||||
return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
|
||||
|
||||
|
||||
def is_port_used(host, port):
|
||||
socket.setdefaulttimeout(1)
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if IS_WINDOWS:
|
||||
try:
|
||||
s.bind((host, port))
|
||||
s.close()
|
||||
return False
|
||||
except (OSError, socket.error):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
s.connect((host, port))
|
||||
s.close()
|
||||
except socket.error:
|
||||
return False
|
||||
|
||||
return True
|
Some files were not shown because too many files have changed in this diff.