Mirror of https://github.com/platformio/platformio-core.git (synced 2025-06-25 17:41:54 +02:00)
Compare commits
332 Commits
Author | SHA1 | Date | |
---|---|---|---|
444c57b4a6 | |||
d787648e71 | |||
846588deec | |||
79142965ce | |||
93bc4fae6c | |||
1aa256d63c | |||
3a133af1a6 | |||
f93d3d509b | |||
145142ea6c | |||
b4b02982d6 | |||
841489c154 | |||
23c142dffd | |||
fc946baa93 | |||
a447022e7f | |||
4c697d9032 | |||
a71443a2ee | |||
20e076191e | |||
d907ecb9e9 | |||
c950d6d366 | |||
29cd2d2bdb | |||
a584a6bce3 | |||
4dc7ea5bd0 | |||
1be6e10f99 | |||
c9016d6939 | |||
baab25a48c | |||
4d4f5a217b | |||
b6d1f4d769 | |||
90fc36cf2d | |||
9be0a8248d | |||
d15314689d | |||
1d4b5c8051 | |||
47a87c57f2 | |||
ec2d01f277 | |||
4e05309e02 | |||
1fd3a4061f | |||
014ac79c87 | |||
dd3fe909a1 | |||
c1afb364e9 | |||
f3c27eadf6 | |||
fe2fd5e880 | |||
07e7dc4717 | |||
a94e5bd5ab | |||
f5ab0e5ddd | |||
3e20abec90 | |||
a4276b4ea6 | |||
cade63fba5 | |||
3a57661230 | |||
33fadd028d | |||
647b131d9b | |||
b537004a75 | |||
67b2759be2 | |||
fe2e8a0a40 | |||
03e84fe325 | |||
b45cdc9cb6 | |||
3aed8e1259 | |||
2d4a87238a | |||
023b58e9f0 | |||
3211a2b91b | |||
4b61de0136 | |||
e6ae18ab0d | |||
4230b223d2 | |||
d224ae658d | |||
20dc006345 | |||
13035ced59 | |||
b9d27240b5 | |||
2441d47321 | |||
cf497e8829 | |||
013153718d | |||
f1726843a2 | |||
44ef6e3469 | |||
eeb5ac456e | |||
aea9075d4b | |||
11a8d9ff7a | |||
7b587ba8bf | |||
9eb6e5166d | |||
aa580360e8 | |||
4c490cc63c | |||
882d4da8cb | |||
781114f026 | |||
7cf8d1d696 | |||
fd1333f031 | |||
8e21259222 | |||
9899547b73 | |||
4075789a32 | |||
ff364610c5 | |||
e5940673d7 | |||
fe140b0566 | |||
2ec5a3154e | |||
956f21b639 | |||
cdac7d497c | |||
591b377e4a | |||
c475578db6 | |||
2bad42ecb1 | |||
0acfc25d56 | |||
9d1593da0b | |||
e9433de50f | |||
fcba901611 | |||
0e3249e8b1 | |||
0d647e164b | |||
c01ef88265 | |||
9fb9e586a0 | |||
28bd200cd6 | |||
56be27fb0b | |||
32991356f3 | |||
dbe58b49bf | |||
d36e39418e | |||
c28740cfb1 | |||
430acc87de | |||
c0d97287dd | |||
0f3dbe623d | |||
6449115635 | |||
d085a02068 | |||
76a11a75b7 | |||
93018930ab | |||
621b24b665 | |||
7606dd4faf | |||
aa06d21abe | |||
042f8dc668 | |||
c4f76848a7 | |||
e1ff9a469d | |||
2239616484 | |||
55be7181b3 | |||
f519a9d524 | |||
f4319f670c | |||
80fc335528 | |||
353f440335 | |||
3e9ca48588 | |||
255e91b51c | |||
adf94843ea | |||
e3e08d9691 | |||
84c7ede0e1 | |||
28c90652bc | |||
a75da327d0 | |||
adf4012b96 | |||
1fe806269d | |||
ffacd17387 | |||
4742ffc9d8 | |||
700c705317 | |||
17ba91977d | |||
f31f9fa616 | |||
485f801c74 | |||
adab425c6d | |||
aabbbef944 | |||
14ce28e028 | |||
ca1f633f9c | |||
a2f3e85760 | |||
f422b5e05c | |||
ba58db3079 | |||
4729d9f55d | |||
41bd751ec2 | |||
c74c9778a1 | |||
f2d16e7631 | |||
b181406a1f | |||
dc16f80ffc | |||
125be4bfd4 | |||
14907579cd | |||
b0a1f3ae16 | |||
195304bbea | |||
e4c4f2ac50 | |||
77e6d1b099 | |||
cf4da42b25 | |||
51bf17515e | |||
1e2c37c190 | |||
204a60dd52 | |||
0f554d2f31 | |||
f382aae66b | |||
998da59f7c | |||
4cad98601d | |||
34545d3f12 | |||
127b422d25 | |||
8c61f0f6b6 | |||
fb93c1937c | |||
827bd09c61 | |||
984d63983d | |||
11df021750 | |||
ac6d94860b | |||
b238c55e53 | |||
961ab6b35e | |||
e1f34c7ea0 | |||
f70e6d50c6 | |||
540465291a | |||
0b3c0144e6 | |||
7ab27ddf9d | |||
e78bf51f68 | |||
5f8c15b96a | |||
9c61ef544d | |||
5548197a74 | |||
2458309d55 | |||
7229e1cce4 | |||
3e95134721 | |||
687189a142 | |||
51b4cd88db | |||
fe52b79eb2 | |||
091c96eb07 | |||
f2eead6ece | |||
c2b3097618 | |||
2728c90441 | |||
5cac6d8b88 | |||
bd34c0f437 | |||
f1c445be15 | |||
b88c393b4e | |||
897844ebc1 | |||
00409fc096 | |||
b75bdbd320 | |||
a0f8def616 | |||
c946613019 | |||
2ee8214485 | |||
7e89e551ae | |||
6972c9c100 | |||
5cfaea91d6 | |||
ce735c0ae5 | |||
aa0df36c8a | |||
99224d7d4e | |||
532759c0c6 | |||
fb43d2508a | |||
07944a9d5b | |||
8b6a4b8ce8 | |||
6e75dc0d57 | |||
a733f3c868 | |||
65397fe059 | |||
48a823d39e | |||
f8b5266c1e | |||
9170eee6e4 | |||
89f4574680 | |||
831f7f52bc | |||
dccc14b507 | |||
3a21f48c9c | |||
54ff3a8d4e | |||
4474175e52 | |||
a983075dac | |||
3268b516a9 | |||
5c9b373b65 | |||
0fe6bf262e | |||
390755c499 | |||
deca77d1a3 | |||
bc2e51d51f | |||
bce70d4945 | |||
940fa327f5 | |||
db8f027f30 | |||
39b61d50e6 | |||
f85c3081fe | |||
2a1fd273ee | |||
a423a4dde4 | |||
abda3edad6 | |||
be4d016f61 | |||
68e62c7137 | |||
bf8f1e9efb | |||
a102fd2d48 | |||
ff221b103a | |||
646aa4f45b | |||
325d4c16b8 | |||
f47083b86b | |||
3d48f3ec04 | |||
837ea85c3c | |||
9585e2a3e3 | |||
5396882e75 | |||
109c537d86 | |||
b239628ac3 | |||
25c7c60f0d | |||
8a38442bba | |||
205b29560f | |||
bbcd92b7c6 | |||
3b3fbecbf3 | |||
a3e66d6325 | |||
355f57e888 | |||
6eff31b5d3 | |||
01423a7659 | |||
0f9a5f8eee | |||
1c419ef71a | |||
01ab1fa4c0 | |||
0ff46bdd88 | |||
dd033bf675 | |||
a28a3d31c9 | |||
450f48ba81 | |||
813861ddae | |||
939b9b9112 | |||
98edf7609f | |||
9f0efdeb5c | |||
3fd063d8ed | |||
1b55da0af2 | |||
4dc44868ea | |||
f720cd841c | |||
53f1d82890 | |||
e78efff33b | |||
a754a28cd8 | |||
1d97982230 | |||
e022b67161 | |||
82de26d401 | |||
31218060db | |||
e25b170b34 | |||
326ebcf593 | |||
8b604c1a03 | |||
f219f35ac8 | |||
4d89593b05 | |||
3881a8c677 | |||
e9cf551101 | |||
5ffa42a5a2 | |||
a5052433f2 | |||
425332040e | |||
3a230dfb51 | |||
292dc3fd71 | |||
e48dfbaadc | |||
c0d2abc9a7 | |||
d017a8197e | |||
378528abfc | |||
91487f179e | |||
363fee4ba0 | |||
41cc735979 | |||
c9235a5276 | |||
355f5afab9 | |||
2b36c7086a | |||
f819cbb4b8 | |||
e3c33596db | |||
1bcec6654d | |||
9df692529b | |||
141d6fc4a6 | |||
6bc915f7db | |||
4ae24a619f | |||
7f7bc76b20 | |||
55e7b36dc4 | |||
395a4053aa | |||
cb65bdf22f | |||
6ea7ded483 | |||
eeb0116f28 | |||
63ca19541f | |||
e0f839a372 | |||
4fc6b26db5 | |||
4388cd4321 | |||
71afa639e2 | |||
89ffd82275 | |||
148ce1a897 | |||
58a59f8ae8 |
5
.github/ISSUE_TEMPLATE.md
vendored
@@ -6,9 +6,8 @@ What kind of issue is this?
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)
All issues related to PlatformIO IDE should be reported to the
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues) repository

- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
|
||||
|
20
.github/workflows/core.yml
vendored
@@ -7,18 +7,18 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, windows-latest, macos-latest]
python-version: ["3.6", "3.9", "3.11"]
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.11", "3.12", "3.13"]

runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -27,12 +27,18 @@ jobs:
python -m pip install --upgrade pip
pip install tox

- name: Python Lint
if: ${{ matrix.python-version != '3.6' }}
- name: Run "codespell" on Linux
if: startsWith(matrix.os, 'ubuntu')
run: |
tox -e lint
python -m pip install codespell
make codespell

- name: Core System Info
run: |
tox -e py

- name: Integration Tests
if: ${{ matrix.python-version == '3.11' }}
run: |
tox -e testcore
|
||||
|
||||
|
10
.github/workflows/deployment.yml
vendored
@@ -12,19 +12,19 @@ jobs:
environment: production

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
pip install tox build

- name: Deployment Tests
env:
@@ -34,8 +34,8 @@ jobs:
run: |
tox -e testcore

- name: Build Python source tarball
run: python setup.py sdist
- name: Build Python distributions
run: python -m build

- name: Publish package to PyPI
if: ${{ github.ref == 'refs/heads/master' }}
|
||||
|
16
.github/workflows/docs.yml
vendored
@ -7,13 +7,13 @@ jobs:
|
||||
name: Build Docs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
@ -40,7 +40,7 @@ jobs:
|
||||
|
||||
- name: Save artifact
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: docs
|
||||
path: ./docs.tar.gz
|
||||
@ -57,7 +57,7 @@ jobs:
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
steps:
|
||||
- name: Download artifact
|
||||
uses: actions/download-artifact@v3
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: docs
|
||||
- name: Unpack artifact
|
||||
@ -65,7 +65,7 @@ jobs:
|
||||
mkdir ./${{ env.LATEST_DOCS_DIR }}
|
||||
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
|
||||
- name: Delete Artifact
|
||||
uses: geekyeggo/delete-artifact@v2
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: docs
|
||||
- name: Select Docs type
|
||||
@ -78,7 +78,7 @@ jobs:
|
||||
fi
|
||||
- name: Checkout latest Docs
|
||||
continue-on-error: true
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: ${{ env.DOCS_REPO }}
|
||||
path: ${{ env.DOCS_DIR }}
|
||||
@ -101,7 +101,7 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
- name: Deploy to Github Pages
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
uses: peaceiris/actions-gh-pages@v4
|
||||
with:
|
||||
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
|
||||
external_repository: ${{ env.DOCS_REPO }}
|
||||
|
6
.github/workflows/examples.yml
vendored
@ -15,14 +15,14 @@ jobs:
|
||||
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.9"
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@ -34,7 +34,7 @@ jobs:
|
||||
run: |
|
||||
# Free space
|
||||
sudo apt clean
|
||||
docker rmi $(docker image ls -aq)
|
||||
# docker rmi $(docker image ls -aq)
|
||||
df -h
|
||||
tox -e testexamples
|
||||
|
||||
|
19
.github/workflows/projects.yml
vendored
@ -13,11 +13,6 @@ jobs:
|
||||
folder: "Marlin"
|
||||
config_dir: "Marlin"
|
||||
env_name: "mega2560"
|
||||
# - esphome:
|
||||
# repository: "esphome/esphome"
|
||||
# folder: "esphome"
|
||||
# config_dir: "esphome"
|
||||
# env_name: "esp32-arduino"
|
||||
- smartknob:
|
||||
repository: "scottbez1/smartknob"
|
||||
folder: "smartknob"
|
||||
@ -34,36 +29,28 @@ jobs:
|
||||
config_dir: "OpenMQTTGateway"
|
||||
env_name: "esp32-m5atom-lite"
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
project: {"esphome": "", "repository": "esphome/esphome", "folder": "esphome", "config_dir": "esphome", "env_name": "esp32-arduino"}
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: 3.11
|
||||
|
||||
- name: Install PlatformIO
|
||||
run: pip install -U .
|
||||
|
||||
- name: Check out ${{ matrix.project.repository }}
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: "recursive"
|
||||
repository: ${{ matrix.project.repository }}
|
||||
path: ${{ matrix.project.folder }}
|
||||
|
||||
- name: Install ESPHome dependencies
|
||||
# Requires esptool package as it's used in a custom prescript
|
||||
if: ${{ contains(matrix.project.repository, 'esphome') }}
|
||||
run: pip install esptool==3.*
|
||||
|
||||
- name: Compile ${{ matrix.project.repository }}
|
||||
run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
|
||||
|
||||
|
@@ -6,12 +6,13 @@ To get started, <a href="https://cla-assistant.io/platformio/platformio-core">si
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`
4. Go to the root of the PlatformIO Core project where `tox.ini` is located (``cd platformio-core``) and run `tox -e py39`.
You can replace `py39` with your own Python version. For example, `py311` means Python 3.11.
5. Activate current development environment:

* Windows: `.tox\py37\Scripts\activate`
* Bash/ZSH: `source .tox/py37/bin/activate`
* Fish: `source .tox/py37/bin/activate.fish`
* Windows: `.tox\py39\Scripts\activate`
* Bash/ZSH: `source .tox/py39/bin/activate`
* Fish: `source .tox/py39/bin/activate.fish`

6. Make changes to code, documentation, etc.
7. Lint source code `make before-commit`
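A quick way to confirm that the activated tox environment from the steps above works is to import the package and call the CLI. This is an illustrative sketch, not part of the repository, and it assumes the `pio` entry point is on PATH inside the environment.

# Sanity check for the development environment (illustrative sketch).
import subprocess
import platformio

print("PlatformIO Core version:", platformio.__version__)
subprocess.run(["pio", "--version"], check=True)  # assumes the `pio` script is on PATH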
|
||||
|
115
HISTORY.rst
@ -7,13 +7,126 @@ Release Notes
|
||||
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
|
||||
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
|
||||
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
|
||||
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
|
||||
.. |PIOHOME| replace:: `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
|
||||
|
||||
.. _release_notes_6:
|
||||
|
||||
PlatformIO Core 6
|
||||
-----------------
|
||||
|
||||
**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**
|
||||
Unlock the true potential of embedded software development with
|
||||
PlatformIO's collaborative ecosystem, embracing declarative principles,
|
||||
test-driven methodologies, and modern toolchains for unrivaled success.
|
||||
|
||||
6.1.19 (2025-??-??)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Fixed a regression issue where custom build flags were not properly reflected in the `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ file, ensuring accurate compilation database generation
|
||||
|
||||
6.1.18 (2025-03-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a regression issue that prevented |PIOHOME| from opening external links (`issue #5084 <https://github.com/platformio/platformio-core/issues/5084>`_)
|
||||
|
||||
6.1.17 (2025-02-13)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the `PLATFORMIO_RUN_JOBS <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_RUN_JOBS>`__ environment variable, allowing manual override of the number of parallel build jobs (`issue #5077 <https://github.com/platformio/platformio-core/issues/5077>`_)
|
||||
* Added support for ``tar.xz`` tarball dependencies (`pull #4974 <https://github.com/platformio/platformio-core/pull/4974>`_)
|
||||
* Ensured that dependencies of private libraries are no longer unnecessarily re-installed, optimizing dependency management and reducing redundant operations (`issue #4987 <https://github.com/platformio/platformio-core/issues/4987>`_)
|
||||
* Resolved an issue where the ``compiledb`` target failed to properly escape compiler executable paths containing spaces (`issue #4998 <https://github.com/platformio/platformio-core/issues/4998>`_)
|
||||
* Resolved an issue with incorrect path resolution when linking static libraries via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option (`issue #5004 <https://github.com/platformio/platformio-core/issues/5004>`_)
|
||||
* Resolved an issue where the ``--project-dir`` flag did not function correctly with the `pio check <https://docs.platformio.org/en/latest/core/userguide/cmd_check.html>`__ and `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ commands (`issue #5029 <https://github.com/platformio/platformio-core/issues/5029>`_)
|
||||
* Resolved an issue where the |LDF| occasionally excluded bundled platform libraries from the dependency graph (`pull #4941 <https://github.com/platformio/platformio-core/pull/4941>`_)
|
||||
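A minimal sketch of using the `PLATFORMIO_RUN_JOBS` variable introduced in the 6.1.17 notes above. Invoking `pio run` through `subprocess` is only one way to use it, and the job count is an arbitrary example value.

# Limit PlatformIO to two parallel build jobs for a single run (hedged sketch).
import os
import subprocess

env = os.environ.copy()
env["PLATFORMIO_RUN_JOBS"] = "2"  # overrides the auto-detected job count
subprocess.run(["pio", "run"], env=env, check=True)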
|
||||
6.1.16 (2024-09-26)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.13
|
||||
* Introduced the `PLATFORMIO_SYSTEM_TYPE <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_SYSTEM_TYPE>`__ environment variable, enabling manual override of the detected system type for greater flexibility and control in custom build environments
|
||||
* Enhanced internet connection checks by falling back to HTTPS protocol when HTTP (port 80) fails (`issue #4980 <https://github.com/platformio/platformio-core/issues/4980>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.8.1) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.8.1>`__)
|
||||
* Upgraded the `Doctest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ testing framework to version 2.4.11, the `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ to version 1.15.2, and the `Unity <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/unity.html>`__ to version 2.6.0, incorporating the latest features and improvements for enhanced testing capabilities
|
||||
* Corrected an issue where the incorrect public class was imported for the ``DoctestTestRunner`` (`issue #4949 <https://github.com/platformio/platformio-core/issues/4949>`_)
|
||||
|
||||
6.1.15 (2024-04-25)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
|
||||
* Resolved an issue related to the inaccurate detection of the Clang compiler (`pull #4897 <https://github.com/platformio/platformio-core/pull/4897>`_)
|
||||
|
||||
6.1.14 (2024-03-21)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
|
||||
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
|
||||
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
|
||||
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
|
||||
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
|
||||
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
|
||||
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
|
||||
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
|
||||
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)
|
||||
|
||||
6.1.13 (2024-01-12)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
|
||||
|
||||
6.1.12 (2024-01-10)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.12
|
||||
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
|
||||
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
|
||||
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
|
||||
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
|
||||
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
|
||||
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
|
||||
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
|
||||
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
|
||||
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
|
||||
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
|
||||
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
|
||||
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
|
||||
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
|
||||
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
|
||||
|
||||
6.1.11 (2023-08-31)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
|
||||
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within |PIOHOME|
|
||||
|
||||
6.1.10 (2023-08-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved an issue that caused generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to break when the ``-iprefix`` compiler flag was used
|
||||
* Resolved an issue encountered while utilizing the `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`__ command on the Windows platform to execute Python scripts from a package
|
||||
* Implemented a crucial improvement to the `pio run <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, guaranteeing that the ``monitor`` target is not executed if any of the preceding targets, such as ``upload``, encounter failures
|
||||
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.11 with new checks, CLI commands and various analysis improvements
|
||||
* Resolved a critical issue that arose on macOS ARM platforms due to the Python "requests" module, leading to a "ModuleNotFoundError: No module named 'chardet'" (`issue #4702 <https://github.com/platformio/platformio-core/issues/4702>`_)
|
||||
|
||||
6.1.9 (2023-07-06)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Rectified a regression bug that occurred when the ``-include`` flag was passed via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option as a relative path and subsequently expanded (`issue #4683 <https://github.com/platformio/platformio-core/issues/4683>`_)
|
||||
* Resolved an issue that resulted in unresolved absolute toolchain paths when generating the `Compilation database "compile_commands.json" <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ (`issue #4684 <https://github.com/platformio/platformio-core/issues/4684>`_)
|
||||
|
||||
6.1.8 (2023-07-05)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added a new ``--lint`` option to the `pio project config <https://docs.platformio.org/en/latest/core/userguide/project/cmd_config.html>`__ command, enabling users to efficiently perform linting on the |PIOCONF|
|
||||
* Enhanced the parsing of the |PIOCONF| to provide comprehensive diagnostic information
|
||||
* Expanded the functionality of the |LIBRARYJSON| manifest by allowing the use of the underscore symbol in the `keywords <https://docs.platformio.org/en/latest/manifests/library-json/fields/keywords.html>`__ field
|
||||
* Optimized project integration templates to address the issue of long paths on Windows (`issue #4652 <https://github.com/platformio/platformio-core/issues/4652>`_)
|
||||
* Refactored |UNITTESTING| engine to resolve compiler warnings with "-Wpedantic" option (`pull #4671 <https://github.com/platformio/platformio-core/pull/4671>`_)
|
||||
* Eliminated erroneous warning regarding the use of obsolete PlatformIO Core when downgrading to the stable version (`issue #4664 <https://github.com/platformio/platformio-core/issues/4664>`_)
|
||||
* Updated the `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ command to return C/C++ flags as parsed Unix shell arguments when dumping project build metadata
|
||||
* Resolved a critical issue related to the usage of the ``-include`` flag within the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option, specifically when employing dynamic variables (`issue #4682 <https://github.com/platformio/platformio-core/issues/4682>`_)
|
||||
* Removed PlatformIO IDE for Atom from the documentation as `Atom has been deprecated <https://github.blog/2022-06-08-sunsetting-atom/>`__
|
||||
|
||||
6.1.7 (2023-05-08)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
9
Makefile
@@ -10,10 +10,13 @@ format:
black ./platformio
black ./tests

test:
py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
codespell:
codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

before-commit: isort format lint
test:
pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: codespell isort format lint

clean-docs:
rm -rf docs/_build
|
||||
|
@@ -36,9 +36,11 @@ PlatformIO Core
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core

`PlatformIO <https://platformio.org>`_ is a professional collaborative platform for embedded development.
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.

**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.

* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
|
||||
|
2
docs
Submodule docs updated: 98609771ba...70ab7ee27b
2
examples
Submodule examples updated: 3e23b5ac43...0409a90a01
@@ -12,20 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.

VERSION = (6, 1, 7)
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
"Your Gateway to Embedded Software Development Excellence. "
"Unlock the true potential of embedded software development "
"with PlatformIO's collaborative ecosystem, embracing "
"declarative principles, test-driven methodologies, and "
"modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"
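For reference, the `__version__` string is assembled directly from the `VERSION` tuple shown in the hunk above; a standalone restatement:

# How the dev version string above is derived (restated for illustration).
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])
print(__version__)  # -> "6.1.19a2"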
|
||||
|
||||
@@ -42,15 +38,6 @@ __registry_mirror_hosts__ = [
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pioremote": "~1.0.0",
"tool-scons": "~4.40502.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.150005.0",
"tool-pvs-studio": "~7.18.0",
}

__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
|
||||
|
@ -14,7 +14,7 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
from traceback import format_exc
|
||||
import traceback
|
||||
|
||||
import click
|
||||
|
||||
@ -53,13 +53,13 @@ def cli(ctx, force, caller, no_ansi): # pylint: disable=unused-argument
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
maintenance.on_platformio_start(ctx, caller)
|
||||
maintenance.on_cmd_start(ctx, caller)
|
||||
|
||||
|
||||
@cli.result_callback()
|
||||
@click.pass_context
|
||||
def process_result(ctx, result, *_, **__):
|
||||
maintenance.on_platformio_end(ctx, result)
|
||||
def process_result(*_, **__):
|
||||
maintenance.on_cmd_end()
|
||||
|
||||
|
||||
def configure():
|
||||
@ -96,6 +96,7 @@ def main(argv=None):
|
||||
if argv:
|
||||
assert isinstance(argv, list)
|
||||
sys.argv = argv
|
||||
|
||||
try:
|
||||
ensure_python3(raise_exception=True)
|
||||
configure()
|
||||
@ -106,18 +107,18 @@ def main(argv=None):
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
if not isinstance(exc, exception.ReturnErrorCode):
|
||||
maintenance.on_platformio_exception(exc)
|
||||
error_str = "Error: "
|
||||
error_str = f"{exc.__class__.__name__}: "
|
||||
if isinstance(exc, exception.PlatformioException):
|
||||
error_str += str(exc)
|
||||
else:
|
||||
error_str += format_exc()
|
||||
error_str += traceback.format_exc()
|
||||
error_str += """
|
||||
============================================================
|
||||
|
||||
An unexpected error occurred. Further steps:
|
||||
|
||||
* Verify that you have the latest version of PlatformIO using
|
||||
`pip install -U platformio` command
|
||||
`python -m pip install -U platformio` command
|
||||
|
||||
* Try to find answer in FAQ Troubleshooting section
|
||||
https://docs.platformio.org/page/faq/index.html
|
||||
@ -129,6 +130,8 @@ An unexpected error occurred. Further steps:
|
||||
"""
|
||||
click.secho(error_str, fg="red", err=True)
|
||||
exit_code = int(str(exc)) if str(exc).isdigit() else 1
|
||||
|
||||
maintenance.on_platformio_exit()
|
||||
sys.argv = prev_sys_argv
|
||||
return exit_code
|
||||
|
||||
|
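The error-handling change in the hunk above now prefixes messages with the exception class name and keeps the full traceback only for unexpected errors. A simplified, self-contained sketch of that logic (the local `PlatformioException` class stands in for `platformio.exception.PlatformioException`):

# Simplified sketch of the new top-level error formatting shown above.
import traceback

class PlatformioException(Exception):  # stand-in for platformio.exception.PlatformioException
    pass

def format_error(exc):
    error_str = f"{exc.__class__.__name__}: "
    if isinstance(exc, PlatformioException):
        error_str += str(exc)  # known PlatformIO errors: message only
    else:
        error_str += traceback.format_exc()  # unexpected errors: full traceback
    return error_str

try:
    raise PlatformioException("Could not find a package")
except Exception as exc:
    print(format_error(exc))  # -> "PlatformioException: Could not find a package"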
@ -16,7 +16,7 @@ import os
|
||||
import time
|
||||
|
||||
from platformio import __accounts_api__, app
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
from platformio.http import HTTPClient, HTTPClientError
|
||||
|
||||
|
||||
@ -24,11 +24,11 @@ class AccountError(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class AccountNotAuthorized(AccountError):
|
||||
class AccountNotAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
|
||||
|
||||
|
||||
class AccountAlreadyAuthorized(AccountError):
|
||||
class AccountAlreadyAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are already authorized with {0} account."
|
||||
|
||||
|
||||
@ -144,7 +144,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint:disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
|
@ -48,11 +48,13 @@ def team_list_cmd(orgname, json_output):
|
||||
table_data.append(
|
||||
(
|
||||
"Members:",
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-",
|
||||
(
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-"
|
||||
),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(table_data, tablefmt="plain"))
|
||||
|
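The reformatted "Members:" expression above (a Black-style wrap of the same conditional) reduces to the following standalone snippet; the sample team dictionary is made up for illustration.

# Standalone illustration of the "Members:" cell formatting shown above.
team = {"members": [{"username": "alice"}, {"username": "bob"}]}  # hypothetical data
members_cell = (
    ", ".join(member.get("username") for member in team.get("members"))
    if team.get("members")
    else "-"
)
print(members_cell)  # -> "alice, bob"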
@ -18,6 +18,7 @@ import json
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from platformio import __version__, exception, fs, proc
|
||||
@ -68,18 +69,23 @@ SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
"pause_telemetry": False,
|
||||
}
|
||||
|
||||
|
||||
def resolve_state_path(conf_option_dir, file_name, ensure_dir_exists=True):
|
||||
state_dir = ProjectConfig.get_instance().get("platformio", conf_option_dir)
|
||||
if ensure_dir_exists and not os.path.isdir(state_dir):
|
||||
os.makedirs(state_dir)
|
||||
return os.path.join(state_dir, file_name)
|
||||
|
||||
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
|
||||
if not os.path.isdir(core_dir):
|
||||
os.makedirs(core_dir)
|
||||
self.path = os.path.join(core_dir, "appstate.json")
|
||||
self.path = resolve_state_path("core_dir", "appstate.json")
|
||||
self._storage = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
@ -248,9 +254,14 @@ def get_cid():
|
||||
cid = str(cid)
|
||||
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
set_state_item("cid", cid)
|
||||
set_state_item("created_at", int(time.time()))
|
||||
return cid
|
||||
|
||||
|
||||
def get_project_id(project_dir):
|
||||
return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
data = [
|
||||
"PlatformIO/%s" % __version__,
|
||||
@ -263,6 +274,8 @@ def get_user_agent():
|
||||
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
|
||||
data.append("Python/%s" % platform.python_version())
|
||||
data.append("Platform/%s" % platform.platform())
|
||||
if not get_setting("enable_telemetry"):
|
||||
data.append("Telemetry/0")
|
||||
return " ".join(data)
|
||||
|
||||
|
||||
|
@ -36,6 +36,8 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
@ -85,6 +87,8 @@ ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVI
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
@ -171,3 +175,9 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
@ -1,6 +0,0 @@
|
||||
% for include in filter_includes(includes):
|
||||
-I{{include}}
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
@ -1,9 +0,0 @@
|
||||
% _defines = " ".join(["-D%s" % d.replace(" ", "\\\\ ") for d in defines])
|
||||
{
|
||||
"execPath": "{{ cxx_path }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! to_unix_path(cc_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! to_unix_path(cxx_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
.pio
|
||||
.clang_complete
|
||||
.gcc-flags.json
|
@ -1,3 +0,0 @@
|
||||
.pio
|
||||
CMakeListsPrivate.txt
|
||||
cmake-build-*/
|
@ -1,33 +0,0 @@
|
||||
# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
|
||||
# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
|
||||
#
|
||||
# If you need to override existing CMake configuration or add extra,
|
||||
# please create `CMakeListsUser.txt` in the root of project.
|
||||
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
|
||||
|
||||
cmake_minimum_required(VERSION 3.13)
|
||||
set(CMAKE_SYSTEM_NAME Generic)
|
||||
set(CMAKE_C_COMPILER_WORKS 1)
|
||||
set(CMAKE_CXX_COMPILER_WORKS 1)
|
||||
|
||||
project("{{project_name}}" C CXX)
|
||||
|
||||
include(CMakeListsPrivate.txt)
|
||||
|
||||
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/CMakeListsUser.txt)
|
||||
include(CMakeListsUser.txt)
|
||||
endif()
|
||||
|
||||
add_custom_target(
|
||||
Production ALL
|
||||
COMMAND platformio -c clion run "$<$<NOT:$<CONFIG:All>>:-e${CMAKE_BUILD_TYPE}>"
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_custom_target(
|
||||
Debug ALL
|
||||
COMMAND platformio -c clion debug "$<$<NOT:$<CONFIG:All>>:-e${CMAKE_BUILD_TYPE}>"
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_executable(Z_DUMMY_TARGET ${SRC_LIST})
|
@ -1,127 +0,0 @@
|
||||
# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
|
||||
# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
|
||||
#
|
||||
# If you need to override existing CMake configuration or add extra,
|
||||
# please create `CMakeListsUser.txt` in the root of project.
|
||||
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
|
||||
|
||||
% import os
|
||||
% import re
|
||||
%
|
||||
% from platformio.project.helpers import load_build_metadata
|
||||
%
|
||||
% def _normalize_path(path):
|
||||
% if project_dir in path:
|
||||
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
|
||||
% elif user_home_dir in path:
|
||||
% if "windows" in systype:
|
||||
% path = path.replace(user_home_dir, "${ENV_HOME_PATH}")
|
||||
% else:
|
||||
% path = path.replace(user_home_dir, "$ENV{HOME}")
|
||||
% end
|
||||
% end
|
||||
% return path
|
||||
% end
|
||||
%
|
||||
% def _fix_lib_dirs(lib_dirs):
|
||||
% result = []
|
||||
% for lib_dir in lib_dirs:
|
||||
% if not os.path.isabs(lib_dir):
|
||||
% lib_dir = os.path.join(project_dir, lib_dir)
|
||||
% end
|
||||
% result.append(to_unix_path(os.path.normpath(lib_dir)))
|
||||
% end
|
||||
% return result
|
||||
% end
|
||||
%
|
||||
% def _escape(text):
|
||||
% return to_unix_path(text).replace('"', '\\"')
|
||||
% end
|
||||
%
|
||||
% def _get_lib_dirs(envname):
|
||||
% env_libdeps_dir = os.path.join(config.get("platformio", "libdeps_dir"), envname)
|
||||
% env_lib_extra_dirs = config.get("env:" + envname, "lib_extra_dirs", [])
|
||||
% return _fix_lib_dirs([env_libdeps_dir] + env_lib_extra_dirs)
|
||||
% end
|
||||
%
|
||||
% envs = config.envs()
|
||||
|
||||
|
||||
% if len(envs) > 1:
|
||||
set(CMAKE_CONFIGURATION_TYPES "{{ ";".join(envs) }};" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
|
||||
% else:
|
||||
set(CMAKE_CONFIGURATION_TYPES "{{ env_name }}" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
|
||||
% end
|
||||
|
||||
# Convert "Home Directory" that may contain unescaped backslashes on Windows
|
||||
% if "windows" in systype:
|
||||
file(TO_CMAKE_PATH $ENV{HOMEDRIVE}$ENV{HOMEPATH} ENV_HOME_PATH)
|
||||
% end
|
||||
|
||||
% if svd_path:
|
||||
set(CLION_SVD_FILE_PATH "{{ _normalize_path(svd_path) }}" CACHE FILEPATH "Peripheral Registers Definitions File" FORCE)
|
||||
% end
|
||||
|
||||
SET(CMAKE_C_COMPILER "{{ _normalize_path(cc_path) }}")
|
||||
SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
|
||||
SET(CMAKE_CXX_FLAGS "{{ _normalize_path(to_unix_path(cxx_flags)) }}")
|
||||
SET(CMAKE_C_FLAGS "{{ _normalize_path(to_unix_path(cc_flags)) }}")
|
||||
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
% if cc_stds:
|
||||
SET(CMAKE_C_STANDARD {{ cc_stds[-1] }})
|
||||
% end
|
||||
% if cxx_stds:
|
||||
set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
|
||||
% end
|
||||
|
||||
if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
|
||||
% for define in defines:
|
||||
add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
|
||||
% end
|
||||
|
||||
% for include in filter_includes(includes):
|
||||
include_directories("{{ _normalize_path(include) }}")
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
|
||||
% for dir in _get_lib_dirs(env_name):
|
||||
{{ _normalize_path(dir) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
endif()
|
||||
|
||||
% leftover_envs = list(set(envs) ^ set([env_name]))
|
||||
%
|
||||
% ide_data = {}
|
||||
% if leftover_envs:
|
||||
% ide_data = load_build_metadata(project_dir, leftover_envs)
|
||||
% end
|
||||
%
|
||||
% for env, data in ide_data.items():
|
||||
if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
|
||||
% for define in data["defines"]:
|
||||
add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
|
||||
% end
|
||||
|
||||
% for include in filter_includes(data["includes"]):
|
||||
include_directories("{{ _normalize_path(to_unix_path(include)) }}")
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
|
||||
% for dir in _get_lib_dirs(env):
|
||||
{{ _normalize_path(dir) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
endif()
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE SRC_LIST
|
||||
% for path in (project_src_dir, project_lib_dir, project_test_dir):
|
||||
{{ _normalize_path(path) + "/*.*" }}
|
||||
% end
|
||||
)
|
||||
|
||||
list(APPEND SRC_LIST ${EXTRA_LIB_SOURCES})
|
@ -1 +0,0 @@
|
||||
{{cc_flags.replace('-mlongcalls', '-mlong-calls')}}
|
@ -1 +0,0 @@
|
||||
{{cxx_flags.replace('-mlongcalls', '-mlong-calls')}}
|
@ -30,7 +30,7 @@ from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path, where_is_program
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
@ -38,7 +38,6 @@ AllowSubstExceptions(NameError)
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("PROJECT_CONFIG",),
|
||||
("PIOENV",),
|
||||
@ -148,13 +147,13 @@ if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
@ -194,13 +193,6 @@ if env.get("SIZETOOL") and not (
|
||||
Default("checkprogsize")
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
env.Alias("compiledb", env.CompilationDatabase("$COMPILATIONDB_PATH"))
|
||||
|
||||
# Print configured protocols
|
||||
|
@ -26,6 +26,7 @@ from SCons.Script import SConscript # pylint: disable=import-error
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
@ -53,11 +54,12 @@ def GetBuildType(env):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return "+".join(modes or ["release"])
|
||||
return ", ".join(modes or ["release"])
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
env.ProcessProgramDeps()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
env.ProcessProjectDeps()
|
||||
|
||||
# append into the beginning a main LD script
|
||||
@ -125,9 +127,23 @@ def ProcessProgramDeps(env):
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS and env.get(
|
||||
"COMPILATIONDB_INCLUDE_TOOLCHAIN"
|
||||
):
|
||||
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" not in COMMAND_LINE_TARGETS:
|
||||
return
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]) or '"' in env[cmd]:
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
if " " in env[cmd]: # issue #4998: Space in compilator path
|
||||
env[cmd] = f'"{env[cmd]}"'
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
@ -200,13 +216,19 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
p = env.subst(p)
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative LIBs
|
||||
for i, l in enumerate(result.get("LIBS", [])):
|
||||
if isinstance(l, FS.File):
|
||||
result["LIBS"][i] = os.path.abspath(l.get_path())
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.File(os.path.abspath(f[1].get_path())))
|
||||
result["CCFLAGS"][i] = (f[0], env.subst(f[1].get_path()))
|
||||
|
||||
return result
|
||||
|
||||
@ -365,6 +387,7 @@ def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
|
@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import glob
|
||||
import os
|
||||
|
||||
@ -30,12 +29,7 @@ def IsIntegrationDump(_):
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
result["build"].extend(
|
||||
[
|
||||
env.subst("$PROJECT_INCLUDE_DIR"),
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
)
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
@ -139,9 +133,9 @@ def dump_svd_path(env):
|
||||
return None
|
||||
|
||||
|
||||
def _subst_cmd(env, cmd):
|
||||
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
|
||||
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
|
||||
def _split_flags_string(env, s):
|
||||
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
|
||||
return [str(arg) for arg in args]
|
||||
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
@ -154,8 +148,8 @@ def DumpIntegrationData(*args):
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
|
@ -39,7 +39,7 @@ from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@ -309,10 +309,10 @@ class LibBuilderBase:
|
||||
if not self.dependencies or self._deps_are_processed:
|
||||
return
|
||||
self._deps_are_processed = True
|
||||
for item in self.dependencies:
|
||||
for dependency in self.dependencies:
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item["name"] != lb.name:
|
||||
if not lb.is_dependency_compatible(dependency):
|
||||
continue
|
||||
found = True
|
||||
if lb not in self.depbuilders:
|
||||
@ -322,9 +322,28 @@ class LibBuilderBase:
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write(
|
||||
"Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item["name"], self.name)
|
||||
"library\n" % (dependency["name"], self.name)
|
||||
)
|
||||
|
||||
def is_dependency_compatible(self, dependency):
|
||||
pkg = PackageItem(self.path)
|
||||
qualifiers = {"name": self.name, "version": self.version}
|
||||
if pkg.metadata:
|
||||
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
|
||||
if pkg.metadata.spec and pkg.metadata.spec.owner:
|
||||
qualifiers["owner"] = pkg.metadata.spec.owner
|
||||
dep_qualifiers = {
|
||||
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
|
||||
}
|
||||
if (
|
||||
"version" in dep_qualifiers
|
||||
and not PackageSpec(dep_qualifiers["version"]).requirements
|
||||
):
|
||||
del dep_qualifiers["version"]
|
||||
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
|
||||
PackageCompatibility(**qualifiers)
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
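A hedged illustration of how the `is_dependency_compatible` method added above reduces a dependency declaration to owner/name/version qualifiers before matching. The package names and versions here are hypothetical, and only the `PackageCompatibility` calls visible in the diff are relied on.

# Hypothetical data; mirrors the qualifier matching performed by is_dependency_compatible().
from platformio.package.meta import PackageCompatibility

dependency = {"owner": "acme", "name": "SomeLib", "version": "^1.2.0"}
installed = PackageCompatibility(owner="acme", name="SomeLib", version="1.2.3")
print(PackageCompatibility.from_dependency(dependency).is_compatible(installed))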
@ -477,6 +496,7 @@ class LibBuilderBase:
|
||||
self.is_built = True
|
||||
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.ProcessCompileDbToolchainOption()
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
@ -791,7 +811,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
if path not in include_dirs and path not in self.envorigin.get(
|
||||
"CPPPATH", []
|
||||
):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
@ -1137,6 +1159,8 @@ def ConfigureProjectLibBuilder(env):
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
# refill found libs after recursive search
|
||||
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
|
||||
for lb in lib_builders:
|
||||
for deplb in lb.depbuilders[:]:
|
||||
if deplb not in found_lbs:
|
||||
|
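A short illustration of the dependency-compatibility check introduced above (not part of the diff; the library names and versions are made-up examples). It mirrors is_dependency_compatible(): build owner/name/version qualifiers from a library.json dependency entry, drop the version when it does not parse as a requirement, and compare against the installed package's qualifiers.

# Hedged sketch, assuming the APIs used in the hunk above
from platformio.package.meta import PackageCompatibility, PackageSpec

dependency = {"owner": "bblanchon", "name": "ArduinoJson", "version": "^6.19.0"}
installed = {"owner": "bblanchon", "name": "ArduinoJson", "version": "6.21.2"}

qualifiers = {k: v for k, v in dependency.items() if k in ("owner", "name", "version")}
# As in the new code: a version that carries no parseable requirements
# (e.g. a VCS URL) is not usable as a compatibility qualifier.
if "version" in qualifiers and not PackageSpec(qualifiers["version"]).requirements:
    del qualifiers["version"]

print(
    PackageCompatibility.from_dependency(qualifiers).is_compatible(
        PackageCompatibility(**installed)
    )
)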
@@ -23,10 +23,10 @@ from SCons.Subst import quote_spaces  # pylint: disable=import-error
from platformio.compat import IS_WINDOWS, hashlib_encode_data

# There are the next limits depending on a platform:
# - Windows = 8192
# - Windows = 8191
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512

WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
@@ -20,19 +20,23 @@ from platformio.proc import exec_command

@util.memoized()
def GetCompilerType(env):
if env.subst("$CC").endswith("-gcc"):
def GetCompilerType(env):  # pylint: disable=too-many-return-statements
CC = env.subst("$CC")
if CC.endswith("-gcc"):
return "gcc"
if os.path.basename(CC) == "clang":
return "clang"
try:
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
result = exec_command([CC, "-v"], env=sysenv)
except OSError:
return None
if result["returncode"] != 0:
return None
output = "".join([result["out"], result["err"]]).lower()
if "clang" in output and "LLVM" in output:
if "clang version" in output:
return "clang"
if "gcc" in output:
return "gcc"
@@ -33,9 +33,7 @@ from platformio.project.config import ProjectOptions
@util.memoized()
def _PioPlatform():
env = DefaultEnvironment()
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS)
return p
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)

def PioPlatform(_):

@@ -77,9 +75,11 @@ def LoadPioPlatform(env):
continue
env.PrependENVPath(
"PATH",
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path,
(
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path
),
)
if (
not IS_WINDOWS
@@ -53,7 +53,7 @@ def _get_symbol_locations(env, elf_path, addrs):
locations = [line for line in result["out"].split("\n") if line]
assert len(addrs) == len(locations)

return dict(zip(addrs, [l.strip() for l in locations]))
return dict(zip(addrs, [loc.strip() for loc in locations]))

def _get_demangled_names(env, mangled_names):

@@ -73,31 +73,7 @@ def _get_demangled_names(env, mangled_names):
)

def _determine_section(sections, symbol_addr):
for section, info in sections.items():
if not _is_flash_section(info) and not _is_ram_section(info):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"

def _is_ram_section(section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)

def _is_flash_section(section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")

def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"

def _collect_sections_info(elffile):
def _collect_sections_info(env, elffile):
sections = {}
for section in elffile.iter_sections():
if section.is_null() or section.name.startswith(".debug"):

@@ -107,13 +83,18 @@ def _collect_sections_info(elffile):
section_flags = describe_sh_flags(section["sh_flags"])
section_size = section.data_size

sections[section.name] = {
section_data = {
"name": section.name,
"size": section_size,
"start_addr": section["sh_addr"],
"type": section_type,
"flags": section_flags,
}

sections[section.name] = section_data
sections[section.name]["in_flash"] = env.pioSizeIsFlashSection(section_data)
sections[section.name]["in_ram"] = env.pioSizeIsRamSection(section_data)

return sections

@@ -136,7 +117,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
symbol_size = s["st_size"]
symbol_type = symbol_info["type"]

if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
if not env.pioSizeIsValidSymbol(s.name, symbol_type, symbol_addr):
continue

symbol = {

@@ -145,7 +126,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
"name": s.name,
"type": symbol_type,
"size": symbol_size,
"section": _determine_section(sections, symbol_addr),
"section": env.pioSizeDetermineSection(sections, symbol_addr),
}

if s.name.startswith("_Z"):

@@ -175,12 +156,36 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
return symbols

def _calculate_firmware_size(sections):
def pioSizeDetermineSection(_, sections, symbol_addr):
for section, info in sections.items():
if not info.get("in_flash", False) and not info.get("in_ram", False):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"

def pioSizeIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"

def pioSizeIsRamSection(_, section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)

def pioSizeIsFlashSection(_, section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")

def pioSizeCalculateFirmwareSize(_, sections):
flash_size = ram_size = 0
for section_info in sections.values():
if _is_flash_section(section_info):
if section_info.get("in_flash", False):
flash_size += section_info.get("size", 0)
if _is_ram_section(section_info):
if section_info.get("in_ram", False):
ram_size += section_info.get("size", 0)

return ram_size, flash_size

@@ -210,8 +215,8 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
sys.stderr.write("Elf file doesn't contain DWARF information")
env.Exit(1)

sections = _collect_sections_info(elffile)
firmware_ram, firmware_flash = _calculate_firmware_size(sections)
sections = _collect_sections_info(env, elffile)
firmware_ram, firmware_flash = env.pioSizeCalculateFirmwareSize(sections)
data["memory"]["total"] = {
"ram_size": firmware_ram,
"flash_size": firmware_flash,

@@ -226,9 +231,11 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument

symbol_size = symbol.get("size", 0)
section = sections.get(symbol.get("section", ""), {})
if _is_ram_section(section):
if not section:
continue
if section.get("in_ram", False):
files[file_path]["ram_size"] += symbol_size
if _is_flash_section(section):
if section.get("in_flash", False):
files[file_path]["flash_size"] += symbol_size

files[file_path]["symbols"].append(symbol)

@@ -250,5 +257,10 @@ def exists(_):

def generate(env):
env.AddMethod(pioSizeIsRamSection)
env.AddMethod(pioSizeIsFlashSection)
env.AddMethod(pioSizeCalculateFirmwareSize)
env.AddMethod(pioSizeDetermineSection)
env.AddMethod(pioSizeIsValidSymbol)
env.AddMethod(DumpSizeData)
return env
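Because generate() now registers the size helpers on the SCons construction environment via env.AddMethod(), they become callable as env methods (the leading "_" parameter receives the env). A minimal, hypothetical post-build extra script could reuse them; the section dict below only mirrors the shape built in _collect_sections_info() and is an assumption, not project output.

# Hedged sketch of an extra_scripts entry, assuming the tool above is loaded
Import("env")  # provided by PlatformIO/SCons in extra scripts

section = {
    "name": ".data",
    "type": "SHT_PROGBITS",
    "flags": "WA",
    "start_addr": 0x20000000,
    "size": 1024,
}
print("counted as RAM:", env.pioSizeIsRamSection(section))
print("counted as flash:", env.pioSizeIsFlashSection(section))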
@@ -61,7 +61,7 @@ def CleanProject(env, fullclean=False):
print("Done cleaning")

def AddTarget(  # pylint: disable=too-many-arguments
def AddTarget(  # pylint: disable=too-many-arguments,too-many-positional-arguments
env,
name,
dependencies,

@@ -218,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)

# raise error
# if data_max_size and data_size > data_max_size:
#     sys.stderr.write(
#         "Error: The data size (%d bytes) is greater "
#         "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
#     env.Exit(1)
if data_max_size and data_size > data_max_size:
sys.stderr.write(
"Warning! The data size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
)
if program_size > program_max_size:
sys.stderr.write(
"Error: The program size (%d bytes) is greater "
@@ -19,7 +19,6 @@ import json
import os
import shutil
from collections import Counter
from os.path import dirname, isfile
from time import time

import click

@@ -60,7 +59,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
)
@click.option("--skip-packages", is_flag=True)
def cli(
def cli(  # pylint: disable=too-many-positional-arguments
environment,
project_dir,
project_conf,

@@ -77,7 +76,7 @@ def cli(
app.set_session_var("custom_project_conf", project_conf)

# find project directory on upper level
if isfile(project_dir):
if os.path.isfile(project_dir):
project_dir = find_project_dir_above(project_dir)

results = []

@@ -103,12 +102,23 @@ def cli(
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
)

default_src_filters = [
"+<%s>" % os.path.basename(config.get("platformio", "src_dir")),
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
]
default_src_filters = []
for d in (
config.get("platformio", "src_dir"),
config.get("platformio", "include_dir"),
):
try:
default_src_filters.append("+<%s>" % os.path.relpath(d))
except ValueError as exc:
# On Windows if sources are located on a different logical drive
if not json_output and not silent:
click.echo(
"Error: Project cannot be analyzed! The project folder `%s`"
" is located on a different logical drive\n" % d
)
raise exception.ReturnErrorCode(1) from exc

src_filters = (
env_src_filters = (
src_filters
or pattern
or env_options.get(

@@ -120,11 +130,13 @@ def cli(
tool_options = dict(
verbose=verbose,
silent=silent,
src_filters=src_filters,
src_filters=env_src_filters,
flags=flags or env_options.get("check_flags"),
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
severity=(
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity")
),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
platform_packages=env_options.get("platform_packages"),
)

@@ -137,14 +149,16 @@ def cli(
print_processing_header(tool, envname, env_dump)

ct = CheckToolFactory.new(
tool, project_dir, config, envname, tool_options
tool, os.getcwd(), config, envname, tool_options
)

result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check(
on_defect_callback=None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
on_defect_callback=(
None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
)

result["defects"] = ct.get_defects()

@@ -235,12 +249,12 @@ def collect_component_stats(result):
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})

for defect in result.get("defects", []):
component = dirname(defect.file) or defect.file
component = os.path.dirname(defect.file) or defect.file
_append_defect(component, defect)

if component.lower().startswith(get_project_dir().lower()):
while os.sep in component:
component = dirname(component)
component = os.path.dirname(component)
_append_defect(component, defect)

return components
@@ -29,7 +29,7 @@ class DefectItem:
SEVERITY_LOW = 4
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}

def __init__(
def __init__(  # pylint: disable=too-many-positional-arguments
self,
severity,
category,

@@ -60,8 +60,8 @@ class CheckToolBase:  # pylint: disable=too-many-instance-attributes
data = load_build_metadata(self.project_dir, self.envname)
if not data:
return
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
self.cc_flags = data.get("cc_flags", [])
self.cxx_flags = data.get("cxx_flags", [])
self.cpp_includes = self._dump_includes(data.get("includes", {}))
self.cpp_defines = data.get("defines", [])
self.cc_path = data.get("cc_path")
@@ -63,6 +63,21 @@ class PlatformioCLI(click.MultiCommand):
]
)

@classmethod
def reveal_cmd_path_args(cls, ctx):
result = []
group = ctx.command
args = cls.leftover_args[::]
while args:
cmd_name = args.pop(0)
next_group = group.get_command(ctx, cmd_name)
if next_group:
group = next_group
result.append(cmd_name)
if not hasattr(group, "get_command"):
break
return result

def invoke(self, ctx):
PlatformioCLI.leftover_args = ctx.args
if hasattr(ctx, "protected_args"):
@@ -63,7 +63,7 @@ def validate_path(ctx, param, value):  # pylint: disable=unused-argument
@click.option("-e", "--environment", "environments", multiple=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_context
def cli(  # pylint: disable=too-many-arguments, too-many-branches
def cli(  # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
ctx,
src,
lib,

@@ -152,7 +152,7 @@ def cli(ctx, **options):
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
)
@click.pass_context
def lib_install(  # pylint: disable=too-many-arguments,unused-argument
def lib_install(  # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
ctx, libraries, save, silent, interactive, force
):
click.secho(

@@ -210,7 +210,7 @@ def lib_uninstall(ctx, libraries, save, silent):
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
@click.pass_context
def lib_update(  # pylint: disable=too-many-arguments
def lib_update(  # pylint: disable=too-many-arguments,too-many-positional-arguments
ctx, libraries, only_check, dry_run, silent, json_output
):
only_check = dry_run or only_check

@@ -159,7 +159,7 @@ def platform_show(ctx, platform, json_output):  # pylint: disable=too-many-branc
help="Reinstall/redownload dev/platform and its packages if exist",
)
@click.pass_context
def platform_install(  # pylint: disable=too-many-arguments
def platform_install(  # pylint: disable=too-many-arguments,too-many-positional-arguments
ctx,
platforms,
with_package,

@@ -224,7 +224,7 @@ def platform_uninstall(ctx, platforms):
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
@click.pass_context
def platform_update(  # pylint: disable=too-many-locals, too-many-arguments
def platform_update(  # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
ctx, platforms, only_check, dry_run, silent, json_output, **_
):
only_check = dry_run or only_check

@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
app.reset_settings()
click.secho("The settings have been reseted!", fg="green")
click.secho("The settings have been reset!", fg="green")
ctx.invoke(settings_get)
@@ -19,6 +19,7 @@ import subprocess
import click

from platformio import VERSION, __version__, app, exception
from platformio.dependencies import get_pip_dependencies
from platformio.http import fetch_remote_content
from platformio.package.manager.core import update_core_packages
from platformio.proc import get_pythonexe_path

@@ -33,9 +34,14 @@ DEVELOP_INIT_SCRIPT_URL = (

@click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version")
@click.option("--dev", is_flag=True, help="Use development branch")
@click.option("--only-dependencies", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def cli(dev, verbose):
def cli(dev, only_dependencies, verbose):
if only_dependencies:
return upgrade_pip_dependencies(verbose)

update_core_packages()

if not dev and __version__ == get_latest_version():
return click.secho(
"You're up-to-date!\nPlatformIO %s is currently the "

@@ -50,19 +56,28 @@ def cli(dev, verbose):
pkg_spec = DEVELOP_ZIP_URL if to_develop else "platformio"

try:
# PIO Core
subprocess.run(
[python_exe, "-m", "pip", "install", "--upgrade", pkg_spec],
check=True,
capture_output=not verbose,
stdout=subprocess.PIPE if not verbose else None,
)
r = subprocess.run(

# PyPI dependencies
subprocess.run(
[python_exe, "-m", "platformio", "upgrade", "--only-dependencies"],
check=False,
stdout=subprocess.PIPE,
)

# Check version
output = subprocess.run(
[python_exe, "-m", "platformio", "--version"],
check=True,
capture_output=True,
text=True,
)
assert "version" in r.stdout
actual_version = r.stdout.split("version", 1)[1].strip()
stdout=subprocess.PIPE,
).stdout.decode()
assert "version" in output
actual_version = output.split("version", 1)[1].strip()
click.secho(
"PlatformIO has been successfully upgraded to %s" % actual_version,
fg="green",

@@ -88,9 +103,20 @@ def cli(dev, verbose):
return True

def get_pkg_spec(to_develop):
if to_develop:
return
def upgrade_pip_dependencies(verbose):
subprocess.run(
[
get_pythonexe_path(),
"-m",
"pip",
"install",
"--upgrade",
"pip",
*get_pip_dependencies(),
],
check=True,
stdout=subprocess.PIPE if not verbose else None,
)

def get_latest_version():
@@ -17,6 +17,8 @@
import importlib.util
import inspect
import locale
import os
import shlex
import sys

from platformio.exception import UserSideException

@@ -29,7 +31,25 @@ else:
from asyncio import get_event_loop as aio_get_running_loop

if sys.version_info >= (3, 8):
from shlex import join as shlex_join
else:

def shlex_join(split_command):
return " ".join(shlex.quote(arg) for arg in split_command)

if sys.version_info >= (3, 9):
from asyncio import to_thread as aio_to_thread
else:
try:
from starlette.concurrency import run_in_threadpool as aio_to_thread
except ImportError:
pass

PY2 = sys.version_info[0] == 2  # DO NOT REMOVE IT. ESP8266/ESP32 depend on it
PY36 = sys.version_info[0:2] == (3, 6)
IS_CYGWIN = sys.platform.startswith("cygwin")
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
IS_MACOS = sys.platform.startswith("darwin")

@@ -117,3 +137,12 @@ def path_to_unicode(path):
and custom device monitor filters
"""
return path

def is_proxy_set(socks=False):
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
value = os.getenv(var, os.getenv(var.lower()))
if not value or (socks and not value.startswith("socks5://")):
continue
return True
return False
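The two compat helpers added above can be exercised on their own; a minimal sketch follows (not part of the diff, environment values are illustrative). is_proxy_set() scans the usual proxy variables, and with socks=True it only reports True for a socks5:// URL; shlex_join() is the stdlib shlex.join on Python 3.8+ and a quoting fallback otherwise.

# Hedged usage sketch for platformio.compat helpers
import os
from platformio.compat import is_proxy_set, shlex_join

os.environ["HTTPS_PROXY"] = "socks5://127.0.0.1:1080"  # example value only
print(is_proxy_set())            # True: a proxy variable is set
print(is_proxy_set(socks=True))  # True: and it is a socks5:// URL
print(shlex_join(["gdb", "-ex", "target remote :3333"]))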
@@ -55,9 +55,9 @@ from platformio.project.options import ProjectOptions
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(
def cli(  # pylint: disable=too-many-positional-arguments
ctx,
project_dir,
project_conf,

@@ -65,10 +65,13 @@ def cli(
load_mode,
verbose,
interface,
__unprocessed,
client_extra_args,
):
app.set_session_var("custom_project_conf", project_conf)

if not interface and client_extra_args:
raise click.UsageError("Please specify debugging interface")

# use env variables from Eclipse or CLion
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):

@@ -83,7 +86,7 @@ def cli(

if not interface:
return helpers.predebug_project(
ctx, project_dir, project_config, env_name, False, verbose
ctx, os.getcwd(), project_config, env_name, False, verbose
)

configure_args = (

@@ -92,7 +95,7 @@ def cli(
env_name,
load_mode,
verbose,
__unprocessed,
client_extra_args,
)
if helpers.is_gdbmi_mode():
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"

@@ -103,21 +106,21 @@ def cli(
else:
debug_config = _configure(*configure_args)

_run(project_dir, debug_config, __unprocessed)
_run(os.getcwd(), debug_config, client_extra_args)

return None

def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed):
platform = PlatformFactory.new(
project_config.get(f"env:{env_name}", "platform"), autoinstall=True
)
def _configure(
ctx, project_config, env_name, load_mode, verbose, client_extra_args
):  # pylint: disable=too-many-positional-arguments
platform = PlatformFactory.from_env(env_name, autoinstall=True)
debug_config = DebugConfigFactory.new(
platform,
project_config,
env_name,
)
if "--version" in __unprocessed:
if "--version" in client_extra_args:
raise ReturnErrorCode(
subprocess.run(
[debug_config.client_executable_path, "--version"], check=True

@@ -163,12 +166,12 @@ def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed)
return debug_config

def _run(project_dir, debug_config, __unprocessed):
def _run(project_dir, debug_config, client_extra_args):
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
asyncio.set_event_loop(loop)

client = GDBClientProcess(project_dir, debug_config)
coro = client.run(__unprocessed)
coro = client.run(client_extra_args)
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
loop.run_until_complete(coro)
@@ -24,7 +24,9 @@ from platformio.project.options import ProjectOptions

class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
def __init__(self, platform, project_config, env_name, port=None):
DEFAULT_PORT = None

def __init__(self, platform, project_config, env_name):
self.platform = platform
self.project_config = project_config
self.env_name = env_name

@@ -48,7 +50,6 @@ class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
self._load_cmds = None
self._port = None

self.port = port
self.server = self._configure_server()

try:

@@ -120,8 +121,10 @@ class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
@property
def port(self):
return (
self.env_options.get("debug_port", self.tool_settings.get("port"))
or self._port
self._port
or self.env_options.get("debug_port")
or self.tool_settings.get("port")
or self.DEFAULT_PORT
)

@port.setter

@@ -145,7 +148,9 @@ class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
)

def _load_build_data(self):
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
data = load_build_metadata(
os.getcwd(), self.env_name, cache=True, build_type="debug"
)
if not data:
raise DebugInvalidOptionsError("Could not load a build configuration")
return data

@@ -191,9 +196,11 @@ class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
cwd=server_package_dir if server_package else None,
executable=result.get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
(
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
)
for a in result.get("arguments", [])
],
)
@@ -27,17 +27,13 @@ class DebugConfigFactory:

@classmethod
def new(cls, platform, project_config, env_name):
board_config = platform.board_config(
project_config.get("env:" + env_name, "board")
)
tool_name = (
board_config.get_debug_tool_name(
project_config.get("env:" + env_name, "debug_tool")
)
if board_config
else None
)
board_id = project_config.get("env:" + env_name, "board")
config_cls = None
tool_name = None
if board_id:
tool_name = platform.board_config(
project_config.get("env:" + env_name, "board")
).get_debug_tool_name(project_config.get("env:" + env_name, "debug_tool"))
try:
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
config_cls = getattr(mod, cls.get_clsname(tool_name))
@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase

class GenericDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset halt

@@ -31,8 +32,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase

class JlinkDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2331"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset

@@ -36,11 +37,6 @@ $LOAD_CMDS
$INIT_BREAK
"""

def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2331"
super().__init__(*args, **kwargs)

@property
def server_ready_pattern(self):
return super().server_ready_pattern or ("Waiting for GDB connection")

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase

class MspdebugDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2000"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end

@@ -29,8 +30,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2000"
super().__init__(*args, **kwargs)
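Across these tool configs the pattern is the same: the per-class __init__ override that injected a "port" kwarg is dropped in favor of a DEFAULT_PORT class attribute, which the base class port property now uses as the last fallback. A hypothetical new tool configuration would therefore only declare the attribute; the class name below is illustrative and not part of the diff.

# Hedged sketch of a new tool config under the DEFAULT_PORT scheme
from platformio.debug.config.base import DebugConfigBase

class MyToolDebugConfig(DebugConfigBase):  # hypothetical example
    DEFAULT_PORT = ":4242"
    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    monitor reset halt
end
"""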
@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase

class QemuDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":1234"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor system_reset

@@ -30,8 +31,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":1234"
super().__init__(*args, **kwargs)

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase

class RenodeDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor machine Reset

@@ -33,11 +34,6 @@ $INIT_BREAK
monitor start
"""

def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)

@property
def server_ready_pattern(self):
return super().server_ready_pattern or (
@@ -30,3 +30,7 @@ class DebugSupportError(DebugError, UserSideException):

class DebugInvalidOptionsError(DebugError, UserSideException):
pass

class DebugInitError(DebugError, UserSideException):
pass

@@ -76,7 +76,7 @@ def get_default_debug_env(config):

def predebug_project(
ctx, project_dir, project_config, env_name, preload, verbose
):  # pylint: disable=too-many-arguments
):  # pylint: disable=too-many-arguments,too-many-positional-arguments
debug_testname = project_config.get("env:" + env_name, "debug_test")
if debug_testname:
test_names = list_test_names(project_config)

@@ -13,13 +13,13 @@
# limitations under the License.

import os
import re
import signal
import time

from platformio import telemetry
from platformio.compat import aio_get_running_loop, is_bytes
from platformio.debug import helpers
from platformio.debug.exception import DebugInitError
from platformio.debug.process.client import DebugClientProcess

@@ -130,11 +130,7 @@ class GDBClientProcess(DebugClientProcess):
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug",
"Started",
telemetry.dump_run_environment(self.debug_config.env_options),
)
telemetry.log_debug_started(self.debug_config)
self._auto_exec_continue()

def console_log(self, msg):

@@ -179,14 +175,7 @@ class GDBClientProcess(DebugClientProcess):
and b"Error in sourced" in self._errors_buffer
):
return

last_erros = self._errors_buffer.decode()
last_erros = " ".join(reversed(last_erros.split("\n")))
last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)

err = "%s -> %s" % (
telemetry.dump_run_environment(self.debug_config.env_options),
last_erros,
telemetry.log_debug_exception(
DebugInitError(self._errors_buffer.decode()), self.debug_config
)
telemetry.send_exception("DebugInitError: %s" % err)
self.transport.close()

@@ -62,7 +62,9 @@ class DebugServerProcess(DebugBaseProcess):

openocd_pipe_allowed = all(
[
not self.debug_config.env_options.get("debug_port"),
not self.debug_config.env_options.get(
"debug_port", self.debug_config.tool_settings.get("port")
),
"gdb" in self.debug_config.client_executable_path,
"openocd" in server_executable,
]
platformio/dependencies.py  (new file, 69 lines)
@@ -0,0 +1,69 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.compat import is_proxy_set

def get_core_dependencies():
return {
"contrib-piohome": "~3.4.2",
"contrib-pioremote": "~1.0.0",
"tool-scons": "~4.40801.0",
"tool-cppcheck": "~1.21100.0",
"tool-clangtidy": "~1.150005.0",
"tool-pvs-studio": "~7.18.0",
}

def get_pip_dependencies():
core = [
"bottle == 0.13.*",
"click >=8.0.4, <8.1.8",
"colorama",
"marshmallow == 3.*",
"pyelftools >=0.27, <1",
"pyserial == 3.5.*",  # keep in sync "device/monitor/terminal.py"
"requests%s == 2.*" % ("[socks]" if is_proxy_set(socks=True) else ""),
"semantic_version == 2.10.*",
"tabulate == 0.*",
]

home = [
# PIO Home requirements
"ajsonrpc == 1.2.*",
"starlette >=0.19, <0.47",
"uvicorn >=0.16, <0.35",
"wsproto == 1.*",
]

extra = []
# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
extra.append(
'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
)

# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
try:
import ssl  # pylint: disable=import-outside-toplevel

if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
1,
1,
1,
):
extra.append("urllib3<2")
except ImportError:
pass

return core + home + extra
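The new helper module can be queried directly; the short sketch below (not part of the diff) roughly mirrors what the reworked `pio upgrade --only-dependencies` path feeds to pip, and prints the pinned tool packages as well.

# Hedged usage sketch for platformio.dependencies
from platformio.dependencies import get_core_dependencies, get_pip_dependencies

for spec in get_pip_dependencies():
    print(spec)  # e.g. "click >=8.0.4, <8.1.8"
print(sorted(get_core_dependencies()))  # e.g. ["contrib-piohome", ..., "tool-scons"]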
@@ -89,7 +89,7 @@ def is_serial_port_ready(port, timeout=1):

class SerialPortFinder:
def __init__(  # pylint: disable=too-many-arguments
def __init__(  # pylint: disable=too-many-arguments,too-many-positional-arguments
self,
board_config=None,
upload_protocol=None,

@@ -163,7 +163,7 @@ class SerialPortFinder:
for item in list_serial_ports(as_objects=True):
if item.vid == device.vid and item.pid == device.pid:
candidates.append(item)
if len(candidates) == 1:
if len(candidates) <= 1:
return device.device
for item in candidates:
if ("GDB" if self.prefer_gdb_port else "UART") in item.description:
@@ -144,9 +144,9 @@ def list_mdns_services():
if service.properties:
try:
properties = {
k.decode("utf8"): v.decode("utf8")
if isinstance(v, bytes)
else v
k.decode("utf8"): (
v.decode("utf8") if isinstance(v, bytes) else v
)
for k, v in service.properties.items()
}
json.dumps(properties)
@@ -58,7 +58,7 @@ from platformio.project.options import ProjectOptions
"--encoding",
help=(
"Set the encoding for the serial port "
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
"(e.g. hexlify, Latin-1, UTF-8) [default=%s]"
% ProjectOptions["env.monitor_encoding"].default
),
)

@@ -125,9 +125,11 @@ def device_monitor_cmd(**options):
options = apply_project_monitor_options(options, project_options)
register_filters(platform=platform, options=options)
options["port"] = SerialPortFinder(
board_config=platform.board_config(project_options.get("board"))
if platform and project_options.get("board")
else None,
board_config=(
platform.board_config(project_options.get("board"))
if platform and project_options.get("board")
else None
),
upload_protocol=project_options.get("upload_protocol"),
ensure_ready=True,
).find(initial_port=options["port"])
@@ -25,11 +25,12 @@ from platformio.project.config import ProjectConfig
class DeviceMonitorFilterBase(miniterm.Transform):
def __init__(self, options=None):
"""Called by PlatformIO to pass context"""
miniterm.Transform.__init__(self)
super().__init__()

self.options = options or {}
self.project_dir = self.options.get("project_dir")
self.environment = self.options.get("environment")
self._running_terminal = None

self.config = ProjectConfig.get_instance()
if not self.environment:

@@ -47,6 +48,12 @@ class DeviceMonitorFilterBase(miniterm.Transform):
def NAME(self):
raise NotImplementedError("Please declare NAME attribute for the filter class")

def set_running_terminal(self, terminal):
self._running_terminal = terminal

def get_running_terminal(self):
return self._running_terminal

def register_filters(platform=None, options=None):
# project filters
@@ -24,12 +24,18 @@ class Hexlify(DeviceMonitorFilterBase):
super().__init__(*args, **kwargs)
self._counter = 0

def set_running_terminal(self, terminal):
# force to Latin-1, issue #4732
if terminal.input_encoding == "UTF-8":
terminal.set_rx_encoding("Latin-1")
super().set_running_terminal(terminal)

def rx(self, text):
result = ""
for b in serial.iterbytes(text):
for c in serial.iterbytes(text):
if (self._counter % 16) == 0:
result += "\n{:04X} | ".format(self._counter)
asciicode = ord(b)
asciicode = ord(c)
if asciicode <= 255:
result += "{:02X} ".format(asciicode)
else:
@@ -110,6 +110,12 @@ def new_terminal(options):
term.raw = options["raw"]
term.set_rx_encoding(options["encoding"])
term.set_tx_encoding(options["encoding"])
for ts in (term.tx_transformations, term.rx_transformations):
for t in ts:
try:
t.set_running_terminal(term)
except AttributeError:
pass
return term
@@ -81,7 +81,7 @@ class InvalidSettingValue(UserSideException):
MESSAGE = "Invalid value '{0}' for the setting '{1}'"

class InvalidJSONFile(PlatformioException):
class InvalidJSONFile(ValueError, UserSideException):
MESSAGE = "Could not load broken JSON: {0}"
@@ -210,7 +210,7 @@ def change_filemtime(path, mtime):

def rmtree(path):
def _onerror(func, path, __):
def _onexc(func, path, _):
try:
st_mode = os.stat(path).st_mode
if st_mode & stat.S_IREAD:

@@ -223,4 +223,7 @@ def rmtree(path):
err=True,
)

return shutil.rmtree(path, onerror=_onerror)
# pylint: disable=unexpected-keyword-arg, deprecated-argument
if sys.version_info < (3, 12):
return shutil.rmtree(path, onerror=_onexc)
return shutil.rmtree(path, onexc=_onexc)
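The rmtree change tracks Python 3.12, where shutil.rmtree deprecates the onerror callback (which receives an exc_info tuple) in favor of onexc (which receives the exception itself). A self-contained sketch of the same version gate, with an adapter so one handler signature works on both sides, is shown below; it is an illustration, not the project's helper.

# Hedged sketch of the onerror/onexc compatibility gate
import shutil
import sys

def rmtree_compat(path, handler):
    # handler(func, path, exc) receives the exception instance on both branches
    if sys.version_info < (3, 12):
        return shutil.rmtree(
            path, onerror=lambda func, p, exc_info: handler(func, p, exc_info[1])
        )
    return shutil.rmtree(path, onexc=handler)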
@@ -12,12 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from pathlib import Path

from platformio import __version__, app, fs, util
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.project.config import ProjectConfig
from platformio.project.helpers import is_platformio_project

@@ -32,16 +30,11 @@ class AppRPC(BaseRPCHandler):
"projectsDir",
]

@staticmethod
def get_state_path():
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
if not os.path.isdir(core_dir):
os.makedirs(core_dir)
return os.path.join(core_dir, "homestate.json")

@staticmethod
def load_state():
with app.State(AppRPC.get_state_path(), lock=True) as state:
with app.State(
app.resolve_state_path("core_dir", "homestate.json"), lock=True
) as state:
storage = state.get("storage", {})

# base data

@@ -81,7 +74,9 @@ class AppRPC(BaseRPCHandler):

@staticmethod
def save_state(state):
with app.State(AppRPC.get_state_path(), lock=True) as s:
with app.State(
app.resolve_state_path("core_dir", "homestate.json"), lock=True
) as s:
s.clear()
s.update(state)
storage = s.get("storage", {})
@@ -19,10 +19,10 @@ import shutil
from functools import cmp_to_key

import click
from starlette.concurrency import run_in_threadpool

from platformio import fs
from platformio.cache import ContentCache
from platformio.compat import aio_to_thread
from platformio.device.list.util import list_logical_devices
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.http import HTTPSession, ensure_internet_on

@@ -33,12 +33,14 @@ class HTTPAsyncSession(HTTPSession):
self, *args, **kwargs
):
func = super().request
return await run_in_threadpool(func, *args, **kwargs)
return await aio_to_thread(func, *args, **kwargs)

class OSRPC(BaseRPCHandler):
@staticmethod
async def fetch_content(url, data=None, headers=None, cache_valid=None):
_http_session = None

@classmethod
async def fetch_content(cls, url, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent": (

@@ -57,11 +59,13 @@ class OSRPC(BaseRPCHandler):
# check internet before and resolve issue with 60 seconds timeout
ensure_internet_on(raise_exception=True)

session = HTTPAsyncSession()
if not cls._http_session:
cls._http_session = HTTPAsyncSession()

if data:
r = await session.post(url, data=data, headers=headers)
r = await cls._http_session.post(url, data=data, headers=headers)
else:
r = await session.get(url, headers=headers)
r = await cls._http_session.get(url, headers=headers)

r.raise_for_status()
result = r.text

@@ -73,9 +77,9 @@ class OSRPC(BaseRPCHandler):
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
if uri.startswith("http"):
return await self.fetch_content(uri, data, headers, cache_valid)
if os.path.isfile(uri):
with io.open(uri, encoding="utf-8") as fp:
return fp.read()
local_path = uri[7:] if uri.startswith("file://") else uri
with io.open(local_path, encoding="utf-8") as fp:
return fp.read()
return None

@staticmethod
@@ -22,13 +22,13 @@ import threading

import click
from ajsonrpc.core import JSONRPC20DispatchException
from starlette.concurrency import run_in_threadpool

from platformio import __main__, __version__, app, fs, proc, util
from platformio.compat import (
IS_WINDOWS,
aio_create_task,
aio_get_running_loop,
aio_to_thread,
get_locale_encoding,
is_bytes,
)

@@ -177,7 +177,7 @@ class PIOCoreRPC(BaseRPCHandler):

@staticmethod
async def _call_subprocess(args, options):
result = await run_in_threadpool(
result = await aio_to_thread(
proc.exec_command,
[get_core_fullpath()] + args,
cwd=options.get("cwd") or os.getcwd(),

@@ -197,7 +197,7 @@ class PIOCoreRPC(BaseRPCHandler):
exit_code,
)

return await run_in_threadpool(
return await aio_to_thread(
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
)
@@ -12,21 +12,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os.path

from platformio.compat import aio_to_thread
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.meta import PackageSpec
from platformio.platform.factory import PlatformFactory

class PlatformRPC(BaseRPCHandler):
async def fetch_platforms(self, search_query=None, page=0, force_installed=False):
if force_installed:
return {
"items": await aio_to_thread(
self._load_installed_platforms, search_query
)
}

search_result = await self.factory.manager.dispatcher["registry.call_client"](
method="list_packages",
query=search_query,
qualifiers={
"types": ["platform"],
},
page=page,
)
return {
"page": search_result["page"],
"limit": search_result["limit"],
"total": search_result["total"],
"items": [
{
"id": item["id"],
"ownername": item["owner"]["username"],
"name": item["name"],
"version": item["version"]["name"],
"description": item["description"],
"tier": item["tier"],
}
for item in search_result["items"]
],
}

@staticmethod
def list_installed(options=None):
result = []
options = options or {}
def _load_installed_platforms(search_query=None):
search_query = (search_query or "").strip()

def _matchSearchQuery(p):
searchQuery = options.get("searchQuery")
if not searchQuery:
return True
content_blocks = [p.name, p.title, p.description]
if p.frameworks:
content_blocks.append(" ".join(p.frameworks.keys()))

@@ -34,28 +68,73 @@ class PlatformRPC(BaseRPCHandler):
board_data = board.get_brief_data()
for key in ("id", "mcu", "vendor"):
content_blocks.append(board_data.get(key))
return searchQuery.strip() in " ".join(content_blocks)
return search_query in " ".join(content_blocks)

items = []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
p = PlatformFactory.new(pkg)
if not _matchSearchQuery(p):
if search_query and not _matchSearchQuery(p):
continue
result.append(
dict(
__pkg_path=pkg.path,
__pkg_meta=pkg.metadata.as_dict(),
name=p.name,
title=p.title,
description=p.description,
)
items.append(
{
"__pkg_path": pkg.path,
"ownername": pkg.metadata.spec.owner if pkg.metadata.spec else None,
"name": p.name,
"version": str(pkg.metadata.version),
"title": p.title,
"description": p.description,
}
)
return result
return items

async def fetch_boards(self, platform_spec):
spec = PackageSpec(platform_spec)
if spec.owner:
return await self.factory.manager.dispatcher["registry.call_client"](
method="get_package",
typex="platform",
owner=spec.owner,
name=spec.name,
extra_path="/boards",
)
return await aio_to_thread(self._load_installed_boards, spec)

@staticmethod
def get_boards(spec):
p = PlatformFactory.new(spec)
def _load_installed_boards(platform_spec):
p = PlatformFactory.new(platform_spec)
return sorted(
[b.get_brief_data() for b in p.get_boards().values()],
key=lambda item: item["name"],
)

async def fetch_examples(self, platform_spec):
spec = PackageSpec(platform_spec)
if spec.owner:
return await self.factory.manager.dispatcher["registry.call_client"](
method="get_package",
typex="platform",
owner=spec.owner,
name=spec.name,
extra_path="/examples",
)
return await aio_to_thread(self._load_installed_examples, spec)

@staticmethod
def _load_installed_examples(platform_spec):
platform = PlatformFactory.new(platform_spec)
platform_dir = platform.get_dir()
parser = ManifestParserFactory.new_from_dir(platform_dir)
result = parser.as_dict().get("examples") or []
for example in result:
example["files"] = [
{
"path": item,
"url": (
"file://%s"
+ os.path.join(platform_dir, "examples", example["name"], item)
),
}
for item in example["files"]
]
return result
@@ -15,6 +15,7 @@
import os
import shutil
import time
from pathlib import Path

import semantic_version
from ajsonrpc.core import JSONRPC20DispatchException

@@ -24,6 +25,7 @@ from platformio.home.rpc.handlers.app import AppRPC
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.package.manager.platform import PlatformPackageManager
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectError
from platformio.project.helpers import get_project_dir, is_platformio_project

@@ -36,9 +38,13 @@ class ProjectRPC(BaseRPCHandler):
def config_call(init_kwargs, method, *args):
assert isinstance(init_kwargs, dict)
assert "path" in init_kwargs
project_dir = get_project_dir()
if os.path.isfile(init_kwargs["path"]):
if os.path.isdir(init_kwargs["path"]):
project_dir = init_kwargs["path"]
init_kwargs["path"] = os.path.join(init_kwargs["path"], "platformio.ini")
elif os.path.isfile(init_kwargs["path"]):
project_dir = os.path.dirname(init_kwargs["path"])
else:
project_dir = get_project_dir()
with fs.cd(project_dir):
return getattr(ProjectConfig(**init_kwargs), method)(*args)

@@ -267,15 +273,39 @@ class ProjectRPC(BaseRPCHandler):
)
return new_project_dir

async def create_empty(self, configuration, options=None):
async def init_v2(self, configuration, options=None):
project_dir = os.path.join(configuration["location"], configuration["name"])
if not os.path.isdir(project_dir):
os.makedirs(project_dir)

envclone = os.environ.copy()
envclone["PLATFORMIO_FORCE_ANSI"] = "true"
options = options or {}
options["spawn"] = {"env": envclone, "cwd": project_dir}

args = ["project", "init"]
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])

if configuration.get("example"):
await self.factory.notify_clients(
method=options.get("stdoutNotificationMethod"),
params=["Copying example files...\n"],
actor="frontend",
)
await self._pre_init_example(configuration, project_dir)
else:
args.extend(self._pre_init_empty(configuration))

return await self.factory.manager.dispatcher["core.exec"](args, options=options)

@staticmethod
def _pre_init_empty(configuration):
project_options = []
platform = configuration["platform"]
board = configuration.get("board", {}).get("id")
env_name = board or platform["name"]
board_id = configuration.get("board", {}).get("id")
env_name = board_id or platform["name"]
if configuration.get("description"):
project_options.append(("description", configuration.get("description")))
try:

@@ -288,20 +318,73 @@ class ProjectRPC(BaseRPCHandler):
project_options.append(
("platform", "{name} @ {version}".format(**platform))
)
if board:
project_options.append(("board", board))
if board_id:
project_options.append(("board", board_id))
if configuration.get("framework"):
project_options.append(("framework", configuration["framework"]["name"]))

args = ["project", "init", "-e", env_name, "--sample-code"]
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])
args = ["-e", env_name, "--sample-code"]
for name, value in project_options:
args.extend(["-O", f"{name}={value}"])
return args

envclone = os.environ.copy()
envclone["PLATFORMIO_FORCE_ANSI"] = "true"
options = options or {}
options["spawn"] = {"env": envclone, "cwd": project_dir}
return await self.factory.manager.dispatcher["core.exec"](args, options=options)
async def _pre_init_example(self, configuration, project_dir):
for item in configuration["example"]["files"]:
p = Path(project_dir).joinpath(item["path"])
if not p.parent.is_dir():
p.parent.mkdir(parents=True)
p.write_text(
await self.factory.manager.dispatcher["os.request_content"](
item["url"]
),
encoding="utf-8",
)
return []

@staticmethod
def configuration(project_dir, env):
assert is_platformio_project(project_dir)
with fs.cd(project_dir):
config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
platform = PlatformFactory.from_env(env, autoinstall=True)
platform_pkg = PlatformPackageManager().get_package(platform.get_dir())
board_id = config.get(f"env:{env}", "board", None)

# frameworks
frameworks = []
for name in config.get(f"env:{env}", "framework", []):
if name not in platform.frameworks:
continue
f_pkg_name = platform.frameworks[name].get("package")
if not f_pkg_name:
continue
f_pkg = platform.get_package(f_pkg_name)
if not f_pkg:
continue
f_manifest = platform.pm.load_manifest(f_pkg)
frameworks.append(
dict(
name=name,
title=f_manifest.get("title"),
version=str(f_pkg.metadata.version),
)
)

return dict(
platform=dict(
ownername=(
platform_pkg.metadata.spec.owner
if platform_pkg.metadata.spec
else None
),
name=platform.name,
title=platform.title,
version=str(platform_pkg.metadata.version),
),
board=(
platform.board_config(board_id).get_brief_data()
if board_id
else None
),
frameworks=frameworks or None,
)
@@ -13,8 +13,8 @@
# limitations under the License.

from ajsonrpc.core import JSONRPC20DispatchException
from starlette.concurrency import run_in_threadpool

from platformio.compat import aio_to_thread
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.registry.client import RegistryClient

@@ -24,7 +24,7 @@ class RegistryRPC(BaseRPCHandler):
async def call_client(method, *args, **kwargs):
try:
client = RegistryClient()
return await run_in_threadpool(getattr(client, method), *args, **kwargs)
return await aio_to_thread(getattr(client, method), *args, **kwargs)
except Exception as exc: # pylint: disable=bare-except
raise JSONRPC20DispatchException(
code=5000, message="Registry Call Error", data=str(exc)
@@ -14,6 +14,7 @@

from urllib.parse import parse_qs

import ajsonrpc.utils
import click
from ajsonrpc.core import JSONRPC20Error, JSONRPC20Request
from ajsonrpc.dispatcher import Dispatcher

@@ -24,6 +25,10 @@ from platformio.compat import aio_create_task, aio_get_running_loop
from platformio.http import InternetConnectionError
from platformio.proc import force_exit

# Remove this line when PR is merged
# https://github.com/pavlov99/ajsonrpc/pull/22
ajsonrpc.utils.is_invalid_params = lambda: False


class JSONRPCServerFactoryBase:
connection_nums = 0
@@ -13,21 +13,21 @@
# limitations under the License.

import json
import os
import socket
from urllib.parse import urljoin

import requests.adapters
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
from urllib3.util.retry import Retry

from platformio import __check_internet_hosts__, app, util
from platformio.cache import ContentCache, cleanup_content_cache
from platformio.compat import is_proxy_set
from platformio.exception import PlatformioException, UserSideException

__default_requests_timeout__ = (10, None) # (connect, read)


class HTTPClientError(PlatformioException):
class HTTPClientError(UserSideException):
def __init__(self, message, response=None):
super().__init__()
self.message = message

@@ -50,7 +50,10 @@ class HTTPSession(requests.Session):
self._x_base_url = kwargs.pop("x_base_url") if "x_base_url" in kwargs else None
super().__init__(*args, **kwargs)
self.headers.update({"User-Agent": app.get_user_agent()})
self.verify = app.get_setting("enable_proxy_strict_ssl")
try:
self.verify = app.get_setting("enable_proxy_strict_ssl")
except PlatformioException:
self.verify = True

def request( # pylint: disable=signature-differs,arguments-differ
self, method, url, *args, **kwargs

@@ -60,9 +63,11 @@ class HTTPSession(requests.Session):
kwargs["timeout"] = __default_requests_timeout__
return super().request(
method,
url
if url.startswith("http") or not self._x_base_url
else urljoin(self._x_base_url, url),
(
url
if url.startswith("http") or not self._x_base_url
else urljoin(self._x_base_url, url)
),
*args,
**kwargs
)

@@ -154,7 +159,10 @@ class HTTPClient:
with ContentCache("http") as cc:
result = cc.get(cache_key)
if result is not None:
return json.loads(result)
try:
return json.loads(result)
except json.JSONDecodeError:
pass
response = self.send_request(method, path, **kwargs)
data = self._parse_json_response(response)
cc.set(cache_key, response.text, cache_valid)

@@ -182,12 +190,11 @@ class HTTPClient:
@util.memoized(expire="10s")
def _internet_on():
timeout = 2
use_proxy = is_proxy_set()
socket.setdefaulttimeout(timeout)
for host in __check_internet_hosts__:
try:
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
if not os.getenv(var) and not os.getenv(var.lower()):
continue
if use_proxy:
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
return True
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect

@@ -197,6 +204,15 @@ def _internet_on():
return True
except: # pylint: disable=bare-except
pass

# falling back to HTTPs, issue #4980
for host in __check_internet_hosts__:
try:
requests.get("https://%s" % host, allow_redirects=False, timeout=timeout)
except requests.exceptions.RequestException:
pass
return True

return False
@@ -25,23 +25,20 @@ from platformio.cli import PlatformioCLI
from platformio.commands.upgrade import get_latest_version
from platformio.http import HTTPClientError, InternetConnectionError, ensure_internet_on
from platformio.package.manager.core import update_core_packages
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageSpec
from platformio.package.version import pepver_to_semver
from platformio.system.prune import calculate_unnecessary_system_data


def on_platformio_start(ctx, caller):
def on_cmd_start(ctx, caller):
app.set_session_var("command_ctx", ctx)
set_caller(caller)
telemetry.on_command()

telemetry.on_cmd_start(ctx)
if PlatformioCLI.in_silence():
return
after_upgrade(ctx)


def on_platformio_end(ctx, result): # pylint: disable=unused-argument
def on_cmd_end():
if PlatformioCLI.in_silence():
return

@@ -60,8 +57,12 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument
)


def on_platformio_exception(e):
telemetry.on_exception(e)
def on_platformio_exception(exc):
telemetry.log_exception(exc)


def on_platformio_exit():
telemetry.on_exit()


def set_caller(caller=None):

@@ -79,11 +80,10 @@ def set_caller(caller=None):

class Upgrader:
def __init__(self, from_version, to_version):
self.from_version = pepver_to_semver(from_version)
self.to_version = pepver_to_semver(to_version)

self.from_version = from_version
self.to_version = to_version
self._upgraders = [
(semantic_version.Version("4.4.0-a.8"), self._update_pkg_metadata),
(semantic_version.Version("6.1.8-a.1"), self._appstate_migration),
]

def run(self, ctx):

@@ -99,37 +99,43 @@ class Upgrader:
return all(result)

@staticmethod
def _update_pkg_metadata(_):
pm = ToolPackageManager()
for pkg in pm.get_installed():
if not pkg.metadata or pkg.metadata.spec.external or pkg.metadata.spec.id:
continue
result = pm.search_registry_packages(PackageSpec(name=pkg.metadata.name))
if len(result) != 1:
continue
result = result[0]
pkg.metadata.spec = PackageSpec(
id=result["id"],
owner=result["owner"]["username"],
name=result["name"],
def _appstate_migration(_):
state_path = app.resolve_state_path("core_dir", "appstate.json")
if not os.path.isfile(state_path):
return True
app.delete_state_item("telemetry")
created_at = app.get_state_item("created_at", None)
if not created_at:
state_stat = os.stat(state_path)
app.set_state_item(
"created_at",
int(
state_stat.st_birthtime
if hasattr(state_stat, "st_birthtime")
else state_stat.st_ctime
),
)
pkg.dump_meta()
return True


def after_upgrade(ctx):
terminal_width = shutil.get_terminal_size().columns
last_version = app.get_state_item("last_version", "0.0.0")
if last_version == __version__:
return
last_version_str = app.get_state_item("last_version", "0.0.0")
if last_version_str == __version__:
return None

if last_version == "0.0.0":
if last_version_str == "0.0.0":
app.set_state_item("last_version", __version__)
elif pepver_to_semver(last_version) > pepver_to_semver(__version__):
return print_welcome_banner()

last_version = pepver_to_semver(last_version_str)
current_version = pepver_to_semver(__version__)

if last_version > current_version and not last_version.prerelease:
click.secho("*" * terminal_width, fg="yellow")
click.secho(
"Obsolete PIO Core v%s is used (previous was %s)"
% (__version__, last_version),
% (__version__, last_version_str),
fg="yellow",
)
click.secho("Please remove multiple PIO Cores from a system:", fg="yellow")

@@ -139,43 +145,50 @@ def after_upgrade(ctx):
fg="cyan",
)
click.secho("*" * terminal_width, fg="yellow")
return
else:
click.secho("Please wait while upgrading PlatformIO...", fg="yellow")
return None

# Update PlatformIO's Core packages
cleanup_content_cache("http")
update_core_packages()
click.secho("Please wait while upgrading PlatformIO...", fg="yellow")

u = Upgrader(last_version, __version__)
if u.run(ctx):
app.set_state_item("last_version", __version__)
click.secho(
"PlatformIO has been successfully upgraded to %s!\n" % __version__,
fg="green",
)
telemetry.send_event(
category="Auto",
action="Upgrade",
label="%s > %s" % (last_version, __version__),
)
# Update PlatformIO's Core packages
cleanup_content_cache("http")
update_core_packages()

# PlatformIO banner
u = Upgrader(last_version, current_version)
if u.run(ctx):
app.set_state_item("last_version", __version__)
click.secho(
"PlatformIO has been successfully upgraded to %s!\n" % __version__,
fg="green",
)
telemetry.log_event(
"pio_upgrade_core",
{
"label": "%s > %s" % (last_version_str, __version__),
"from_version": last_version_str,
"to_version": __version__,
},
)

return print_welcome_banner()


def print_welcome_banner():
terminal_width = shutil.get_terminal_size().columns
click.echo("*" * terminal_width)
click.echo("If you like %s, please:" % (click.style("PlatformIO", fg="cyan")))
click.echo(
"- %s us on Twitter to stay up-to-date "
"on the latest project news > %s"
% (
click.style("follow", fg="cyan"),
click.style("https://twitter.com/PlatformIO_Org", fg="cyan"),
)
)
click.echo(
"- %s it on GitHub > %s"
% (
click.style("star", fg="cyan"),
click.style("https://github.com/platformio/platformio", fg="cyan"),
click.style("https://github.com/platformio/platformio-core", fg="cyan"),
)
)
click.echo(
"- %s us on LinkedIn to stay up-to-date "
"on the latest project news > %s"
% (
click.style("follow", fg="cyan"),
click.style("https://www.linkedin.com/company/platformio/", fg="cyan"),
)
)
if not os.getenv("PLATFORMIO_IDE"):

@@ -228,7 +241,7 @@ def check_platformio_upgrade():
else:
click.secho("platformio upgrade", fg="cyan", nl=False)
click.secho("` or `", fg="yellow", nl=False)
click.secho("pip install -U platformio", fg="cyan", nl=False)
click.secho("python -m pip install -U platformio", fg="cyan", nl=False)
click.secho("` command.", fg="yellow")
click.secho("Changes: ", fg="yellow", nl=False)
click.secho("https://docs.platformio.org/en/latest/history.html", fg="cyan")
@@ -20,7 +20,7 @@ import click
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.exception import ReturnErrorCode, UserSideException
from platformio.package.manager.tool import ToolPackageManager
from platformio.proc import get_pythonexe_path
from platformio.proc import get_pythonexe_path, where_is_program


@click.command("exec", short_help="Run command from package tool")

@@ -52,9 +52,13 @@ def package_exec_cmd(obj, package, call, args):

inject_pkg_to_environ(pkg)
os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()

# inject current python interpreter on Windows
if IS_WINDOWS and args and args[0].endswith(".py"):
if args and args[0].endswith(".py"):
args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
if not os.path.exists(args[1]):
args[1] = where_is_program(args[1])

result = None
try:
run_options = dict(shell=call is not None, env=os.environ)
@@ -20,6 +20,7 @@ import click

from platformio import fs
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.core import get_core_package_dir
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager

@@ -120,7 +121,7 @@ def install_project_env_dependencies(project_env, options=None):
# custom tools
if options.get("tools"):
installed_conds.append(_install_project_env_custom_tools(project_env, options))
# custom ibraries
# custom libraries
if options.get("libraries"):
installed_conds.append(
_install_project_env_custom_libraries(project_env, options)

@@ -152,6 +153,8 @@ def _install_project_env_platform(project_env, options):
skip_dependencies=options.get("skip_dependencies"),
force=options.get("force"),
)
# ensure SCons is installed
get_core_package_dir("tool-scons")
return not already_up_to_date


@@ -206,7 +209,7 @@ def _install_project_env_libraries(project_env, options):
config = ProjectConfig.get_instance()

compatibility_qualifiers = {}
if config.get(f"env:{project_env}", "platform"):
if config.get(f"env:{project_env}", "platform", None):
try:
p = PlatformFactory.new(config.get(f"env:{project_env}", "platform"))
compatibility_qualifiers["platforms"] = [p.name]

@@ -219,9 +222,11 @@ def _install_project_env_libraries(project_env, options):

env_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "libdeps_dir"), project_env),
compatibility=PackageCompatibility(**compatibility_qualifiers)
if compatibility_qualifiers
else None,
compatibility=(
PackageCompatibility(**compatibility_qualifiers)
if compatibility_qualifiers
else None
),
)
private_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "lib_dir"))

@@ -292,7 +297,11 @@ def _install_project_private_library_deps(private_pkg, private_lm, env_lm, optio
if not spec.external and not spec.owner:
continue
pkg = private_lm.get_package(spec)
if not pkg and not env_lm.get_package(spec):
if (
not pkg
and not private_lm.get_package(spec)
and not env_lm.get_package(spec)
):
pkg = env_lm.install(
spec,
skip_dependencies=True,
@@ -22,6 +22,7 @@ from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig

@@ -59,7 +60,8 @@ def humanize_package(pkg, spec=None, verbose=False):
if spec and not isinstance(spec, PackageSpec):
spec = PackageSpec(spec)
data = [
click.style("{name} @ {version}".format(**pkg.metadata.as_dict()), fg="cyan")
click.style(pkg.metadata.name, fg="cyan"),
click.style(f"@ {str(pkg.metadata.version)}", bold=True),
]
extra_data = ["required: %s" % (spec.humanize() if spec else "Any")]
if verbose:

@@ -135,20 +137,20 @@ def list_global_packages(options):
("libraries", LibraryPackageManager(options.get("storage_dir"))),
]
only_packages = any(
options.get(type_) or options.get(f"only_{type_}") for (type_, _) in data
options.get(typex) or options.get(f"only_{typex}") for (typex, _) in data
)
for type_, pm in data:
for typex, pm in data:
skip_conds = [
only_packages
and not options.get(type_)
and not options.get(f"only_{type_}"),
and not options.get(typex)
and not options.get(f"only_{typex}"),
not pm.get_installed(),
]
if any(skip_conds):
continue
click.secho(type_.capitalize(), bold=True)
click.secho(typex.capitalize(), bold=True)
print_dependency_tree(
pm, filter_specs=options.get(type_), verbose=options.get("verbose")
pm, filter_specs=options.get(typex), verbose=options.get("verbose")
)
click.echo()

@@ -156,12 +158,12 @@ def list_global_packages(options):
def list_project_packages(options):
environments = options["environments"]
only_packages = any(
options.get(type_) or options.get(f"only_{type_}")
for type_ in ("platforms", "tools", "libraries")
options.get(typex) or options.get(f"only_{typex}")
for typex in ("platforms", "tools", "libraries")
)
only_platform_packages = any(
options.get(type_) or options.get(f"only_{type_}")
for type_ in ("platforms", "tools")
options.get(typex) or options.get(f"only_{typex}")
for typex in ("platforms", "tools")
)
only_library_packages = options.get("libraries") or options.get("only_libraries")

@@ -186,20 +188,20 @@ def list_project_packages(options):


def print_project_env_platform_packages(project_env, options):
config = ProjectConfig.get_instance()
platform = config.get(f"env:{project_env}", "platform")
if not platform:
return None
pkg = PlatformPackageManager().get_package(platform)
if not pkg:
try:
p = PlatformFactory.from_env(project_env)
except UnknownPlatform:
return None
click.echo(
"Platform %s"
% (humanize_package(pkg, platform, verbose=options.get("verbose")))
% (
humanize_package(
PlatformPackageManager().get_package(p.get_dir()),
p.config.get(f"env:{project_env}", "platform"),
verbose=options.get("verbose"),
)
)
)
p = PlatformFactory.new(pkg)
if project_env:
p.configure_project_packages(project_env)
print_dependency_tree(
p.pm,
specs=[p.get_package_spec(name) for name in p.packages],
@@ -62,10 +62,9 @@ class OutdatedCandidate:
)
@click.option("-e", "--environment", "environments", multiple=True)
def package_outdated_cmd(project_dir, environments):
candidates = fetch_outdated_candidates(
project_dir, environments, with_progress=True
)
print_outdated_candidates(candidates)
with fs.cd(project_dir):
candidates = fetch_outdated_candidates(environments, with_progress=True)
print_outdated_candidates(candidates)


def print_outdated_candidates(candidates):

@@ -126,8 +125,10 @@ def get_candidate_update_color(outdated):
return None


def fetch_outdated_candidates(project_dir, environments, with_progress=False):
def fetch_outdated_candidates(environments, with_progress=False):
candidates = []
config = ProjectConfig.get_instance()
config.validate(environments)

def _add_candidate(data):
new_candidate = OutdatedCandidate(

@@ -139,20 +140,16 @@ def fetch_outdated_candidates(project_dir, environments, with_progress=False):
return
candidates.append(new_candidate)

with fs.cd(project_dir):
config = ProjectConfig.get_instance()
config.validate(environments)
# platforms
for item in find_platform_candidates(config, environments):
_add_candidate(item)
# platform package dependencies
for dep_item in find_platform_dependency_candidates(item["env"]):
_add_candidate(dep_item)

# platforms
for item in find_platform_candidates(config, environments):
_add_candidate(item)
# platform package dependencies
for dep_item in find_platform_dependency_candidates(item):
_add_candidate(dep_item)

# libraries
for item in find_library_candidates(config, environments):
_add_candidate(item)
# libraries
for item in find_library_candidates(config, environments):
_add_candidate(item)

result = []
if not with_progress:

@@ -172,7 +169,7 @@ def find_platform_candidates(config, environments):
result = []
pm = PlatformPackageManager()
for env in config.envs():
platform = config.get(f"env:{env}", "platform")
platform = config.get(f"env:{env}", "platform", None)
if not platform or (environments and env not in environments):
continue
spec = PackageSpec(platform)

@@ -183,14 +180,13 @@ def find_platform_candidates(config, environments):
return result


def find_platform_dependency_candidates(platform_candidate):
def find_platform_dependency_candidates(env):
result = []
p = PlatformFactory.new(platform_candidate["spec"])
p.configure_project_packages(platform_candidate["env"])
p = PlatformFactory.from_env(env)
for pkg in p.get_installed_packages():
result.append(
dict(
env=platform_candidate["env"],
env=env,
pm=p.pm,
pkg=pkg,
spec=p.get_package_spec(pkg.metadata.name),
@@ -56,7 +56,7 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
)
@click.option(
"--type",
"type_",
"typex",
type=click.Choice(list(PackageType.items().values())),
help="Custom package type",
)

@@ -82,10 +82,11 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
help="Do not show interactive prompt",
hidden=True,
)
def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
package, owner, type_, released_at, private, notify, no_interactive, non_interactive
def package_publish_cmd( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals
package, owner, typex, released_at, private, notify, no_interactive, non_interactive
):
click.secho("Preparing a package...", fg="cyan")
package = os.path.abspath(package)
no_interactive = no_interactive or non_interactive
owner = owner or AccountClient().get_logged_username()
do_not_pack = (

@@ -103,14 +104,14 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
p = PackagePacker(package)
archive_path = p.pack()

type_ = type_ or PackageType.from_archive(archive_path)
typex = typex or PackageType.from_archive(archive_path)
manifest = ManifestSchema().load_manifest(
ManifestParserFactory.new_from_archive(archive_path).as_dict()
)
name = manifest.get("name")
version = manifest.get("version")
data = [
("Type:", type_),
("Type:", typex),
("Owner:", owner),
("Name:", name),
("Version:", version),

@@ -124,13 +125,13 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
check_archive_file_names(archive_path)

# look for duplicates
check_package_duplicates(owner, type_, name, version, manifest.get("system"))
check_package_duplicates(owner, typex, name, version, manifest.get("system"))

if not no_interactive:
click.confirm(
"Are you sure you want to publish the %s %s to the registry?\n"
% (
type_,
typex,
click.style(
"%s/%s@%s" % (owner, name, version),
fg="cyan",

@@ -146,7 +147,7 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
)
click.echo("Publishing...")
response = RegistryClient().publish_package(
owner, type_, archive_path, released_at, private, notify
owner, typex, archive_path, released_at, private, notify
)
if not do_not_pack:
os.remove(archive_path)
@@ -65,10 +65,12 @@ def print_search_item(item):
click.echo(
"%s • %s • Published on %s"
% (
item["type"].capitalize()
if item["tier"] == "community"
else click.style(
("%s %s" % (item["tier"], item["type"])).title(), bold=True
(
item["type"].capitalize()
if item["tier"] == "community"
else click.style(
("%s %s" % (item["tier"], item["type"])).title(), bold=True
)
),
item["version"]["name"],
util.parse_datetime(item["version"]["released_at"]).strftime("%c"),
@@ -111,7 +111,7 @@ def uninstall_project_env_dependencies(project_env, options=None):
uninstalled_conds.append(
_uninstall_project_env_custom_tools(project_env, options)
)
# custom ibraries
# custom libraries
if options.get("libraries"):
uninstalled_conds.append(
_uninstall_project_env_custom_libraries(project_env, options)
@@ -110,7 +110,7 @@ def update_project_env_dependencies(project_env, options=None):
# custom tools
if options.get("tools"):
updated_conds.append(_update_project_env_custom_tools(project_env, options))
# custom ibraries
# custom libraries
if options.get("libraries"):
updated_conds.append(_update_project_env_custom_libraries(project_env, options))
# declared dependencies
@@ -34,7 +34,7 @@ class FileDownloader:
url,
stream=True,
)
if self._http_response.status_code != 200:
if self._http_response.status_code not in (200, 203):
raise PackageException(
"Got the unrecognized status code '{0}' when downloaded {1}".format(
self._http_response.status_code, url
@@ -13,10 +13,10 @@
# limitations under the License.

from platformio import util
from platformio.exception import PlatformioException, UserSideException
from platformio.exception import UserSideException


class PackageException(PlatformioException):
class PackageException(UserSideException):
pass


@@ -51,14 +51,14 @@ class MissingPackageManifestError(ManifestException):
MESSAGE = "Could not find one of '{0}' manifest files in the package"


class UnknownPackageError(UserSideException):
class UnknownPackageError(PackageException):
MESSAGE = (
"Could not find the package with '{0}' requirements for your system '%s'"
% util.get_systype()
)


class NotGlobalLibDir(UserSideException):
class NotGlobalLibDir(PackageException):
MESSAGE = (
"The `{0}` is not a PlatformIO project.\n\n"
"To manage libraries in global storage `{1}`,\n"
@@ -15,7 +15,7 @@
import os
from time import sleep, time

from platformio.exception import PlatformioException
from platformio.exception import UserSideException

LOCKFILE_TIMEOUT = 3600 # in seconds, 1 hour
LOCKFILE_DELAY = 0.2

@@ -36,11 +36,11 @@ except ImportError:
LOCKFILE_CURRENT_INTERFACE = None


class LockFileExists(PlatformioException):
class LockFileExists(UserSideException):
pass


class LockFileTimeoutError(PlatformioException):
class LockFileTimeoutError(UserSideException):
pass
@@ -98,9 +98,13 @@ class PackageManagerInstallMixin:
else:
pkg = self.install_from_registry(
spec,
search_qualifiers=compatibility.to_search_qualifiers()
if compatibility
else None,
search_qualifiers=(
compatibility.to_search_qualifiers(
["platforms", "frameworks", "authors"]
)
if compatibility
else None
),
)

if not pkg or not pkg.metadata:
@@ -25,12 +25,13 @@ from platformio.registry.mirror import RegistryFileMirrorIterator

class PackageManagerRegistryMixin:
def install_from_registry(self, spec, search_qualifiers=None):
package = version = None
if spec.owner and spec.name and not search_qualifiers:
package = self.fetch_registry_package(spec)
if not package:
raise UnknownPackageError(spec.humanize())
version = self.pick_best_registry_version(package["versions"], spec)
else:
elif spec.id or spec.name:
packages = self.search_registry_packages(spec, search_qualifiers)
if not packages:
raise UnknownPackageError(spec.humanize())
@@ -35,7 +35,7 @@ from platformio.package.manager._update import PackageManagerUpdateMixin
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.meta import (
PackageItem,
PackageMetaData,
PackageMetadata,
PackageSpec,
PackageType,
)

@@ -199,7 +199,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in

def build_metadata(self, pkg_dir, spec, vcs_revision=None):
manifest = self.load_manifest(pkg_dir)
metadata = PackageMetaData(
metadata = PackageMetadata(
type=self.pkg_type,
name=manifest.get("name"),
version=manifest.get("version"),

@@ -280,11 +280,15 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in

# external "URL" mismatch
if spec.external:
# local folder mismatch
if os.path.abspath(spec.uri) == os.path.abspath(pkg.path) or (
# local/symlinked folder mismatch
check_conds = [
os.path.abspath(spec.uri) == os.path.abspath(pkg.path),
spec.uri.startswith("file://")
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:])
):
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:]),
spec.uri.startswith("symlink://")
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[10:]),
]
if any(check_conds):
return True
if spec.uri != pkg.metadata.spec.uri:
return False
@@ -14,7 +14,8 @@

import os

from platformio import __core_packages__, exception
from platformio import exception
from platformio.dependencies import get_core_dependencies
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageSpec

@@ -23,7 +24,7 @@ from platformio.package.meta import PackageSpec
def get_installed_core_packages():
result = []
pm = ToolPackageManager()
for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
pkg = pm.get_package(spec)
if pkg:

@@ -32,11 +33,11 @@ def get_installed_core_packages():


def get_core_package_dir(name, spec=None, auto_install=True):
if name not in __core_packages__:
if name not in get_core_dependencies():
raise exception.PlatformioException("Please upgrade PlatformIO Core")
pm = ToolPackageManager()
spec = spec or PackageSpec(
owner="platformio", name=name, requirements=__core_packages__[name]
owner="platformio", name=name, requirements=get_core_dependencies()[name]
)
pkg = pm.get_package(spec)
if pkg:

@@ -50,7 +51,7 @@ def get_core_package_dir(name, spec=None, auto_install=True):

def update_core_packages():
pm = ToolPackageManager()
for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
try:
pm.update(spec, spec)

@@ -65,7 +66,7 @@ def remove_unnecessary_core_packages(dry_run=False):
pm = ToolPackageManager()
best_pkg_versions = {}

for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
pkg = pm.get_package(spec)
if not pkg:
@@ -38,7 +38,7 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
def manifest_names(self):
return PackageType.get_manifest_map()[PackageType.PLATFORM]

def install( # pylint: disable=arguments-differ,too-many-arguments
def install( # pylint: disable=arguments-differ,too-many-arguments,too-many-positional-arguments
self,
spec,
skip_dependencies=False,
@@ -294,9 +294,11 @@ class BaseManifestParser:
if not matched_files:
continue
result[root] = dict(
name="Examples"
if root == examples_dir
else os.path.relpath(root, examples_dir),
name=(
"Examples"
if root == examples_dir
else os.path.relpath(root, examples_dir)
),
base=os.path.relpath(root, package_dir),
files=matched_files,
)

@@ -540,6 +542,8 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
"esp32": "espressif32",
"arc32": "intel_arc32",
"stm32": "ststm32",
"nrf52": "nordicnrf52",
"rp2040": "raspberrypi",
}
for arch in properties.get("architectures", "").split(","):
if "particle-" in arch:
@@ -183,7 +183,7 @@ class ManifestSchema(BaseSchema):
validate=[
validate.Length(min=1, max=50),
validate.Regexp(
r"^[a-z\d\-\+\. ]+$", error="Only [a-z0-9-+. ] chars are allowed"
r"^[a-z\d\-_\+\. ]+$", error="Only [a-z0-9+_-. ] chars are allowed"
),
]
)

@@ -276,9 +276,9 @@ class ManifestSchema(BaseSchema):
@staticmethod
@memoized(expire="1h")
def load_spdx_licenses():
version = "3.20"
version = "3.26.0"
spdx_data_url = (
"https://raw.githubusercontent.com/spdx/license-list-data/"
"v%s/json/licenses.json" % version
f"v{version}/json/licenses.json"
)
return json.loads(fetch_remote_content(spdx_data_url))
@@ -24,7 +24,7 @@ import semantic_version
from platformio import fs
from platformio.compat import get_object_members, hashlib_encode_data, string_types
from platformio.package.manifest.parser import ManifestFileType
from platformio.package.version import cast_version_to_semver
from platformio.package.version import SemanticVersionError, cast_version_to_semver
from platformio.util import items_in_list


@@ -65,7 +65,14 @@ class PackageType:


class PackageCompatibility:
KNOWN_QUALIFIERS = ("platforms", "frameworks", "authors")
KNOWN_QUALIFIERS = (
"owner",
"name",
"version",
"platforms",
"frameworks",
"authors",
)

@classmethod
def from_dependency(cls, dependency):

@@ -89,19 +96,45 @@ class PackageCompatibility:
def __repr__(self):
return "PackageCompatibility <%s>" % self.qualifiers

def to_search_qualifiers(self):
return self.qualifiers
def to_search_qualifiers(self, fields=None):
result = {}
for name, value in self.qualifiers.items():
if not fields or name in fields:
result[name] = value
return result

def is_compatible(self, other):
assert isinstance(other, PackageCompatibility)
for key, value in self.qualifiers.items():
for key, current_value in self.qualifiers.items():
other_value = other.qualifiers.get(key)
if not value or not other_value:
if not current_value or not other_value:
continue
if not items_in_list(value, other_value):
if any(isinstance(v, list) for v in (current_value, other_value)):
if not items_in_list(current_value, other_value):
return False
continue
if key == "version":
if not self._compare_versions(current_value, other_value):
return False
continue
if current_value != other_value:
return False
return True

def _compare_versions(self, current, other):
if current == other:
return True
try:
version = (
other
if isinstance(other, semantic_version.Version)
else cast_version_to_semver(other)
)
return version in semantic_version.SimpleSpec(current)
except ValueError:
pass
return False


class PackageOutdatedResult:
UPDATE_INCREMENT_MAJOR = "major"

@@ -163,7 +196,7 @@ class PackageOutdatedResult:


class PackageSpec: # pylint: disable=too-many-instance-attributes
def __init__( # pylint: disable=redefined-builtin,too-many-arguments
def __init__( # pylint: disable=redefined-builtin,too-many-arguments,too-many-positional-arguments
self, raw=None, owner=None, id=None, name=None, requirements=None, uri=None
):
self._requirements = None

@@ -175,7 +208,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
if requirements:
try:
self.requirements = requirements
except ValueError as exc:
except SemanticVersionError as exc:
if not self.name or self.uri or self.raw:
raise exc
self.raw = "%s=%s" % (self.name, requirements)

@@ -224,11 +257,14 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
if not value:
self._requirements = None
return
self._requirements = (
value
if isinstance(value, semantic_version.SimpleSpec)
else semantic_version.SimpleSpec(str(value))
)
try:
self._requirements = (
value
if isinstance(value, semantic_version.SimpleSpec)
else semantic_version.SimpleSpec(str(value))
)
except ValueError as exc:
raise SemanticVersionError(exc) from exc

def humanize(self):
result = ""

@@ -360,7 +396,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
parts.path.endswith(".git"),
# Handle GitHub URL (https://github.com/user/package)
parts.netloc in ("github.com", "gitlab.com", "bitbucket.com")
and not parts.path.endswith((".zip", ".tar.gz")),
and not parts.path.endswith((".zip", ".tar.gz", ".tar.xz")),
]
hg_conditions = [
# Handle Developer Mbed URL

@@ -398,7 +434,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
return name


class PackageMetaData:
class PackageMetadata:
def __init__( # pylint: disable=redefined-builtin
self, type, name, version, spec=None
):

@@ -413,7 +449,7 @@ class PackageMetaData:

def __repr__(self):
return (
"PackageMetaData <type={type} name={name} version={version} "
"PackageMetadata <type={type} name={name} version={version} "
"spec={spec}".format(**self.as_dict())
)

@@ -463,7 +499,7 @@ class PackageMetaData:
data["spec"]["uri"] = data["spec"]["url"]
del data["spec"]["url"]
data["spec"] = PackageSpec(**data["spec"])
return PackageMetaData(**data)
return PackageMetadata(**data)


class PackageItem:

@@ -482,9 +518,11 @@ class PackageItem:

def __eq__(self, other):
conds = [
os.path.realpath(self.path) == os.path.realpath(other.path)
if self.path and other.path
else self.path == other.path,
(
os.path.realpath(self.path) == os.path.realpath(other.path)
if self.path and other.path
else self.path == other.path
),
self.metadata == other.metadata,
]
return all(conds)

@@ -512,7 +550,7 @@ class PackageItem:
for location in self.get_metafile_locations():
manifest_path = os.path.join(location, self.METAFILE_NAME)
if os.path.isfile(manifest_path):
return PackageMetaData.load(manifest_path)
return PackageMetadata.load(manifest_path)
return None

def dump_meta(self):
@@ -13,6 +13,7 @@
# limitations under the License.

import os
import sys
from tarfile import open as tarfile_open
from time import mktime
from zipfile import ZipFile

@@ -82,19 +83,23 @@ class TARArchiver(BaseArchiver):
).startswith(base)

def extract_item(self, item, dest_dir):
if sys.version_info >= (3, 12):
self._afo.extract(item, dest_dir, filter="data")
return self.after_extract(item, dest_dir)

# apply custom security logic
dest_dir = self.resolve_path(dest_dir)
bad_conds = [
self.is_bad_path(item.name, dest_dir),
self.is_link(item) and self.is_bad_link(item, dest_dir),
]
if not any(bad_conds):
super().extract_item(item, dest_dir)
else:
click.secho(
if any(bad_conds):
return click.secho(
"Blocked insecure item `%s` from TAR archive" % item.name,
fg="red",
err=True,
)
return super().extract_item(item, dest_dir)


class ZIPArchiver(BaseArchiver):

@@ -147,6 +152,7 @@ class FileUnpacker:
magic_map = {
b"\x1f\x8b\x08": TARArchiver,
b"\x42\x5a\x68": TARArchiver,
b"\xfd\x37\x7a\x58\x5a\x00": TARArchiver,
b"\x50\x4b\x03\x04": ZIPArchiver,
}
magic_len = max(len(k) for k in magic_map)
@@ -18,14 +18,10 @@ import subprocess
from urllib.parse import urlparse

from platformio import proc
from platformio.package.exception import (
PackageException,
PlatformioException,
UserSideException,
)
from platformio.exception import UserSideException


class VCSBaseException(PackageException):
class VCSBaseException(UserSideException):
pass


@@ -74,8 +70,8 @@ class VCSClientBase:
self.get_cmd_output(["--version"])
else:
assert self.run_cmd(["--version"])
except (AssertionError, OSError, PlatformioException) as exc:
raise UserSideException(
except (AssertionError, OSError) as exc:
raise VCSBaseException(
"VCS: `%s` client is not installed in your system" % self.command
) from exc
return True
@@ -16,6 +16,12 @@ import re

import semantic_version

from platformio.exception import UserSideException


class SemanticVersionError(UserSideException):
pass


def cast_version_to_semver(value, force=True, raise_exception=False):
assert value

@@ -29,7 +35,7 @@ def cast_version_to_semver(value, force=True, raise_exception=False):
except ValueError:
pass
if raise_exception:
raise ValueError("Invalid SemVer version %s" % value)
raise SemanticVersionError("Invalid SemVer version %s" % value)
# parse commit hash
if re.match(r"^[\da-f]+$", value, flags=re.I):
return semantic_version.Version("0.0.0+sha." + value)

@@ -38,7 +44,7 @@ def cast_version_to_semver(value, force=True, raise_exception=False):

def pepver_to_semver(pepver):
return cast_version_to_semver(
re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)
)
@@ -44,40 +44,29 @@ class PlatformRunMixin:
value = json.loads(value)
return value

def run( # pylint: disable=too-many-arguments
def run( # pylint: disable=too-many-arguments,too-many-positional-arguments
self, variables, targets, silent, verbose, jobs
):
assert isinstance(variables, dict)
assert isinstance(targets, list)

self.ensure_engine_compatible()
self.configure_project_packages(variables["pioenv"], targets)
self._report_non_sensitive_data(variables["pioenv"], targets)

self.silent = silent
self.verbose = verbose or app.get_setting("force_verbose")

variables["platform_manifest"] = self.manifest_path

if "build_script" not in variables:
variables["build_script"] = self.get_build_script()
if not os.path.isfile(variables["build_script"]):
raise BuildScriptNotFound(variables["build_script"])

telemetry.log_platform_run(self, self.config, variables["pioenv"], targets)
result = self._run_scons(variables, targets, jobs)

assert "returncode" in result

return result

def _report_non_sensitive_data(self, env, targets):
options = self.config.items(env=env, as_dict=True)
options["platform_packages"] = [
dict(name=item["name"], version=item["version"])
for item in self.dump_used_packages()
]
options["platform"] = {"name": self.name, "version": self.version}
telemetry.send_run_environment(options, targets)

def _run_scons(self, variables, targets, jobs):
scons_dir = get_core_package_dir("tool-scons")
args = [

@@ -127,9 +116,9 @@ class PlatformRunMixin:
args,
stdout=proc.BuildAsyncPipe(
line_callback=self._on_stdout_line,
data_callback=lambda data: None
if self.silent
else _write_and_flush(sys.stdout, data),
data_callback=lambda data: (
None if self.silent else _write_and_flush(sys.stdout, data)
),
),
stderr=proc.BuildAsyncPipe(
line_callback=self._on_stderr_line,
@@ -34,6 +34,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub

def __init__(self, manifest_path):
self.manifest_path = manifest_path
self.project_env = None # set by factory.from_env(env)
self.silent = False
self.verbose = False

@@ -168,6 +169,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE

def board_config(self, id_):
assert id_
return self.get_boards(id_)

def get_package_type(self, name):
Some files were not shown because too many files have changed in this diff.