Mirror of https://github.com/platformio/platformio-core.git (synced 2025-06-25 17:41:54 +02:00)
Compare commits
483 Commits
Commit list: 444c57b4a6 … c763f4b3a3 (483 commits)
.github/ISSUE_TEMPLATE.md (vendored, 5 changes)

@@ -6,9 +6,8 @@ What kind of issue is this?
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)
All issues related to PlatformIO IDE should be reported to the
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues) repository

- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
.github/workflows/core.yml (vendored, 23 changes)

@@ -8,22 +8,17 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
exclude:
- os: macos-latest
python-version: "3.6"
- os: windows-latest
python-version: "3.10"
python-version: ["3.11", "3.12", "3.13"]

runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -32,12 +27,18 @@ jobs:
python -m pip install --upgrade pip
pip install tox

- name: Python Lint
if: ${{ matrix.python-version != '3.6' }}
- name: Run "codespell" on Linux
if: startsWith(matrix.os, 'ubuntu')
run: |
tox -e lint
python -m pip install codespell
make codespell

- name: Core System Info
run: |
tox -e py

- name: Integration Tests
if: ${{ matrix.python-version == '3.11' }}
run: |
tox -e testcore
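A small sketch, just to make the effect of the matrix change concrete: the old 3 OS x 5 Python grid minus the two excludes ran 13 jobs, while the new 3 x 3 grid runs 9 (OS and version lists are copied from the diff; the script itself is only illustrative).

```python
from itertools import product

old_jobs = [
    (os_, py)
    for os_, py in product(
        ["ubuntu-latest", "windows-latest", "macos-latest"],
        ["3.6", "3.7", "3.8", "3.9", "3.10"],
    )
    # the two "exclude" entries from the old matrix
    if (os_, py) not in {("macos-latest", "3.6"), ("windows-latest", "3.10")}
]
new_jobs = list(
    product(
        ["ubuntu-latest", "windows-latest", "macos-latest"],
        ["3.11", "3.12", "3.13"],
    )
)
print(len(old_jobs), len(new_jobs))  # 13 9
```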
.github/workflows/deployment.yml (vendored, 12 changes)

@@ -12,19 +12,19 @@ jobs:
environment: production

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
pip install tox build

- name: Deployment Tests
env:
@@ -34,8 +34,8 @@
run: |
tox -e testcore

- name: Build Python source tarball
run: python setup.py sdist
- name: Build Python distributions
run: python -m build

- name: Publish package to PyPI
if: ${{ github.ref == 'refs/heads/master' }}
.github/workflows/docs.yml (vendored, 16 changes)

@@ -7,13 +7,13 @@ jobs:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: 3.7
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
@@ -40,7 +40,7 @@ jobs:

- name: Save artifact
if: ${{ github.event_name == 'push' }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: docs
path: ./docs.tar.gz
@@ -57,7 +57,7 @@ jobs:
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
uses: actions/download-artifact@v4
with:
name: docs
- name: Unpack artifact
@@ -65,7 +65,7 @@ jobs:
mkdir ./${{ env.LATEST_DOCS_DIR }}
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
- name: Delete Artifact
uses: geekyeggo/delete-artifact@v1
uses: geekyeggo/delete-artifact@v5
with:
name: docs
- name: Select Docs type
@@ -78,7 +78,7 @@ jobs:
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}
@@ -101,7 +101,7 @@ jobs:
exit 1
fi
- name: Deploy to Github Pages
uses: peaceiris/actions-gh-pages@v3
uses: peaceiris/actions-gh-pages@v4
with:
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
external_repository: ${{ env.DOCS_REPO }}
.github/workflows/examples.yml (vendored, 8 changes)

@@ -15,14 +15,14 @@ jobs:
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"

- name: Install dependencies
run: |
@@ -34,7 +34,7 @@ jobs:
run: |
# Free space
sudo apt clean
docker rmi $(docker image ls -aq)
# docker rmi $(docker image ls -aq)
df -h
tox -e testexamples
.github/workflows/projects.yml (vendored, 23 changes)

@@ -13,11 +13,6 @@ jobs:
folder: "Marlin"
config_dir: "Marlin"
env_name: "mega2560"
# - esphome:
# repository: "esphome/esphome"
# folder: "esphome"
# config_dir: "esphome"
# env_name: "esp32-arduino"
- smartknob:
repository: "scottbez1/smartknob"
folder: "smartknob"
@@ -32,38 +27,30 @@ jobs:
repository: "1technophile/OpenMQTTGateway"
folder: "OpenMQTTGateway"
config_dir: "OpenMQTTGateway"
env_name: "esp32-m5atom"
env_name: "esp32-m5atom-lite"
os: [ubuntu-latest, windows-latest, macos-latest]
exclude:
- os: windows-latest
project: {"esphome": "", "repository": "esphome/esphome", "folder": "esphome", "config_dir": "esphome", "env_name": "esp32-arduino"}

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

- name: Install PlatformIO
run: pip install -U .

- name: Check out ${{ matrix.project.repository }}
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
submodules: "recursive"
repository: ${{ matrix.project.repository }}
path: ${{ matrix.project.folder }}

- name: Install ESPHome dependencies
# Requires esptool package as it's used in a custom prescript
if: ${{ contains(matrix.project.repository, 'esphome') }}
run: pip install esptool==3.*

- name: Compile ${{ matrix.project.repository }}
run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
@@ -8,4 +8,5 @@ disable=
invalid-name,
too-few-public-methods,
consider-using-f-string,
cyclic-import
cyclic-import,
use-dict-literal
CODE_OF_CONDUCT.md (Normal file, 3 changes)

@@ -0,0 +1,3 @@
# Code of Conduct

See https://piolabs.com/legal/code-of-conduct.html
@@ -6,12 +6,13 @@ To get started, <a href="https://cla-assistant.io/platformio/platformio-core">si
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`
4. Go to the root of the PlatformIO Core project where `tox.ini` is located (``cd platformio-core``) and run `tox -e py39`.
You can replace `py39` with your own Python version. For example, `py311` means Python 3.11.
5. Activate current development environment:

* Windows: `.tox\py37\Scripts\activate`
* Bash/ZSH: `source .tox/py37/bin/activate`
* Fish: `source .tox/py37/bin/activate.fish`
* Windows: `.tox\py39\Scripts\activate`
* Bash/ZSH: `source .tox/py39/bin/activate`
* Fish: `source .tox/py39/bin/activate.fish`

6. Make changes to code, documentation, etc.
7. Lint source code `make before-commit`
HISTORY.rst (154 changes)
@ -2,16 +2,166 @@ Release Notes
|
||||
=============
|
||||
|
||||
.. |PIOCONF| replace:: `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ configuration file
|
||||
.. |LIBRARYJSON| replace:: `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__
|
||||
.. |LDF| replace:: `LDF <https://docs.platformio.org/en/latest/librarymanager/ldf.html>`__
|
||||
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
|
||||
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
|
||||
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
|
||||
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
|
||||
.. |PIOHOME| replace:: `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
|
||||
|
||||
.. _release_notes_6:
|
||||
|
||||
PlatformIO Core 6
|
||||
-----------------
|
||||
|
||||
**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**
|
||||
Unlock the true potential of embedded software development with
|
||||
PlatformIO's collaborative ecosystem, embracing declarative principles,
|
||||
test-driven methodologies, and modern toolchains for unrivaled success.
|
||||
|
||||
6.1.19 (2025-??-??)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Fixed a regression issue where custom build flags were not properly reflected in the `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ file, ensuring accurate compilation database generation
|
||||
|
||||
6.1.18 (2025-03-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a regression issue that prevented |PIOHOME| from opening external links (`issue #5084 <https://github.com/platformio/platformio-core/issues/5084>`_)
|
||||
|
||||
6.1.17 (2025-02-13)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the `PLATFORMIO_RUN_JOBS <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_RUN_JOBS>`__ environment variable, allowing manual override of the number of parallel build jobs (`issue #5077 <https://github.com/platformio/platformio-core/issues/5077>`_)
|
||||
* Added support for ``tar.xz`` tarball dependencies (`pull #4974 <https://github.com/platformio/platformio-core/pull/4974>`_)
|
||||
* Ensured that dependencies of private libraries are no longer unnecessarily re-installed, optimizing dependency management and reducing redundant operations (`issue #4987 <https://github.com/platformio/platformio-core/issues/4987>`_)
|
||||
* Resolved an issue where the ``compiledb`` target failed to properly escape compiler executable paths containing spaces (`issue #4998 <https://github.com/platformio/platformio-core/issues/4998>`_)
|
||||
* Resolved an issue with incorrect path resolution when linking static libraries via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option (`issue #5004 <https://github.com/platformio/platformio-core/issues/5004>`_)
|
||||
* Resolved an issue where the ``--project-dir`` flag did not function correctly with the `pio check <https://docs.platformio.org/en/latest/core/userguide/cmd_check.html>`__ and `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ commands (`issue #5029 <https://github.com/platformio/platformio-core/issues/5029>`_)
|
||||
* Resolved an issue where the |LDF| occasionally excluded bundled platform libraries from the dependency graph (`pull #4941 <https://github.com/platformio/platformio-core/pull/4941>`_)
|
||||
|
||||
6.1.16 (2024-09-26)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.13
|
||||
* Introduced the `PLATFORMIO_SYSTEM_TYPE <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_SYSTEM_TYPE>`__ environment variable, enabling manual override of the detected system type for greater flexibility and control in custom build environments
|
||||
* Enhanced internet connection checks by falling back to HTTPS protocol when HTTP (port 80) fails (`issue #4980 <https://github.com/platformio/platformio-core/issues/4980>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.8.1) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.8.1>`__)
|
||||
* Upgraded the `Doctest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ testing framework to version 2.4.11, the `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ to version 1.15.2, and the `Unity <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/unity.html>`__ to version 2.6.0, incorporating the latest features and improvements for enhanced testing capabilities
|
||||
* Corrected an issue where the incorrect public class was imported for the ``DoctestTestRunner`` (`issue #4949 <https://github.com/platformio/platformio-core/issues/4949>`_)
|
||||
|
||||
6.1.15 (2024-04-25)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
|
||||
* Resolved an issue related to the inaccurate detection of the Clang compiler (`pull #4897 <https://github.com/platformio/platformio-core/pull/4897>`_)
|
||||
|
||||
6.1.14 (2024-03-21)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
|
||||
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
|
||||
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
|
||||
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
|
||||
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
|
||||
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
|
||||
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
|
||||
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
|
||||
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)
|
||||
|
||||
6.1.13 (2024-01-12)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
|
||||
|
||||
6.1.12 (2024-01-10)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.12
|
||||
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
|
||||
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
|
||||
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
|
||||
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
|
||||
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
|
||||
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
|
||||
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
|
||||
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
|
||||
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
|
||||
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
|
||||
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
|
||||
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
|
||||
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
|
||||
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
|
||||
|
||||
6.1.11 (2023-08-31)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
|
||||
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within |PIOHOME|
|
||||
|
||||
6.1.10 (2023-08-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved an issue that caused generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to break when the ``-iprefix`` compiler flag was used
|
||||
* Resolved an issue encountered while utilizing the `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`__ command on the Windows platform to execute Python scripts from a package
|
||||
* Implemented a crucial improvement to the `pio run <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, guaranteeing that the ``monitor`` target is not executed if any of the preceding targets, such as ``upload``, encounter failures
|
||||
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.11 with new checks, CLI commands and various analysis improvements
|
||||
* Resolved a critical issue that arose on macOS ARM platforms due to the Python "requests" module, leading to a "ModuleNotFoundError: No module named 'chardet'" (`issue #4702 <https://github.com/platformio/platformio-core/issues/4702>`_)
|
||||
|
||||
6.1.9 (2023-07-06)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Rectified a regression bug that occurred when the ``-include`` flag was passed via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option as a relative path and subsequently expanded (`issue #4683 <https://github.com/platformio/platformio-core/issues/4683>`_)
|
||||
* Resolved an issue that resulted in unresolved absolute toolchain paths when generating the `Compilation database "compile_commands.json" <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ (`issue #4684 <https://github.com/platformio/platformio-core/issues/4684>`_)
|
||||
|
||||
6.1.8 (2023-07-05)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added a new ``--lint`` option to the `pio project config <https://docs.platformio.org/en/latest/core/userguide/project/cmd_config.html>`__ command, enabling users to efficiently perform linting on the |PIOCONF|
|
||||
* Enhanced the parsing of the |PIOCONF| to provide comprehensive diagnostic information
|
||||
* Expanded the functionality of the |LIBRARYJSON| manifest by allowing the use of the underscore symbol in the `keywords <https://docs.platformio.org/en/latest/manifests/library-json/fields/keywords.html>`__ field
|
||||
* Optimized project integration templates to address the issue of long paths on Windows (`issue #4652 <https://github.com/platformio/platformio-core/issues/4652>`_)
|
||||
* Refactored |UNITTESTING| engine to resolve compiler warnings with "-Wpedantic" option (`pull #4671 <https://github.com/platformio/platformio-core/pull/4671>`_)
|
||||
* Eliminated erroneous warning regarding the use of obsolete PlatformIO Core when downgrading to the stable version (`issue #4664 <https://github.com/platformio/platformio-core/issues/4664>`_)
|
||||
* Updated the `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ command to return C/C++ flags as parsed Unix shell arguments when dumping project build metadata
|
||||
* Resolved a critical issue related to the usage of the ``-include`` flag within the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option, specifically when employing dynamic variables (`issue #4682 <https://github.com/platformio/platformio-core/issues/4682>`_)
|
||||
* Removed PlatformIO IDE for Atom from the documentation as `Atom has been deprecated <https://github.blog/2022-06-08-sunsetting-atom/>`__
|
||||
|
||||
6.1.7 (2023-05-08)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced a new ``--sample-code`` option to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command, which allows users to include sample code in the newly created project
|
||||
* Added validation for `project working environment names <https://docs.platformio.org/en/latest/projectconf/sections/env/index.html#working-env-name>`__ to ensure that they only contain lowercase letters ``a-z``, numbers ``0-9``, and special characters ``_`` (underscore) and ``-`` (hyphen)
|
||||
* Added the ability to show a detailed library dependency tree only in `verbose mode <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-v>`__, which can help you understand the relationship between libraries and troubleshoot issues more effectively (`issue #4517 <https://github.com/platformio/platformio-core/issues/4517>`_)
|
||||
* Added the ability to run only the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ when using the `pio run -t monitor <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, saving you time and resources by skipping the build process
|
||||
* Implemented a new feature to store device monitor logs in the project's ``logs`` folder, making it easier to access and review device monitor logs for your projects (`issue #4596 <https://github.com/platformio/platformio-core/issues/4596>`_)
|
||||
* Improved support for projects located on Windows network drives, including Network Shared Folder, Dropbox, OneDrive, Google Drive, and other similar services (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
|
||||
* Improved source file filtering functionality for the `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__ feature, making it easier to analyze only the code you need to
|
||||
* Upgraded the build engine to the latest version of SCons (4.5.2) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.5.2>`__)
|
||||
* Implemented a fix for shell injection vulnerabilities when converting INO files to CPP, ensuring your code is safe and secure (`issue #4532 <https://github.com/platformio/platformio-core/issues/4532>`_)
|
||||
* Restored the project generator for the `NetBeans IDE <https://docs.platformio.org/en/latest/integration/ide/netbeans.html>`__, providing you with more flexibility and options for your development workflow
|
||||
* Resolved installation issues with PIO Remote on Raspberry Pi and other small form-factor PCs (`issue #4425 <https://github.com/platformio/platformio-core/issues/4425>`_, `issue #4493 <https://github.com/platformio/platformio-core/issues/4493>`_, `issue #4607 <https://github.com/platformio/platformio-core/issues/4607>`_)
|
||||
* Resolved an issue where the `build_cache_dir <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/directory/build_cache_dir.html>`__ setting was not being recognized consistently across multiple environments (`issue #4574 <https://github.com/platformio/platformio-core/issues/4574>`_)
|
||||
* Resolved an issue where organization details could not be updated using the `pio org update <https://docs.platformio.org/en/latest/core/userguide/org/cmd_update.html>`__ command
|
||||
* Resolved an issue where the incorrect debugging environment was generated for VSCode in "Auto" mode (`issue #4597 <https://github.com/platformio/platformio-core/issues/4597>`_)
|
||||
* Resolved an issue where native tests would fail if a custom program name was specified (`issue #4546 <https://github.com/platformio/platformio-core/issues/4546>`_)
|
||||
* Resolved an issue where the PlatformIO |DEBUGGING| solution was not escaping the tool installation process into MI2 correctly (`issue #4565 <https://github.com/platformio/platformio-core/issues/4565>`_)
|
||||
* Resolved an issue where multiple targets were not executed sequentially (`issue #4604 <https://github.com/platformio/platformio-core/issues/4604>`_)
|
||||
* Resolved an issue where upgrading PlatformIO Core fails on Windows with Python 3.11 (`issue #4540 <https://github.com/platformio/platformio-core/issues/4540>`_)
|
||||
|
||||
6.1.6 (2023-01-23)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.11
|
||||
* Added a new `name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/description.html>`__ configuration option to customize a project name (`pull #4498 <https://github.com/platformio/platformio-core/pull/4498>`_)
|
||||
* Made assets (templates, ``99-platformio-udev.rules``) part of Python's module (`issue #4458 <https://github.com/platformio/platformio-core/issues/4458>`_)
|
||||
* Updated `Clang-Tidy <https://docs.platformio.org/en/latest/plus/check-tools/clang-tidy.html>`__ check tool to v15.0.5 with new diagnostics and bugfixes
|
||||
* Removed dependency on the "zeroconf" package and install it only when a user lists mDNS devices (issue with zeroconf's LGPL license)
|
||||
* Show the real error message instead of "Can not remove temporary directory" when |PIOCONF| is broken (`issue #4480 <https://github.com/platformio/platformio-core/issues/4480>`_)
|
||||
* Fixed an issue with an incorrect test summary when a testcase name includes a colon (`issue #4508 <https://github.com/platformio/platformio-core/issues/4508>`_)
|
||||
* Fixed an issue when `extends <https://docs.platformio.org/en/latest/projectconf/sections/env/options/advanced/extends.html>`__ did not override options in the right order (`issue #4462 <https://github.com/platformio/platformio-core/issues/4462>`_)
|
||||
* Fixed an issue when `pio pkg list <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_list.html>`__ and `pio pkg uninstall <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_uninstall.html>`__ commands fail if there are circular dependencies in the |LIBRARYJSON| manifests (`issue #4475 <https://github.com/platformio/platformio-core/issues/4475>`_)
|
||||
|
||||
6.1.5 (2022-11-01)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
@ -46,7 +196,7 @@ PlatformIO Core 6
|
||||
* Export a ``PIO_UNIT_TESTING`` macro to the project source files and dependent libraries in the |UNITTESTING| mode
|
||||
* Improved detection of Windows architecture (`issue #4353 <https://github.com/platformio/platformio-core/issues/4353>`_)
|
||||
* Warn about unknown `device monitor filters <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #4362 <https://github.com/platformio/platformio-core/issues/4362>`_)
|
||||
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__ manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
|
||||
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the |LIBRARYJSON| manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
|
||||
* Fixed an issue when the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command didn't work with Python 3.6 (`issue #4352 <https://github.com/platformio/platformio-core/issues/4352>`_)
|
||||
|
||||
6.1.1 (2022-07-11)
|
||||
|
@@ -1 +0,0 @@
include LICENSE
Makefile (9 changes)

@@ -10,10 +10,13 @@ format:
black ./platformio
black ./tests

test:
py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
codespell:
codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

before-commit: isort format lint
test:
pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: codespell isort format lint

clean-docs:
rm -rf docs/_build
@@ -36,9 +36,11 @@ PlatformIO Core
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core

`PlatformIO <https://platformio.org>`_ is a professional collaborative platform for embedded development.
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.

**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.

* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
SECURITY.md (Normal file, 34 changes)

@@ -0,0 +1,34 @@
# Security Policy

## Supported Versions

We are committed to ensuring the security and protection of PlatformIO Core.
To this end, we support only the following versions:

| Version | Supported |
| ------- | ------------------ |
| 6.1.x | :white_check_mark: |
| < 6.1 | :x: |

Unsupported versions of the PlatformIO Core may have known vulnerabilities or security issues that could compromise the security of our organization's systems and data.
Therefore, it is important that all developers use only supported versions of the PlatformIO Core.

## Reporting a Vulnerability

We take the security of our systems and data very seriously. We encourage responsible disclosure of any vulnerabilities or security issues that you may find in our systems or applications. If you believe you have discovered a vulnerability, please report it to us immediately.

To report a vulnerability, please send an email to our security team at contact@piolabs.com. Please include as much information as possible, including:

- A description of the vulnerability and how it can be exploited
- Steps to reproduce the vulnerability
- Any additional information that can help us understand and reproduce the vulnerability

Once we receive your report, our security team will acknowledge receipt within 24 hours and will work to validate the reported vulnerability. We will provide periodic updates on the progress of the vulnerability assessment, and will notify you once a fix has been deployed.

If the vulnerability is accepted, we will work to remediate the issue as quickly as possible. We may also provide credit or recognition to the individual who reported the vulnerability, at our discretion.

If the vulnerability is declined, we will provide a justification for our decision and may offer guidance on how to improve the report or how to test the system more effectively.

Please note that we will not take any legal action against individuals who report vulnerabilities in good faith and in accordance with this policy.

Thank you for helping us keep our systems and data secure.
Submodule docs updated: f82e7f4266...70ab7ee27b

Submodule examples updated: f98cb5a9be...0409a90a01
@@ -12,22 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

VERSION = (6, 1, 5)
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
"Your Gateway to Embedded Software Development Excellence. "
"Unlock the true potential of embedded software development "
"with PlatformIO's collaborative ecosystem, embracing "
"declarative principles, test-driven methodologies, and "
"modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"

@@ -44,15 +38,6 @@ __registry_mirror_hosts__ = [
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40400.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}

__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
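For reference, the version tuple above mixes integers with a pre-release string, and the join on the following line renders it as a dotted version string. A tiny standalone check (the two assignments are taken directly from the diff, only the print is added):

```python
# Copied from the diff: the tuple may mix ints and a pre-release string.
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])
print(__version__)  # -> 6.1.19a2
```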
@@ -14,7 +14,7 @@

import os
import sys
from traceback import format_exc
import traceback

import click

@@ -53,13 +53,13 @@ def cli(ctx, force, caller, no_ansi): # pylint: disable=unused-argument
except: # pylint: disable=bare-except
pass

maintenance.on_platformio_start(ctx, caller)
maintenance.on_cmd_start(ctx, caller)


@cli.result_callback()
@click.pass_context
def process_result(ctx, result, *_, **__):
maintenance.on_platformio_end(ctx, result)
def process_result(*_, **__):
maintenance.on_cmd_end()


def configure():
@@ -96,6 +96,7 @@ def main(argv=None):
if argv:
assert isinstance(argv, list)
sys.argv = argv

try:
ensure_python3(raise_exception=True)
configure()
@@ -106,18 +107,18 @@
except Exception as exc: # pylint: disable=broad-except
if not isinstance(exc, exception.ReturnErrorCode):
maintenance.on_platformio_exception(exc)
error_str = "Error: "
error_str = f"{exc.__class__.__name__}: "
if isinstance(exc, exception.PlatformioException):
error_str += str(exc)
else:
error_str += format_exc()
error_str += traceback.format_exc()
error_str += """
============================================================

An unexpected error occurred. Further steps:

* Verify that you have the latest version of PlatformIO using
`pip install -U platformio` command
`python -m pip install -U platformio` command

* Try to find answer in FAQ Troubleshooting section
https://docs.platformio.org/page/faq/index.html
@@ -129,6 +130,8 @@ An unexpected error occurred. Further steps:
"""
click.secho(error_str, fg="red", err=True)
exit_code = int(str(exc)) if str(exc).isdigit() else 1

maintenance.on_platformio_exit()
sys.argv = prev_sys_argv
return exit_code
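The `error_str` change replaces the generic "Error: " label with the exception class name. A minimal, standalone sketch of the new prefix (the real handler also special-cases `PlatformioException` and appends a full traceback for unexpected errors):

```python
def format_error(exc: Exception) -> str:
    # New behavior: lead with the exception class name instead of "Error: ".
    return f"{exc.__class__.__name__}: {exc}"


print(format_error(ValueError("invalid board ID")))  # -> ValueError: invalid board ID
```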
@@ -16,27 +16,23 @@ import os
import time

from platformio import __accounts_api__, app
from platformio.exception import PlatformioException
from platformio.exception import PlatformioException, UserSideException
from platformio.http import HTTPClient, HTTPClientError


class AccountError(PlatformioException):

MESSAGE = "{0}"


class AccountNotAuthorized(AccountError):

class AccountNotAuthorized(AccountError, UserSideException):
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."


class AccountAlreadyAuthorized(AccountError):

class AccountAlreadyAuthorized(AccountError, UserSideException):
MESSAGE = "You are already authorized with {0} account."


class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods

SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7

def __init__(self):
@@ -148,7 +144,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods

def registration(
self, username, email, password, firstname, lastname
): # pylint:disable=too-many-arguments
): # pylint: disable=too-many-arguments,too-many-positional-arguments
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
@@ -298,7 +294,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
return self.fetch_json_data(
"delete",
"/v1/orgs/%s/owners" % orgname,
data={"username": username},
params={"username": username},
x_with_authorization=True,
)

@@ -351,6 +347,6 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
return self.fetch_json_data(
"delete",
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
data={"username": username},
params={"username": username},
x_with_authorization=True,
)
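The switch from `data={...}` to `params={...}` moves the username from the request body to the query string for these DELETE calls. A rough sketch of the difference using the `requests` library (this assumes the underlying HTTPClient ultimately issues standard HTTP requests; the endpoint below is illustrative, not the real API host):

```python
import requests

BASE = "https://api.example.com"  # illustrative endpoint, not the real registry host

# Old behavior: the username traveled in the DELETE request body.
requests.delete(BASE + "/v1/orgs/acme/owners", data={"username": "jdoe"})

# New behavior: it is sent as a query parameter instead,
# i.e. DELETE /v1/orgs/acme/owners?username=jdoe
requests.delete(BASE + "/v1/orgs/acme/owners", params={"username": "jdoe"})
```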
@@ -22,29 +22,27 @@ from platformio.account.validate import validate_email, validate_orgname
@click.argument("cur_orgname")
@click.option(
"--orgname",
callback=lambda _, __, value: validate_orgname(value),
callback=lambda _, __, value: validate_orgname(value) if value else value,
help="A new orgname",
)
@click.option("--email")
@click.option(
"--email",
callback=lambda _, __, value: validate_email(value) if value else value,
)
@click.option("--displayname")
def org_update_cmd(cur_orgname, **kwargs):
client = AccountClient()
org = client.get_org(cur_orgname)
del org["owners"]
new_org = org.copy()
new_org = {
key: value if value is not None else org[key] for key, value in kwargs.items()
}
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
for key in kwargs:
new_org[key] = click.prompt(key.capitalize(), default=org[key])
if key == "email":
validate_email(new_org[key])
if key == "orgname":
validate_orgname(new_org[key])
client.update_org(cur_orgname, new_org)
return click.secho(
"The organization `%s` has been successfully updated." % cur_orgname,
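The core of this change is the dict comprehension that seeds the update payload from the existing organization record, so options the user did not pass keep their current values. A minimal standalone sketch of that merge (the sample data is hypothetical, not taken from the diff):

```python
# Hypothetical current record, standing in for AccountClient().get_org(...)
org = {"orgname": "mycompany", "email": "info@example.com", "displayname": "My Company"}
# CLI options: only --displayname was passed, the rest default to None
kwargs = {"orgname": None, "email": None, "displayname": "My Company, Inc."}

# Options left as None fall back to the current values, so only
# explicitly provided fields end up changed.
new_org = {
    key: value if value is not None else org[key] for key, value in kwargs.items()
}
print(new_org)
# {'orgname': 'mycompany', 'email': 'info@example.com', 'displayname': 'My Company, Inc.'}
```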
@@ -22,9 +22,7 @@ from platformio.account.validate import validate_orgname_teamname
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(
value, teamname_validate=True
),
callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
"--description",
@@ -48,11 +48,13 @@ def team_list_cmd(orgname, json_output):
table_data.append(
(
"Members:",
", ".join(
(member.get("username") for member in team.get("members"))
)
if team.get("members")
else "-",
(
", ".join(
(member.get("username") for member in team.get("members"))
)
if team.get("members")
else "-"
),
)
)
click.echo(tabulate(table_data, tablefmt="plain"))
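The reformatted tuple above wraps the same members-column logic: join the usernames with a comma or fall back to "-" for an empty team. A short standalone sketch with made-up data:

```python
# Made-up team record, standing in for what the account API returns
team = {"name": "DreamTeam", "members": [{"username": "alice"}, {"username": "bob"}]}

members = (
    ", ".join(member.get("username") for member in team.get("members"))
    if team.get("members")
    else "-"
)
print(members)  # -> alice, bob
```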
@@ -26,7 +26,7 @@ from platformio.account.validate import validate_orgname_teamname, validate_team
)
@click.option(
"--name",
callback=lambda _, __, value: validate_teamname(value),
callback=lambda _, __, value: validate_teamname(value) if value else value,
help="A new team name",
)
@click.option(
@@ -36,18 +36,14 @@ def team_update_cmd(orgname_teamname, **kwargs):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
team = client.get_team(orgname, teamname)
del team["id"]
del team["members"]
new_team = team.copy()
new_team = {
key: value if value is not None else team[key] for key, value in kwargs.items()
}
if not any(kwargs.values()):
for field in team:
new_team[field] = click.prompt(
field.replace("_", " ").capitalize(), default=team[field]
)
if field == "name":
validate_teamname(new_team[field])
else:
new_team.update({key: value for key, value in kwargs.items() if value})
for key in kwargs:
new_team[key] = click.prompt(key.capitalize(), default=team[key])
if key == "name":
validate_teamname(new_team[key])
client.update_team(orgname, teamname, new_team)
return click.secho(
"The team %s has been successfully updated." % teamname,
@@ -18,8 +18,10 @@ import click


def validate_username(value, field="username"):
    value = str(value).strip()
    if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid %s format. "
            "%s must contain only alphanumeric characters "

@@ -30,16 +32,22 @@ def validate_username(value, field="username"):
    return value


def validate_orgname(value):
    return validate_username(value, "Organization name")


def validate_email(value):
    value = str(value).strip()
    if not re.match(r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I
    ):
        raise click.BadParameter("Invalid email address")
    return value


def validate_password(value):
    value = str(value).strip()
    if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
    value = str(value).strip() if value else None
    if not value or not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
        raise click.BadParameter(
            "Invalid password format. "
            "Password must contain at least 8 characters"

@@ -48,27 +56,11 @@ def validate_password(value):
    return value


def validate_orgname(value):
    return validate_username(value, "Organization name")


def validate_orgname_teamname(value, teamname_validate=False):
    if ":" not in value:
        raise click.BadParameter(
            "Please specify organization and team name in the next"
            " format - orgname:teamname. For example, mycompany:DreamTeam"
        )
    teamname = str(value.strip().split(":", 1)[1])
    if teamname_validate:
        validate_teamname(teamname)
    return value


def validate_teamname(value):
    if not value:
        return value
    value = str(value).strip()
    if not re.match(r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid team name format. "
            "Team name must only contain alphanumeric characters, "

@@ -77,3 +69,16 @@ def validate_teamname(value):
            " not be longer than 20 characters."
        )
    return value


def validate_orgname_teamname(value):
    value = str(value).strip() if value else None
    if not value or ":" not in value:
        raise click.BadParameter(
            "Please specify organization and team name using the following"
            " format - orgname:teamname. For example, mycompany:DreamTeam"
        )
    orgname, teamname = value.split(":", 1)
    validate_orgname(orgname)
    validate_teamname(teamname)
    return value
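These validators are attached to the account commands as click option callbacks, as the hunks above show. A small sketch of that wiring; the standalone cli command and its --name option are illustrative only, not the actual PlatformIO command tree:

import re

import click


def validate_teamname(value):
    # Same rule as above: 1-20 chars, alphanumeric plus "-", "_" and spaces inside.
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
    ):
        raise click.BadParameter("Invalid team name format.")
    return value


@click.command()
@click.option(
    "--name",
    # Skip validation entirely when the option is omitted.
    callback=lambda _, __, value: validate_teamname(value) if value else value,
)
def cli(name):
    click.echo("Team name: %s" % name)


if __name__ == "__main__":
    cli()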
@ -18,6 +18,7 @@ import json
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from platformio import __version__, exception, fs, proc
|
||||
@ -68,18 +69,23 @@ SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
"pause_telemetry": False,
|
||||
}
|
||||
|
||||
|
||||
def resolve_state_path(conf_option_dir, file_name, ensure_dir_exists=True):
|
||||
state_dir = ProjectConfig.get_instance().get("platformio", conf_option_dir)
|
||||
if ensure_dir_exists and not os.path.isdir(state_dir):
|
||||
os.makedirs(state_dir)
|
||||
return os.path.join(state_dir, file_name)
|
||||
|
||||
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
|
||||
if not os.path.isdir(core_dir):
|
||||
os.makedirs(core_dir)
|
||||
self.path = os.path.join(core_dir, "appstate.json")
|
||||
self.path = resolve_state_path("core_dir", "appstate.json")
|
||||
self._storage = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
@ -248,9 +254,14 @@ def get_cid():
|
||||
cid = str(cid)
|
||||
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
set_state_item("cid", cid)
|
||||
set_state_item("created_at", int(time.time()))
|
||||
return cid
|
||||
|
||||
|
||||
def get_project_id(project_dir):
|
||||
return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
data = [
|
||||
"PlatformIO/%s" % __version__,
|
||||
@ -263,6 +274,8 @@ def get_user_agent():
|
||||
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
|
||||
data.append("Python/%s" % platform.python_version())
|
||||
data.append("Platform/%s" % platform.platform())
|
||||
if not get_setting("enable_telemetry"):
|
||||
data.append("Telemetry/0")
|
||||
return " ".join(data)
|
||||
|
||||
|
||||
|
platformio/assets/schema/library.json (new file, 487 lines)
@ -0,0 +1,487 @@
|
||||
{
|
||||
"$id": "https://example.com/library.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "library.json schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"maxLength": 50,
|
||||
"description": "A name of a library.\nMust be unique in the PlatformIO Registry\nShould be slug style for simplicity, consistency, and compatibility. Example: HelloWorld\nCan contain a-z, digits, and dashes (but not start/end with them)\nConsecutive dashes and [:;/,@<>] chars are not allowed.",
|
||||
"required": true
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"description": "A version of a current library source code. Can contain a-z, digits, dots or dash and should be Semantic Versioning compatible.",
|
||||
"required": true
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "The field helps users to identify and search for your library with a brief description. Describe the hardware devices (sensors, boards and etc.) which are suitable with it.",
|
||||
"required": true
|
||||
},
|
||||
"keywords": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Used for search by keyword. Helps to make your library easier to discover without people needing to know its name.\nThe keyword should be lowercased, can contain a-z, digits and dash (but not start/end with them). A list from the keywords can be specified with separator , or declared as Array.",
|
||||
"required": true
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "Home page of a library (if is different from repository url).",
|
||||
"required": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"git",
|
||||
"hg",
|
||||
"svn"
|
||||
],
|
||||
"description": "only “git”, “hg” or “svn” are supported"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "if is not specified, default branch will be used. This field will be ignored if tag/release exists with the value of version."
|
||||
}
|
||||
},
|
||||
"description": "The repository in which the source code can be found.",
|
||||
"required": false
|
||||
},
|
||||
"authors": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "An author contact information\nIf authors field is not defined, PlatformIO will try to fetch data from VCS provider (Github, Gitlab, etc) if repository is declared.",
|
||||
"required": false
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"description": "A SPDX license ID or SPDX Expression. You can check the full list of SPDX license IDs (see “Identifier” column).",
|
||||
"required": false
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible frameworks. The available framework names are defined in the Frameworks section.\nIf the library is compatible with the all frameworks, then do not declare this field or you use *",
|
||||
"required": false
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible development platforms. The available platform name are defined in Development Platforms section.\nIf the library is compatible with the all platforms, then do not declare this field or use *.\nPlatformIO does not check platforms for compatibility in default mode. See Compatibility Mode for details. If you need a strict checking for compatible platforms for a library, please set libCompatMode to strict.",
|
||||
"required": false
|
||||
},
|
||||
"headers": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "MyLibrary.h"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "FooCore.h, FooFeature.h"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of header files that can be included in a project source files using #include <...> directive.",
|
||||
"required": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"base": {
|
||||
"type": "string"
|
||||
},
|
||||
"files": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "A list of example patterns.",
|
||||
"required": "false"
|
||||
},
|
||||
"dependencies": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of dependent libraries that will be automatically installed.",
|
||||
"required": false
|
||||
},
|
||||
"export": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Export only files that matched declared patterns.\n* - matches everything\n? - matches any single character\n[seq] - matches any character in seq\n[!seq] - matches any character not in seq"
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Exclude the directories and files which match with exclude patterns."
|
||||
}
|
||||
},
|
||||
"description": "This option is useful if you need to exclude extra data (test code, docs, images, PDFs, etc). It allows one to reduce the size of the final archive.\nTo check which files will be included in the final packages, please use pio pkg pack command.",
|
||||
"required": false
|
||||
},
|
||||
"scripts": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script AFTER the package has been installed.\nRun a custom Python script located in the package “scripts” folder AFTER the package is installed. Please note that you don’t need to specify a Python interpreter for Python scripts"
|
||||
},
|
||||
"preuninstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script BEFORE the package is removed.\nRun a custom Bash script BEFORE the package is uninstalled. The script is declared as a list of command arguments and is located at the root of a package"
|
||||
}
|
||||
},
|
||||
"description": "Execute custom scripts during the special Package Management CLI life cycle events",
|
||||
"required": false
|
||||
},
|
||||
"build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"flags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Extra flags to control preprocessing, compilation, assembly, and linking processes. More details build_flags.\nKeep in mind when operating with the -I flag (directories to be searched for header files). The path should be relative to the root directory where the library.json manifest is located."
|
||||
},
|
||||
"unflags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Remove base/initial flags which were set by development platform. More details build_unflags."
|
||||
},
|
||||
"includeDir": {
|
||||
"type": "string",
|
||||
"description": "Custom directory to be searched for header files. A default value is include and means that folder is located at the root of a library.\nThe Library Dependency Finder (LDF) will pick a library automatically only when a project or other dependent libraries include any header file located in includeDir or srcDir.",
|
||||
"required": false
|
||||
},
|
||||
"srcDir": {
|
||||
"type": "string",
|
||||
"description": "Custom location of library source code. A default value is src and means that folder is located in the root of a library.",
|
||||
"required": "false"
|
||||
},
|
||||
"srcFilter": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Specify which source files should be included/excluded from build process. The path in filter should be relative to the srcDir option of a library.\nSee syntax for build_src_filter.\nPlease note that you can generate source filter “on-the-fly” using extraScript",
|
||||
"required": false
|
||||
},
|
||||
"extraScript": {
|
||||
"type": "string",
|
||||
"description": "Launch extra script before a build process.",
|
||||
"required": "false"
|
||||
},
|
||||
"libArchive": {
|
||||
"type": "boolean",
|
||||
"description": "Create an archive (*.a, static library) from the object files and link it into a firmware (program). This is default behavior of PlatformIO Build System (\"libArchive\": true).\nSetting \"libArchive\": false will instruct PlatformIO Build System to link object files directly (in-line). This could be useful if you need to override weak symbols defined in framework or other libraries.\nYou can disable library archiving globally using lib_archive option in “platformio.ini” (Project Configuration File).",
|
||||
"required": "false"
|
||||
},
|
||||
"libLDFMode": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"off"
|
||||
],
|
||||
"description": "“Manual mode”, does not process source files of a project and dependencies. Builds only the libraries that are specified in manifests (library.json, module.json) or using lib_deps option."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain"
|
||||
],
|
||||
"description": "[DEFAULT] Parses ALL C/C++ source files of the project and follows only by nested includes (#include ..., chain...) from the libraries. It also parses C, CC, CPP files from libraries which have the same name as included header file. Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep"
|
||||
],
|
||||
"description": "Parses ALL C/C++ source files of the project and parses ALL C/C++ source files of the each found dependency (recursively). Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain+"
|
||||
],
|
||||
"description": "The same behavior as for the chain but evaluates C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep+"
|
||||
],
|
||||
"description": "The same behavior as for the deep but evaluates C/C++ Preprocessor conditional syntax."
|
||||
}
|
||||
],
|
||||
"description": "Specify Library Dependency Finder Mode. See Dependency Finder Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"libCompatMode": {
|
||||
"type": "string",
|
||||
"description": "Specify Library Compatibility Mode. See Compatibility Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"builder": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"PlatformIOLibBuilder"
|
||||
],
|
||||
"description": "Default Builder"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"ArduinoLibBuilder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"MbedLibBuilder"
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "Override default PlatformIOLibBuilder with another builder.",
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
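For reference, a minimal manifest that satisfies the required fields of this schema (name, version, description, keywords) plus a few common optional ones; the library name, repository URL, and values below are made-up examples, written as a small Python sketch:

import json

manifest = {
    "name": "HelloWorld",
    "version": "1.0.0",
    "description": "A demo library that toggles an LED on supported boards.",
    "keywords": ["led", "demo"],
    "repository": {"type": "git", "url": "https://github.com/example/HelloWorld.git"},
    "frameworks": "*",
    "platforms": "*",
    "headers": "HelloWorld.h",
    "export": {"exclude": ["docs", "tests"]},
    "build": {"libLDFMode": "chain+"},
}

with open("library.json", "w", encoding="utf8") as fp:
    json.dump(manifest, fp, indent=2)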
|
@ -36,6 +36,8 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
@ -76,12 +78,17 @@ ATTRS{idVendor}=="28e9", ATTRS{idProduct}=="0189", MODE="0666", ENV{ID_MM_DEVICE
|
||||
# FireBeetle-ESP32
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7522", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#Wio Terminal
|
||||
# Wio Terminal
|
||||
ATTRS{idVendor}=="2886", ATTRS{idProduct}=="[08]02d", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
@ -168,3 +175,9 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
@ -28,7 +28,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import app, compat, fs
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
@ -38,7 +38,6 @@ AllowSubstExceptions(NameError)
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("PROJECT_CONFIG",),
|
||||
("PIOENV",),
|
||||
@ -99,6 +98,7 @@ if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
env.SConscriptChdir(False)
|
||||
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
@ -139,51 +139,36 @@ if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if compat.IS_WINDOWS and sys.version_info >= (3, 8) and os.getcwd().startswith("\\\\"):
|
||||
click.secho("!!! WARNING !!!\t\t" * 3, fg="red")
|
||||
click.secho(
|
||||
"Your project is located on a mapped network drive but the "
|
||||
"current command-line shell does not support the UNC paths.",
|
||||
fg="yellow",
|
||||
)
|
||||
click.secho(
|
||||
"Please move your project to a physical drive or check this workaround: "
|
||||
"https://bit.ly/3kuU5mP\n",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
|
||||
if env.GetOption("clean") or is_clean_all:
|
||||
env.PioClean(is_clean_all)
|
||||
env.Exit(0)
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(
|
||||
os.path.join(
|
||||
"$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
|
||||
"$BUILD_CACHE_DIR" if env.subst("$BUILD_CACHE_DIR") else "$BUILD_DIR",
|
||||
".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]),
|
||||
)
|
||||
)
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("pre"), exports="env")
|
||||
|
||||
if env.IsCleanTarget():
|
||||
env.CleanProject(fullclean=int(ARGUMENTS.get("FULLCLEAN", 0)))
|
||||
env.Exit(0)
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
@ -192,8 +177,7 @@ if "UPLOAD_FLAGS" in env:
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("post"), exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
@ -258,3 +242,9 @@ if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
)
|
||||
|
||||
Default("sizedata")
|
||||
|
||||
# issue #4604: process targets sequentially
|
||||
for index, target in enumerate(
|
||||
[t for t in COMMAND_LINE_TARGETS if not t.startswith("__")][1:]
|
||||
):
|
||||
env.Depends(target, COMMAND_LINE_TARGETS[index])
|
||||
|
@ -1,224 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright 2020 MongoDB Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included
|
||||
# in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
|
||||
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
|
||||
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
|
||||
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
|
||||
import SCons
|
||||
|
||||
from platformio.builder.tools.piobuild import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
|
||||
# http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation
|
||||
# database is, and why you might want one. The only user visible entry point here is
|
||||
# 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that
|
||||
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
|
||||
# which is the name that most clang tools search for by default.
|
||||
|
||||
# Is there a better way to do this than this global? Right now this exists so that the
|
||||
# emitter we add can record all of the things it emits, so that the scanner for the top level
|
||||
# compilation database can access the complete list, and also so that the writer has easy
|
||||
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
|
||||
# communicate more gracefully?
|
||||
__COMPILATION_DB_ENTRIES = []
|
||||
|
||||
|
||||
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
|
||||
# integrate with the cache, but there doesn't seem to be much call for it.
|
||||
class __CompilationDbNode(SCons.Node.Python.Value):
|
||||
def __init__(self, value):
|
||||
SCons.Node.Python.Value.__init__(self, value)
|
||||
self.Decider(changed_since_last_build_node)
|
||||
|
||||
|
||||
def changed_since_last_build_node(*args, **kwargs):
|
||||
"""Dummy decider to force always building"""
|
||||
return True
|
||||
|
||||
|
||||
def makeEmitCompilationDbEntry(comstr):
|
||||
"""
|
||||
Effectively this creates a lambda function to capture:
|
||||
* command line
|
||||
* source
|
||||
* target
|
||||
:param comstr: unevaluated command line
|
||||
:return: an emitter which has captured the above
|
||||
"""
|
||||
user_action = SCons.Action.Action(comstr)
|
||||
|
||||
def EmitCompilationDbEntry(target, source, env):
|
||||
"""
|
||||
This emitter will be added to each c/c++ object build to capture the info needed
|
||||
for clang tools
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:return: target(s), source(s)
|
||||
"""
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(
|
||||
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
|
||||
)
|
||||
|
||||
dbtarget = __CompilationDbNode(source)
|
||||
|
||||
entry = env.__COMPILATIONDB_Entry(
|
||||
target=dbtarget,
|
||||
source=[],
|
||||
__COMPILATIONDB_UTARGET=target,
|
||||
__COMPILATIONDB_USOURCE=source,
|
||||
__COMPILATIONDB_UACTION=user_action,
|
||||
__COMPILATIONDB_ENV=env,
|
||||
)
|
||||
|
||||
# Technically, these next two lines should not be required: it should be fine to
|
||||
# cache the entries. However, they don't seem to update properly. Since they are quick
|
||||
# to re-generate disable caching and sidestep this problem.
|
||||
env.AlwaysBuild(entry)
|
||||
env.NoCache(entry)
|
||||
|
||||
__COMPILATION_DB_ENTRIES.append(dbtarget)
|
||||
|
||||
return target, source
|
||||
|
||||
return EmitCompilationDbEntry
|
||||
|
||||
|
||||
def CompilationDbEntryAction(target, source, env, **kw):
|
||||
"""
|
||||
Create a dictionary with evaluated command line, target, source
|
||||
and store that info as an attribute on the target
|
||||
(Which has been stored in __COMPILATION_DB_ENTRIES array
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:param kw:
|
||||
:return: None
|
||||
"""
|
||||
|
||||
command = env["__COMPILATIONDB_UACTION"].strfunction(
|
||||
target=env["__COMPILATIONDB_UTARGET"],
|
||||
source=env["__COMPILATIONDB_USOURCE"],
|
||||
env=env["__COMPILATIONDB_ENV"],
|
||||
)
|
||||
|
||||
entry = {
|
||||
"directory": env.Dir("#").abspath,
|
||||
"command": command,
|
||||
"file": str(env["__COMPILATIONDB_USOURCE"][0]),
|
||||
}
|
||||
|
||||
target[0].write(entry)
|
||||
|
||||
|
||||
def WriteCompilationDb(target, source, env):
|
||||
entries = []
|
||||
|
||||
for s in __COMPILATION_DB_ENTRIES:
|
||||
item = s.read()
|
||||
item["file"] = os.path.abspath(item["file"])
|
||||
entries.append(item)
|
||||
|
||||
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
|
||||
json.dump(
|
||||
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
|
||||
)
|
||||
|
||||
|
||||
def ScanCompilationDb(node, env, path):
|
||||
return __COMPILATION_DB_ENTRIES
|
||||
|
||||
|
||||
def generate(env, **kwargs):
|
||||
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
|
||||
|
||||
env["COMPILATIONDB_COMSTR"] = kwargs.get(
|
||||
"COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
|
||||
)
|
||||
|
||||
components_by_suffix = itertools.chain(
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_C_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_CXX_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_ASM_EXT],
|
||||
[(static_obj, SCons.Defaults.StaticObjectEmitter, "$ASCOM")],
|
||||
),
|
||||
)
|
||||
|
||||
for entry in components_by_suffix:
|
||||
suffix = entry[0]
|
||||
builder, base_emitter, command = entry[1]
|
||||
|
||||
# Assumes a dictionary emitter
|
||||
emitter = builder.emitter[suffix]
|
||||
builder.emitter[suffix] = SCons.Builder.ListEmitter(
|
||||
[emitter, makeEmitCompilationDbEntry(command)]
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(CompilationDbEntryAction, None),
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
|
||||
target_scanner=SCons.Scanner.Scanner(
|
||||
function=ScanCompilationDb, node_class=None
|
||||
),
|
||||
)
|
||||
|
||||
def CompilationDatabase(env, target):
|
||||
result = env.__COMPILATIONDB_Database(target=target, source=[])
|
||||
|
||||
env.AlwaysBuild(result)
|
||||
env.NoCache(result)
|
||||
|
||||
return result
|
||||
|
||||
env.AddMethod(CompilationDatabase, "CompilationDatabase")
|
||||
|
||||
|
||||
def exists(env):
|
||||
return True
|
@ -26,6 +26,7 @@ from SCons.Script import SConscript # pylint: disable=import-error
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
@ -53,11 +54,12 @@ def GetBuildType(env):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return "+".join(modes or ["release"])
|
||||
return ", ".join(modes or ["release"])
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
env.ProcessProgramDeps()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
env.ProcessProjectDeps()
|
||||
|
||||
# append into the beginning a main LD script
|
||||
@ -125,9 +127,23 @@ def ProcessProgramDeps(env):
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS and env.get(
|
||||
"COMPILATIONDB_INCLUDE_TOOLCHAIN"
|
||||
):
|
||||
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" not in COMMAND_LINE_TARGETS:
|
||||
return
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]) or '"' in env[cmd]:
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
if " " in env[cmd]: # issue #4998: Space in compilator path
|
||||
env[cmd] = f'"{env[cmd]}"'
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
@ -200,13 +216,19 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
p = env.subst(p)
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative LIBs
|
||||
for i, l in enumerate(result.get("LIBS", [])):
|
||||
if isinstance(l, FS.File):
|
||||
result["LIBS"][i] = os.path.abspath(l.get_path())
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.File(os.path.abspath(f[1].get_path())))
|
||||
result["CCFLAGS"][i] = (f[0], env.subst(f[1].get_path()))
|
||||
|
||||
return result
|
||||
|
||||
@ -239,7 +261,7 @@ def ProcessUnFlags(env, flags):
|
||||
for scope in unflag_scopes:
|
||||
for unflags in parsed.values():
|
||||
for unflag in unflags:
|
||||
for current in env.get(scope, []):
|
||||
for current in list(env.get(scope, [])):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
not isinstance(unflag, (tuple, list))
|
||||
@ -365,6 +387,7 @@ def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
|
@ -25,7 +25,6 @@ from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
|
||||
|
||||
class InoToCPPConverter:
|
||||
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
@ -103,7 +102,7 @@ class InoToCPPConverter:
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = self._main_ino + ".cpp"
|
||||
out_file = re.sub(r"[\"\'\;]+", "", self._main_ino) + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
contents = self.read_safe_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
|
@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import glob
|
||||
import os
|
||||
|
||||
@ -30,12 +29,7 @@ def IsIntegrationDump(_):
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
result["build"].extend(
|
||||
[
|
||||
env.subst("$PROJECT_INCLUDE_DIR"),
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
)
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
@ -139,9 +133,9 @@ def dump_svd_path(env):
|
||||
return None
|
||||
|
||||
|
||||
def _subst_cmd(env, cmd):
|
||||
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
|
||||
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
|
||||
def _split_flags_string(env, s):
|
||||
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
|
||||
return [str(arg) for arg in args]
|
||||
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
@ -154,8 +148,8 @@ def DumpIntegrationData(*args):
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
|
@ -29,7 +29,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from platformio import exception, fs
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.http import HTTPClientError, InternetIsOffline
|
||||
from platformio.http import HTTPClientError, InternetConnectionError
|
||||
from platformio.package.exception import (
|
||||
MissingPackageManifestError,
|
||||
UnknownPackageError,
|
||||
@ -39,7 +39,7 @@ from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@ -109,7 +109,6 @@ class LibBuilderFactory:
|
||||
|
||||
|
||||
class LibBuilderBase:
|
||||
|
||||
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
|
||||
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
|
||||
# Max depth of nested includes:
|
||||
@ -298,21 +297,22 @@ class LibBuilderBase:
|
||||
with fs.cd(self.path):
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
self.env.SConscriptChdir(True)
|
||||
self.env.SConscript(
|
||||
os.path.abspath(self.extra_script),
|
||||
exports={"env": self.env, "pio_lib_builder": self},
|
||||
)
|
||||
self.env.SConscriptChdir(False)
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
|
||||
def process_dependencies(self):
|
||||
if not self.dependencies or self._deps_are_processed:
|
||||
return
|
||||
self._deps_are_processed = True
|
||||
for item in self.dependencies:
|
||||
for dependency in self.dependencies:
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item["name"] != lb.name:
|
||||
if not lb.is_dependency_compatible(dependency):
|
||||
continue
|
||||
found = True
|
||||
if lb not in self.depbuilders:
|
||||
@ -322,9 +322,28 @@ class LibBuilderBase:
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write(
|
||||
"Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item["name"], self.name)
|
||||
"library\n" % (dependency["name"], self.name)
|
||||
)
|
||||
|
||||
def is_dependency_compatible(self, dependency):
|
||||
pkg = PackageItem(self.path)
|
||||
qualifiers = {"name": self.name, "version": self.version}
|
||||
if pkg.metadata:
|
||||
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
|
||||
if pkg.metadata.spec and pkg.metadata.spec.owner:
|
||||
qualifiers["owner"] = pkg.metadata.spec.owner
|
||||
dep_qualifiers = {
|
||||
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
|
||||
}
|
||||
if (
|
||||
"version" in dep_qualifiers
|
||||
and not PackageSpec(dep_qualifiers["version"]).requirements
|
||||
):
|
||||
del dep_qualifiers["version"]
|
||||
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
|
||||
PackageCompatibility(**qualifiers)
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
@ -477,6 +496,7 @@ class LibBuilderBase:
|
||||
self.is_built = True
|
||||
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.ProcessCompileDbToolchainOption()
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
@ -791,7 +811,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
if path not in include_dirs and path not in self.envorigin.get(
|
||||
"CPPPATH", []
|
||||
):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
@ -982,7 +1004,11 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
try:
|
||||
lm.install(spec)
|
||||
did_install = True
|
||||
except (HTTPClientError, UnknownPackageError, InternetIsOffline) as exc:
|
||||
except (
|
||||
HTTPClientError,
|
||||
UnknownPackageError,
|
||||
InternetConnectionError,
|
||||
) as exc:
|
||||
click.secho("Warning! %s" % exc, fg="yellow")
|
||||
|
||||
# reset cache
|
||||
@ -1133,6 +1159,8 @@ def ConfigureProjectLibBuilder(env):
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
# refill found libs after recursive search
|
||||
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
|
||||
for lb in lib_builders:
|
||||
for deplb in lb.depbuilders[:]:
|
||||
if deplb not in found_lbs:
|
||||
@ -1157,7 +1185,7 @@ def ConfigureProjectLibBuilder(env):
|
||||
click.echo("Path: %s" % lb.path, nl=False)
|
||||
click.echo(")", nl=False)
|
||||
click.echo("")
|
||||
if lb.depbuilders:
|
||||
if lb.verbose and lb.depbuilders:
|
||||
_print_deps_tree(lb, level + 1)
|
||||
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
|
@ -23,10 +23,10 @@ from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
|
||||
# There are the next limits depending on a platform:
|
||||
# - Windows = 8192
|
||||
# - Windows = 8191
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
|
||||
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
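The ~512 reserved characters explain why MAX_LINE_LENGTH sits below the raw OS limit. A hypothetical sketch of the usual fallback when a command still overflows the limit, spilling arguments into a GCC-style @response file (the helper name and the @file convention are assumptions, not code from this module):

import os
import tempfile

IS_WINDOWS = os.name == "nt"
# Mirror of the limit above: the OS maximum minus ~512 reserved characters.
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512


def maybe_use_response_file(cmd):
    # Leave short commands untouched.
    if len(cmd) <= MAX_LINE_LENGTH:
        return cmd
    # Spill everything after the program name into a temporary @response file,
    # which GCC-compatible tools expand back into arguments.
    program, _, args = cmd.partition(" ")
    fd, path = tempfile.mkstemp(suffix=".rsp", text=True)
    with os.fdopen(fd, "w") as fp:
        fp.write(args)
    return "%s @%s" % (program, path)


print(maybe_use_response_file("gcc " + "-DDEMO " * 4))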
|
||||
|
||||
|
@ -20,19 +20,23 @@ from platformio.proc import exec_command
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def GetCompilerType(env):
|
||||
if env.subst("$CC").endswith("-gcc"):
|
||||
def GetCompilerType(env): # pylint: disable=too-many-return-statements
|
||||
CC = env.subst("$CC")
|
||||
if CC.endswith("-gcc"):
|
||||
return "gcc"
|
||||
if os.path.basename(CC) == "clang":
|
||||
return "clang"
|
||||
try:
|
||||
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
result = exec_command([CC, "-v"], env=sysenv)
|
||||
except OSError:
|
||||
return None
|
||||
if result["returncode"] != 0:
|
||||
return None
|
||||
output = "".join([result["out"], result["err"]]).lower()
|
||||
if "clang" in output and "LLVM" in output:
|
||||
if "clang version" in output:
|
||||
return "clang"
|
||||
if "gcc" in output:
|
||||
return "gcc"
|
||||
|
@ -33,9 +33,7 @@ from platformio.project.config import ProjectOptions
|
||||
@util.memoized()
|
||||
def _PioPlatform():
|
||||
env = DefaultEnvironment()
|
||||
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
|
||||
p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS)
|
||||
return p
|
||||
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def PioPlatform(_):
|
||||
@ -77,9 +75,11 @@ def LoadPioPlatform(env):
|
||||
continue
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path,
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
|
@ -53,7 +53,7 @@ def _get_symbol_locations(env, elf_path, addrs):
|
||||
locations = [line for line in result["out"].split("\n") if line]
|
||||
assert len(addrs) == len(locations)
|
||||
|
||||
return dict(zip(addrs, [l.strip() for l in locations]))
|
||||
return dict(zip(addrs, [loc.strip() for loc in locations]))
|
||||
|
||||
|
||||
def _get_demangled_names(env, mangled_names):
|
||||
@ -73,31 +73,7 @@ def _get_demangled_names(env, mangled_names):
|
||||
)
|
||||
|
||||
|
||||
def _determine_section(sections, symbol_addr):
|
||||
for section, info in sections.items():
|
||||
if not _is_flash_section(info) and not _is_ram_section(info):
|
||||
continue
|
||||
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
|
||||
return section
|
||||
return "unknown"
|
||||
|
||||
|
||||
def _is_ram_section(section):
|
||||
return (
|
||||
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
|
||||
and section.get("flags", "") == "WA"
|
||||
)
|
||||
|
||||
|
||||
def _is_flash_section(section):
|
||||
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
|
||||
|
||||
|
||||
def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
|
||||
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
|
||||
|
||||
|
||||
def _collect_sections_info(elffile):
|
||||
def _collect_sections_info(env, elffile):
|
||||
sections = {}
|
||||
for section in elffile.iter_sections():
|
||||
if section.is_null() or section.name.startswith(".debug"):
|
||||
@ -107,13 +83,18 @@ def _collect_sections_info(elffile):
|
||||
section_flags = describe_sh_flags(section["sh_flags"])
|
||||
section_size = section.data_size
|
||||
|
||||
sections[section.name] = {
|
||||
section_data = {
|
||||
"name": section.name,
|
||||
"size": section_size,
|
||||
"start_addr": section["sh_addr"],
|
||||
"type": section_type,
|
||||
"flags": section_flags,
|
||||
}
|
||||
|
||||
sections[section.name] = section_data
|
||||
sections[section.name]["in_flash"] = env.pioSizeIsFlashSection(section_data)
|
||||
sections[section.name]["in_ram"] = env.pioSizeIsRamSection(section_data)
|
||||
|
||||
return sections
|
||||
|
||||
|
||||
@ -136,7 +117,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
symbol_size = s["st_size"]
|
||||
symbol_type = symbol_info["type"]
|
||||
|
||||
if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
|
||||
if not env.pioSizeIsValidSymbol(s.name, symbol_type, symbol_addr):
|
||||
continue
|
||||
|
||||
symbol = {
|
||||
@ -145,7 +126,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
"name": s.name,
|
||||
"type": symbol_type,
|
||||
"size": symbol_size,
|
||||
"section": _determine_section(sections, symbol_addr),
|
||||
"section": env.pioSizeDetermineSection(sections, symbol_addr),
|
||||
}
|
||||
|
||||
if s.name.startswith("_Z"):
|
||||
@ -175,12 +156,36 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
return symbols
|
||||
|
||||
|
||||
def _calculate_firmware_size(sections):
|
||||
def pioSizeDetermineSection(_, sections, symbol_addr):
|
||||
for section, info in sections.items():
|
||||
if not info.get("in_flash", False) and not info.get("in_ram", False):
|
||||
continue
|
||||
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
|
||||
return section
|
||||
return "unknown"
|
||||
|
||||
|
||||
def pioSizeIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
|
||||
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
|
||||
|
||||
|
||||
def pioSizeIsRamSection(_, section):
|
||||
return (
|
||||
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
|
||||
and section.get("flags", "") == "WA"
|
||||
)
|
||||
|
||||
|
||||
def pioSizeIsFlashSection(_, section):
|
||||
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
|
||||
|
||||
|
||||
def pioSizeCalculateFirmwareSize(_, sections):
|
||||
flash_size = ram_size = 0
|
||||
for section_info in sections.values():
|
||||
if _is_flash_section(section_info):
|
||||
if section_info.get("in_flash", False):
|
||||
flash_size += section_info.get("size", 0)
|
||||
if _is_ram_section(section_info):
|
||||
if section_info.get("in_ram", False):
|
||||
ram_size += section_info.get("size", 0)
|
||||
|
||||
return ram_size, flash_size
|
||||
@ -210,8 +215,8 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
sys.stderr.write("Elf file doesn't contain DWARF information")
|
||||
env.Exit(1)
|
||||
|
||||
sections = _collect_sections_info(elffile)
|
||||
firmware_ram, firmware_flash = _calculate_firmware_size(sections)
|
||||
sections = _collect_sections_info(env, elffile)
|
||||
firmware_ram, firmware_flash = env.pioSizeCalculateFirmwareSize(sections)
|
||||
data["memory"]["total"] = {
|
||||
"ram_size": firmware_ram,
|
||||
"flash_size": firmware_flash,
|
||||
@ -226,9 +231,11 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
|
||||
symbol_size = symbol.get("size", 0)
|
||||
section = sections.get(symbol.get("section", ""), {})
|
||||
if _is_ram_section(section):
|
||||
if not section:
|
||||
continue
|
||||
if section.get("in_ram", False):
|
||||
files[file_path]["ram_size"] += symbol_size
|
||||
if _is_flash_section(section):
|
||||
if section.get("in_flash", False):
|
||||
files[file_path]["flash_size"] += symbol_size
|
||||
|
||||
files[file_path]["symbols"].append(symbol)
|
||||
@ -250,5 +257,10 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(pioSizeIsRamSection)
|
||||
env.AddMethod(pioSizeIsFlashSection)
|
||||
env.AddMethod(pioSizeCalculateFirmwareSize)
|
||||
env.AddMethod(pioSizeDetermineSection)
|
||||
env.AddMethod(pioSizeIsValidSymbol)
|
||||
env.AddMethod(DumpSizeData)
|
||||
return env
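The new pioSize* environment methods keep the ELF section classification rules in one place. A standalone sketch that applies the same rules with pyelftools, assuming it is installed (the firmware path is a made-up example):

from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile

with open("firmware.elf", "rb") as fp:  # made-up path
    for section in ELFFile(fp).iter_sections():
        info = {
            "type": section["sh_type"],
            "flags": describe_sh_flags(section["sh_flags"]),
        }
        # Same predicates as pioSizeIsRamSection / pioSizeIsFlashSection above.
        in_ram = info["type"] in ("SHT_NOBITS", "SHT_PROGBITS") and info["flags"] == "WA"
        in_flash = info["type"] == "SHT_PROGBITS" and "A" in info["flags"]
        print(section.name, section.data_size, "ram" if in_ram else "", "flash" if in_flash else "")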
|
||||
|
@ -27,7 +27,11 @@ def VerboseAction(_, act, actstr):
|
||||
return Action(act, actstr)
|
||||
|
||||
|
||||
def PioClean(env, clean_all=False):
|
||||
def IsCleanTarget(env):
|
||||
return env.GetOption("clean")
|
||||
|
||||
|
||||
def CleanProject(env, fullclean=False):
|
||||
def _relpath(path):
|
||||
if compat.IS_WINDOWS:
|
||||
prefix = os.getcwd()[:2].lower()
|
||||
@ -51,13 +55,13 @@ def PioClean(env, clean_all=False):
|
||||
else:
|
||||
print("Build environment is clean")
|
||||
|
||||
if clean_all and os.path.isdir(libdeps_dir):
|
||||
if fullclean and os.path.isdir(libdeps_dir):
|
||||
_clean_dir(libdeps_dir)
|
||||
|
||||
print("Done cleaning")
|
||||
|
||||
|
||||
def AddTarget( # pylint: disable=too-many-arguments
|
||||
def AddTarget( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
env,
|
||||
name,
|
||||
dependencies,
|
||||
@ -103,7 +107,8 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(IsCleanTarget)
|
||||
env.AddMethod(CleanProject)
|
||||
env.AddMethod(AddTarget)
|
||||
env.AddMethod(AddPlatformTarget)
|
||||
env.AddMethod(AddCustomTarget)
|
||||
|
@ -218,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if data_max_size and data_size > data_max_size:
|
||||
sys.stderr.write(
|
||||
"Warning! The data size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
|
||||
)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
|
@ -19,7 +19,6 @@ import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from os.path import dirname, isfile
|
||||
from time import time
|
||||
|
||||
import click
|
||||
@ -38,18 +37,15 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--pattern", multiple=True)
|
||||
@click.option("--pattern", multiple=True, hidden=True)
|
||||
@click.option("-f", "--src-filters", multiple=True)
|
||||
@click.option("--flags", multiple=True)
|
||||
@click.option(
|
||||
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
|
||||
@ -63,10 +59,11 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
src_filters,
|
||||
pattern,
|
||||
flags,
|
||||
severity,
|
||||
@ -79,7 +76,7 @@ def cli(
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# find project directory on upper level
|
||||
if isfile(project_dir):
|
||||
if os.path.isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
results = []
|
||||
@ -105,18 +102,41 @@ def cli(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_patterns = [
|
||||
default_src_filters = []
|
||||
for d in (
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
]
|
||||
):
|
||||
try:
|
||||
default_src_filters.append("+<%s>" % os.path.relpath(d))
|
||||
except ValueError as exc:
|
||||
# On Windows if sources are located on a different logical drive
|
||||
if not json_output and not silent:
|
||||
click.echo(
|
||||
"Error: Project cannot be analyzed! The project folder `%s`"
|
||||
" is located on a different logical drive\n" % d
|
||||
)
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
env_src_filters = (
|
||||
src_filters
|
||||
or pattern
|
||||
or env_options.get(
|
||||
"check_src_filters",
|
||||
env_options.get("check_patterns", default_src_filters),
|
||||
)
|
||||
)
|
||||
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
silent=silent,
|
||||
patterns=pattern or env_options.get("check_patterns", default_patterns),
|
||||
src_filters=env_src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity"),
|
||||
severity=(
|
||||
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity")
|
||||
),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
platform_packages=env_options.get("platform_packages"),
|
||||
)
|
||||
@ -129,14 +149,16 @@ def cli(
|
||||
print_processing_header(tool, envname, env_dump)
|
||||
|
||||
ct = CheckToolFactory.new(
|
||||
tool, project_dir, config, envname, tool_options
|
||||
tool, os.getcwd(), config, envname, tool_options
|
||||
)
|
||||
|
||||
result = {"env": envname, "tool": tool, "duration": time()}
|
||||
rc = ct.check(
|
||||
on_defect_callback=None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
on_defect_callback=(
|
||||
None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
)
|
||||
)
|
||||
|
||||
result["defects"] = ct.get_defects()
|
||||
@ -227,12 +249,12 @@ def collect_component_stats(result):
|
||||
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
|
||||
|
||||
for defect in result.get("defects", []):
|
||||
component = dirname(defect.file) or defect.file
|
||||
component = os.path.dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = dirname(component)
|
||||
component = os.path.dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
||||
return components
|
||||
@ -265,7 +287,7 @@ def print_defects_stats(results):
|
||||
tabular_data.append(total)
|
||||
|
||||
headers = ["Component"]
|
||||
headers.extend([l.upper() for l in severity_labels])
|
||||
headers.extend([label.upper() for label in severity_labels])
|
||||
headers = [click.style(h, bold=True) for h in headers]
|
||||
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
|
||||
click.echo()
|
||||
|
@ -16,6 +16,7 @@ import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
# pylint: disable=too-many-instance-attributes, redefined-builtin
|
||||
@ -23,13 +24,12 @@ from platformio.project.helpers import get_project_dir
|
||||
|
||||
|
||||
class DefectItem:
|
||||
|
||||
SEVERITY_HIGH = 1
|
||||
SEVERITY_MEDIUM = 2
|
||||
SEVERITY_LOW = 4
|
||||
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
|
||||
|
||||
def __init__(
|
||||
def __init__( # pylint: disable=too-many-positional-arguments
|
||||
self,
|
||||
severity,
|
||||
category,
|
||||
@ -79,7 +79,7 @@ class DefectItem:
|
||||
for key, value in DefectItem.SEVERITY_LABELS.items():
|
||||
if label == value:
|
||||
return key
|
||||
raise Exception("Unknown severity label -> %s" % label)
|
||||
raise PlatformioException("Unknown severity label -> %s" % label)
|
||||
|
||||
def as_dict(self):
|
||||
return {
|
||||
|
@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
@ -30,6 +29,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
self.options = options
|
||||
self.project_dir = project_dir
|
||||
self.cc_flags = []
|
||||
self.cxx_flags = []
|
||||
self.cpp_includes = []
|
||||
@ -41,7 +41,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data(project_dir)
|
||||
self._load_cpp_data()
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
@ -56,12 +56,12 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self, project_dir):
|
||||
data = load_build_metadata(project_dir, self.envname)
|
||||
def _load_cpp_data(self):
|
||||
data = load_build_metadata(self.project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
|
||||
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
|
||||
self.cc_flags = data.get("cc_flags", [])
|
||||
self.cxx_flags = data.get("cxx_flags", [])
|
||||
self.cpp_includes = self._dump_includes(data.get("includes", {}))
|
||||
self.cpp_defines = data.get("defines", [])
|
||||
self.cc_path = data.get("cc_path")
|
||||
@ -99,6 +99,13 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
|
||||
if result["returncode"] != 0:
|
||||
click.echo("Warning: Failed to extract toolchain defines!")
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
@@ -201,7 +208,7 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
return result

@staticmethod
def get_project_target_files(patterns):
def get_project_target_files(project_dir, src_filters):
c_extension = (".c",)
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
header_extensions = (".h", ".hh", ".hpp", ".hxx")
@@ -216,13 +223,9 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
elif path.endswith(cpp_extensions):
result["c++"].append(os.path.abspath(path))

for pattern in patterns:
for item in glob.glob(pattern, recursive=True):
if not os.path.isdir(item):
_add_file(item)
for root, _, files in os.walk(item, followlinks=True):
for f in files:
_add_file(os.path.join(root, f))
src_filters = normalize_src_filters(src_filters)
for f in fs.match_src_files(project_dir, src_filters):
_add_file(f)

return result

@@ -243,3 +246,22 @@ class CheckToolBase: # pylint: disable=too-many-instance-attributes
self.clean_up()

return self._bad_input


#
# Helpers
#


def normalize_src_filters(src_filters):
def _normalize(src_filters):
return (
src_filters
if src_filters.startswith(("+<", "-<"))
else "+<%s>" % src_filters
)

if isinstance(src_filters, (list, tuple)):
return " ".join([_normalize(f) for f in src_filters])

return _normalize(src_filters)

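For reference, a small self-contained sketch of how the new `normalize_src_filters` helper folds plain paths and ready-made filter expressions into one source-filter string. The helper body is copied from the hunk above; the sample inputs are invented.

```python
def normalize_src_filters(src_filters):
    # wrap bare paths as "+<path>", keep entries that already use +<...> / -<...>
    def _normalize(src_filters):
        return (
            src_filters
            if src_filters.startswith(("+<", "-<"))
            else "+<%s>" % src_filters
        )

    if isinstance(src_filters, (list, tuple)):
        return " ".join([_normalize(f) for f in src_filters])

    return _normalize(src_filters)


# hypothetical values for `--src-filters` / legacy `--pattern` options
assert normalize_src_filters("src") == "+<src>"
assert normalize_src_filters(["+<src>", "-<tests>", "lib"]) == "+<src> -<tests> +<lib>"
```
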
@ -64,7 +64,9 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
):
|
||||
cmd.append("--checks=*")
|
||||
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
|
||||
src_files = []
|
||||
for items in project_files.values():
|
||||
|
@ -96,7 +96,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
@ -214,7 +214,9 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
|
||||
return
|
||||
|
||||
for files in self.get_project_target_files(self.options["patterns"]).values():
|
||||
for files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).values():
|
||||
for f in files:
|
||||
dump_file = f + ".dump"
|
||||
if os.path.isfile(dump_file):
|
||||
@ -243,7 +245,9 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
src_files_scope = ("c", "c++")
|
||||
if not any(project_files[t] for t in src_files_scope):
|
||||
click.echo("Error: Nothing to check.")
|
||||
|
@ -227,7 +227,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.options["patterns"]
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
|
@ -19,7 +19,6 @@ import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@ -64,6 +63,21 @@ class PlatformioCLI(click.MultiCommand):
|
||||
]
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def reveal_cmd_path_args(cls, ctx):
|
||||
result = []
|
||||
group = ctx.command
|
||||
args = cls.leftover_args[::]
|
||||
while args:
|
||||
cmd_name = args.pop(0)
|
||||
next_group = group.get_command(ctx, cmd_name)
|
||||
if next_group:
|
||||
group = next_group
|
||||
result.append(cmd_name)
|
||||
if not hasattr(group, "get_command"):
|
||||
break
|
||||
return result
|
||||
|
||||
def invoke(self, ctx):
|
||||
PlatformioCLI.leftover_args = ctx.args
|
||||
if hasattr(ctx, "protected_args"):
|
||||
|
@ -42,7 +42,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
grpboards[board["platform"]].append(board)
|
||||
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
for platform, boards in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
|
@ -51,21 +51,19 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=tempfile.mkdtemp,
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
|
||||
ctx,
|
||||
src,
|
||||
lib,
|
||||
@ -109,8 +107,8 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
ctx.invoke(
|
||||
project_init_cmd,
|
||||
project_dir=build_dir,
|
||||
board=board,
|
||||
project_option=project_option,
|
||||
boards=board,
|
||||
project_options=project_option,
|
||||
)
|
||||
|
||||
# process project
|
||||
|
@ -65,9 +65,7 @@ def invoke_command(ctx, cmd, **kwargs):
|
||||
"--storage-dir",
|
||||
multiple=True,
|
||||
default=None,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
help="Manage custom library storage",
|
||||
)
|
||||
@click.option(
|
||||
@ -154,7 +152,7 @@ def cli(ctx, **options):
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
def lib_install( # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
click.secho(
|
||||
@ -212,7 +210,7 @@ def lib_uninstall(ctx, libraries, save, silent):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_update( # pylint: disable=too-many-arguments
|
||||
def lib_update( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
@ -159,7 +159,7 @@ def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branc
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
@click.pass_context
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
def platform_install( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx,
|
||||
platforms,
|
||||
with_package,
|
||||
@ -224,7 +224,7 @@ def platform_uninstall(ctx, platforms):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
def platform_update( # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
|
||||
ctx, platforms, only_check, dry_run, silent, json_output, **_
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
app.reset_settings()
click.secho("The settings have been reseted!", fg="green")
click.secho("The settings have been reset!", fg="green")
ctx.invoke(settings_get)

@ -13,24 +13,35 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from zipfile import ZipFile
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.dependencies import get_pip_dependencies
|
||||
from platformio.http import fetch_remote_content
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.proc import exec_command, get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
from platformio.proc import get_pythonexe_path
|
||||
|
||||
PYPI_JSON_URL = "https://pypi.org/pypi/platformio/json"
|
||||
DEVELOP_ZIP_URL = "https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
DEVELOP_INIT_SCRIPT_URL = (
|
||||
"https://raw.githubusercontent.com/platformio/platformio-core"
|
||||
"/develop/platformio/__init__.py"
|
||||
)
|
||||
|
||||
|
||||
@click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version")
|
||||
@click.option("--dev", is_flag=True, help="Use development branch")
|
||||
def cli(dev):
|
||||
@click.option("--only-dependencies", is_flag=True)
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def cli(dev, only_dependencies, verbose):
|
||||
if only_dependencies:
|
||||
return upgrade_pip_dependencies(verbose)
|
||||
|
||||
update_core_packages()
|
||||
|
||||
if not dev and __version__ == get_latest_version():
|
||||
return click.secho(
|
||||
"You're up-to-date!\nPlatformIO %s is currently the "
|
||||
@ -38,29 +49,35 @@ def cli(dev):
|
||||
fg="green",
|
||||
)
|
||||
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
click.secho("Please wait while upgrading PlatformIO Core ...", fg="yellow")
|
||||
|
||||
python_exe = get_pythonexe_path()
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (
|
||||
["pip", "install", "--upgrade", download_dist_package(to_develop)],
|
||||
["platformio", "--version"],
|
||||
)
|
||||
pkg_spec = DEVELOP_ZIP_URL if to_develop else "platformio"
|
||||
|
||||
cmd = None
|
||||
r = {}
|
||||
try:
|
||||
for cmd in cmds:
|
||||
cmd = [get_pythonexe_path(), "-m"] + cmd
|
||||
r = exec_command(cmd)
|
||||
# PIO Core
|
||||
subprocess.run(
|
||||
[python_exe, "-m", "pip", "install", "--upgrade", pkg_spec],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE if not verbose else None,
|
||||
)
|
||||
|
||||
# try pip with disabled cache
|
||||
if r["returncode"] != 0 and cmd[2] == "pip":
|
||||
cmd.insert(3, "--no-cache-dir")
|
||||
r = exec_command(cmd)
|
||||
# PyPI dependencies
|
||||
subprocess.run(
|
||||
[python_exe, "-m", "platformio", "upgrade", "--only-dependencies"],
|
||||
check=False,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
|
||||
assert r["returncode"] == 0
|
||||
assert "version" in r["out"]
|
||||
actual_version = r["out"].strip().split("version", 1)[1].strip()
|
||||
# Check version
|
||||
output = subprocess.run(
|
||||
[python_exe, "-m", "platformio", "--version"],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
).stdout.decode()
|
||||
assert "version" in output
|
||||
actual_version = output.split("version", 1)[1].strip()
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s" % actual_version,
|
||||
fg="green",
|
||||
@ -71,52 +88,35 @@ def cli(dev):
|
||||
click.secho(
|
||||
"Warning! Please restart IDE to affect PIO Home changes", fg="yellow"
|
||||
)
|
||||
except Exception as exc:
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(exc)])) from exc
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
|
||||
click.secho(
|
||||
"""
|
||||
-----------------
|
||||
Permission denied
|
||||
-----------------
|
||||
You need the `sudo` permission to install Python packages. Try
|
||||
|
||||
> sudo pip install -U platformio
|
||||
|
||||
WARNING! Don't use `sudo` for the rest PlatformIO commands.
|
||||
""",
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
raise exception.ReturnErrorCode(1)
|
||||
raise exception.UpgradeError("\n".join([str(cmd), r["out"], r["err"]]))
|
||||
except (AssertionError, subprocess.CalledProcessError) as exc:
|
||||
click.secho(
|
||||
"\nWarning!!! Could not automatically upgrade the PlatformIO Core.",
|
||||
fg="red",
|
||||
)
|
||||
click.secho(
|
||||
"Please upgrade it manually using the following command:\n",
|
||||
fg="red",
|
||||
)
|
||||
click.secho(f'"{python_exe}" -m pip install -U {pkg_spec}\n', fg="cyan")
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def download_dist_package(to_develop):
|
||||
if not to_develop:
|
||||
return "platformio"
|
||||
dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
cache_dir = get_project_cache_dir()
|
||||
if not os.path.isdir(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "wb") as fp:
|
||||
r = exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
|
||||
)
|
||||
assert r["returncode"] == 0
|
||||
# check ZIP structure
|
||||
with ZipFile(pkg_name) as zp:
|
||||
assert zp.testzip() is None
|
||||
return pkg_name
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return dl_url
|
||||
def upgrade_pip_dependencies(verbose):
subprocess.run(
[
get_pythonexe_path(),
"-m",
"pip",
"install",
"--upgrade",
"pip",
*get_pip_dependencies(),
],
check=True,
stdout=subprocess.PIPE if not verbose else None,
)
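A hedged sketch of the manual fallback this new flow suggests when the automatic `pip install --upgrade` step fails: the package spec mirrors the hunk above (PyPI `platformio` for releases, the develop ZIP for `--dev` upgrades), and the command is the same one the upgrade command prints when it cannot upgrade itself.

```python
import subprocess
import sys

DEVELOP_ZIP_URL = "https://github.com/platformio/platformio-core/archive/develop.zip"


def manual_upgrade(dev=False):
    # equivalent of: "<python>" -m pip install -U <pkg_spec>
    pkg_spec = DEVELOP_ZIP_URL if dev else "platformio"
    subprocess.run(
        [sys.executable, "-m", "pip", "install", "--upgrade", pkg_spec],
        check=True,
    )
```
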
|
||||
|
||||
def get_latest_version():
|
||||
@ -133,10 +133,7 @@ def get_latest_version():
|
||||
|
||||
def get_develop_latest_version():
|
||||
version = None
|
||||
content = fetch_remote_content(
|
||||
"https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/platformio/__init__.py"
|
||||
)
|
||||
content = fetch_remote_content(DEVELOP_INIT_SCRIPT_URL)
|
||||
for line in content.split("\n"):
|
||||
line = line.strip()
|
||||
if not line.startswith("VERSION"):
|
||||
@ -153,5 +150,5 @@ def get_develop_latest_version():
|
||||
|
||||
|
||||
def get_pypi_latest_version():
|
||||
content = fetch_remote_content("https://pypi.org/pypi/platformio/json")
|
||||
content = fetch_remote_content(PYPI_JSON_URL)
|
||||
return json.loads(content)["info"]["version"]
|
||||
|
@@ -17,6 +17,8 @@
import importlib.util
import inspect
import locale
import os
import shlex
import sys

from platformio.exception import UserSideException
@@ -29,7 +31,25 @@ else:
from asyncio import get_event_loop as aio_get_running_loop


if sys.version_info >= (3, 8):
from shlex import join as shlex_join
else:

def shlex_join(split_command):
return " ".join(shlex.quote(arg) for arg in split_command)


if sys.version_info >= (3, 9):
from asyncio import to_thread as aio_to_thread
else:
try:
from starlette.concurrency import run_in_threadpool as aio_to_thread
except ImportError:
pass


PY2 = sys.version_info[0] == 2 # DO NOT REMOVE IT. ESP8266/ESP32 depend on it
PY36 = sys.version_info[0:2] == (3, 6)
IS_CYGWIN = sys.platform.startswith("cygwin")
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
IS_MACOS = sys.platform.startswith("darwin")
@@ -85,10 +105,7 @@ def get_filesystem_encoding():


def get_locale_encoding():
try:
return locale.getdefaultlocale()[1]
except ValueError:
return None
return locale.getpreferredencoding()


def get_object_members(obj, ignore_private=True):
@@ -120,3 +137,12 @@ def path_to_unicode(path):
and custom device monitor filters
"""
return path


def is_proxy_set(socks=False):
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
value = os.getenv(var, os.getenv(var.lower()))
if not value or (socks and not value.startswith("socks5://")):
continue
return True
return False

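A short usage sketch for the two helpers added above, assuming a PlatformIO Core checkout with this change on the import path (the proxy URL below is a made-up example):

```python
import os

from platformio.compat import is_proxy_set, shlex_join

# shlex_join: portable shlex.join() for Python < 3.8
cmd = ["gcc", "-o", "hello world.o", "-c", "hello world.c"]
print(shlex_join(cmd))  # gcc -o 'hello world.o' -c 'hello world.c'

# is_proxy_set: checks HTTP_PROXY/HTTPS_PROXY/ALL_PROXY; socks=True additionally
# requires a socks5:// scheme
os.environ["ALL_PROXY"] = "socks5://127.0.0.1:1080"
print(is_proxy_set())            # True
print(is_proxy_set(socks=True))  # True
```
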
@ -28,9 +28,9 @@ from platformio.debug import helpers
|
||||
from platformio.debug.config.factory import DebugConfigFactory
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.process.gdb import GDBClientProcess
|
||||
from platformio.exception import ReturnErrorCode
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
@ -44,24 +44,20 @@ from platformio.project.options import ProjectOptions
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
@ -69,10 +65,13 @@ def cli(
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
client_extra_args,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
if not interface and client_extra_args:
|
||||
raise click.UsageError("Please specify debugging interface")
|
||||
|
||||
# use env variables from Eclipse or CLion
|
||||
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
@ -81,61 +80,57 @@ def cli(
|
||||
project_dir = os.getenv(name)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
return _debug_in_project_dir(
|
||||
project_config = ProjectConfig.get_instance(project_conf)
|
||||
project_config.validate(envs=[environment] if environment else None)
|
||||
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, os.getcwd(), project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
configure_args = (
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
environment,
|
||||
project_config,
|
||||
env_name,
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
client_extra_args,
|
||||
)
|
||||
if helpers.is_gdbmi_mode():
|
||||
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
debug_config = _configure(*configure_args)
|
||||
stream.close()
|
||||
else:
|
||||
debug_config = _configure(*configure_args)
|
||||
|
||||
_run(os.getcwd(), debug_config, client_extra_args)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _debug_in_project_dir(
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
environment,
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
):
|
||||
project_config = ProjectConfig.get_instance(project_conf)
|
||||
project_config.validate(envs=[environment] if environment else None)
|
||||
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
env_options = project_config.items(env=env_name, as_dict=True)
|
||||
if "platform" not in env_options:
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
|
||||
def _configure(
|
||||
ctx, project_config, env_name, load_mode, verbose, client_extra_args
|
||||
): # pylint: disable=too-many-positional-arguments
|
||||
platform = PlatformFactory.from_env(env_name, autoinstall=True)
|
||||
debug_config = DebugConfigFactory.new(
|
||||
PlatformFactory.new(env_options["platform"], autoinstall=True),
|
||||
platform,
|
||||
project_config,
|
||||
env_name,
|
||||
)
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
return subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
if "--version" in client_extra_args:
|
||||
raise ReturnErrorCode(
|
||||
subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
).returncode
|
||||
)
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as exc:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(exc) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(exc) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
click.echo(str(exc))
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_config.load_cmds == ["preload"]
|
||||
@ -157,25 +152,10 @@ def _debug_in_project_dir(
|
||||
debug_config.load_cmds = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_gdbmi_mode():
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream(
|
||||
"~", "Preparing firmware for debugging...\n"
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
stream.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(
|
||||
ctx, os.getcwd(), project_config, env_name, preload, verbose
|
||||
)
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(debug_config.program_path)
|
||||
@ -183,11 +163,15 @@ def _debug_in_project_dir(
|
||||
if not os.path.isfile(debug_config.program_path):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
return debug_config
|
||||
|
||||
|
||||
def _run(project_dir, debug_config, client_extra_args):
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
client = GDBClientProcess(project_dir, debug_config)
|
||||
coro = client.run(__unprocessed)
|
||||
coro = client.run(client_extra_args)
|
||||
try:
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
loop.run_until_complete(coro)
|
||||
@ -199,5 +183,3 @@ def _debug_in_project_dir(
|
||||
finally:
|
||||
client.close()
|
||||
loop.close()
|
||||
|
||||
return True
|
||||
|
@ -24,7 +24,9 @@ from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, platform, project_config, env_name, port=None):
|
||||
DEFAULT_PORT = None
|
||||
|
||||
def __init__(self, platform, project_config, env_name):
|
||||
self.platform = platform
|
||||
self.project_config = project_config
|
||||
self.env_name = env_name
|
||||
@ -48,7 +50,6 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
self._load_cmds = None
|
||||
self._port = None
|
||||
|
||||
self.port = port
|
||||
self.server = self._configure_server()
|
||||
|
||||
try:
|
||||
@ -120,8 +121,10 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
@property
|
||||
def port(self):
|
||||
return (
|
||||
self.env_options.get("debug_port", self.tool_settings.get("port"))
|
||||
or self._port
|
||||
self._port
|
||||
or self.env_options.get("debug_port")
|
||||
or self.tool_settings.get("port")
|
||||
or self.DEFAULT_PORT
|
||||
)
|
||||
|
||||
@port.setter
|
||||
@ -145,10 +148,12 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
|
||||
if data:
|
||||
return data
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
data = load_build_metadata(
|
||||
os.getcwd(), self.env_name, cache=True, build_type="debug"
|
||||
)
|
||||
if not data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
return data
|
||||
|
||||
def _configure_server(self):
|
||||
# user disabled server in platformio.ini
|
||||
@ -191,9 +196,11 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=result.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
(
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
)
|
||||
for a in result.get("arguments", [])
|
||||
],
|
||||
)
|
||||
|
@ -18,7 +18,6 @@ from platformio.device.finder import SerialPortFinder, is_pattern_port
|
||||
|
||||
|
||||
class BlackmagicDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
|
@ -27,17 +27,13 @@ class DebugConfigFactory:
|
||||
|
||||
@classmethod
|
||||
def new(cls, platform, project_config, env_name):
|
||||
board_config = platform.board_config(
|
||||
project_config.get("env:" + env_name, "board")
|
||||
)
|
||||
tool_name = (
|
||||
board_config.get_debug_tool_name(
|
||||
project_config.get("env:" + env_name, "debug_tool")
|
||||
)
|
||||
if board_config
|
||||
else None
|
||||
)
|
||||
board_id = project_config.get("env:" + env_name, "board")
|
||||
config_cls = None
|
||||
tool_name = None
|
||||
if board_id:
|
||||
tool_name = platform.board_config(
|
||||
project_config.get("env:" + env_name, "board")
|
||||
).get_debug_tool_name(project_config.get("env:" + env_name, "debug_tool"))
|
||||
try:
|
||||
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
|
||||
config_cls = getattr(mod, cls.get_clsname(tool_name))
|
||||
|
@ -16,7 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class GenericDebugConfig(DebugConfigBase):
|
||||
|
||||
DEFAULT_PORT = ":3333"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
@ -32,8 +32,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -16,7 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class JlinkDebugConfig(DebugConfigBase):
|
||||
|
||||
DEFAULT_PORT = ":2331"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
@ -37,11 +37,6 @@ $LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2331"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super().server_ready_pattern or ("Waiting for GDB connection")
|
||||
|
@ -16,7 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class MspdebugDebugConfig(DebugConfigBase):
|
||||
|
||||
DEFAULT_PORT = ":2000"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
@ -30,8 +30,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2000"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -17,7 +17,6 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class NativeDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
@ -16,7 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class QemuDebugConfig(DebugConfigBase):
|
||||
|
||||
DEFAULT_PORT = ":1234"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
@ -31,8 +31,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":1234"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -16,7 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class RenodeDebugConfig(DebugConfigBase):
|
||||
|
||||
DEFAULT_PORT = ":3333"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
@ -34,11 +34,6 @@ $INIT_BREAK
|
||||
monitor start
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super().server_ready_pattern or (
|
||||
|
@ -20,7 +20,6 @@ class DebugError(PlatformioException):
|
||||
|
||||
|
||||
class DebugSupportError(DebugError, UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please request support at https://github.com/platformio/"
|
||||
@ -31,3 +30,7 @@ class DebugSupportError(DebugError, UserSideException):
|
||||
|
||||
class DebugInvalidOptionsError(DebugError, UserSideException):
|
||||
pass
|
||||
|
||||
|
||||
class DebugInitError(DebugError, UserSideException):
|
||||
pass
|
||||
|
@ -31,7 +31,6 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
|
||||
|
||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
@ -77,7 +76,7 @@ def get_default_debug_env(config):
|
||||
|
||||
def predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
): # pylint: disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
debug_testname = project_config.get("env:" + env_name, "debug_test")
|
||||
if debug_testname:
|
||||
test_names = list_test_names(project_config)
|
||||
@ -91,7 +90,7 @@ def predebug_project(
|
||||
TestSuite(env_name, debug_testname),
|
||||
project_config,
|
||||
TestRunnerOptions(
|
||||
verbose=verbose,
|
||||
verbose=3 if verbose else 0,
|
||||
without_building=False,
|
||||
without_debugging=False,
|
||||
without_uploading=not preload,
|
||||
|
@ -53,7 +53,6 @@ class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
|
||||
|
||||
|
||||
class DebugBaseProcess:
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
LOG_FILE = None
|
||||
|
||||
|
@ -13,18 +13,17 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import signal
|
||||
import time
|
||||
|
||||
from platformio import telemetry
|
||||
from platformio.compat import aio_get_running_loop, is_bytes
|
||||
from platformio.debug import helpers
|
||||
from platformio.debug.exception import DebugInitError
|
||||
from platformio.debug.process.client import DebugClientProcess
|
||||
|
||||
|
||||
class GDBClientProcess(DebugClientProcess):
|
||||
|
||||
PIO_SRC_NAME = ".pioinit"
|
||||
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
|
||||
|
||||
@ -131,11 +130,7 @@ class GDBClientProcess(DebugClientProcess):
|
||||
self._handle_error(data)
|
||||
# go to init break automatically
|
||||
if self.INIT_COMPLETED_BANNER.encode() in data:
|
||||
telemetry.send_event(
|
||||
"Debug",
|
||||
"Started",
|
||||
telemetry.dump_run_environment(self.debug_config.env_options),
|
||||
)
|
||||
telemetry.log_debug_started(self.debug_config)
|
||||
self._auto_exec_continue()
|
||||
|
||||
def console_log(self, msg):
|
||||
@ -180,14 +175,7 @@ class GDBClientProcess(DebugClientProcess):
|
||||
and b"Error in sourced" in self._errors_buffer
|
||||
):
|
||||
return
|
||||
|
||||
last_erros = self._errors_buffer.decode()
|
||||
last_erros = " ".join(reversed(last_erros.split("\n")))
|
||||
last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)
|
||||
|
||||
err = "%s -> %s" % (
|
||||
telemetry.dump_run_environment(self.debug_config.env_options),
|
||||
last_erros,
|
||||
telemetry.log_debug_exception(
|
||||
DebugInitError(self._errors_buffer.decode()), self.debug_config
|
||||
)
|
||||
telemetry.send_exception("DebugInitError: %s" % err)
|
||||
self.transport.close()
|
||||
|
@ -26,7 +26,6 @@ from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class DebugServerProcess(DebugBaseProcess):
|
||||
|
||||
STD_BUFFER_SIZE = 1024
|
||||
|
||||
def __init__(self, debug_config):
|
||||
@ -63,7 +62,9 @@ class DebugServerProcess(DebugBaseProcess):
|
||||
|
||||
openocd_pipe_allowed = all(
|
||||
[
|
||||
not self.debug_config.env_options.get("debug_port"),
|
||||
not self.debug_config.env_options.get(
|
||||
"debug_port", self.debug_config.tool_settings.get("port")
|
||||
),
|
||||
"gdb" in self.debug_config.client_executable_path,
|
||||
"openocd" in server_executable,
|
||||
]
|
||||
|
platformio/dependencies.py (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.compat import is_proxy_set
|
||||
|
||||
|
||||
def get_core_dependencies():
|
||||
return {
|
||||
"contrib-piohome": "~3.4.2",
|
||||
"contrib-pioremote": "~1.0.0",
|
||||
"tool-scons": "~4.40801.0",
|
||||
"tool-cppcheck": "~1.21100.0",
|
||||
"tool-clangtidy": "~1.150005.0",
|
||||
"tool-pvs-studio": "~7.18.0",
|
||||
}
|
||||
|
||||
|
||||
def get_pip_dependencies():
|
||||
core = [
|
||||
"bottle == 0.13.*",
|
||||
"click >=8.0.4, <8.1.8",
|
||||
"colorama",
|
||||
"marshmallow == 3.*",
|
||||
"pyelftools >=0.27, <1",
|
||||
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
|
||||
"requests%s == 2.*" % ("[socks]" if is_proxy_set(socks=True) else ""),
|
||||
"semantic_version == 2.10.*",
|
||||
"tabulate == 0.*",
|
||||
]
|
||||
|
||||
home = [
|
||||
# PIO Home requirements
|
||||
"ajsonrpc == 1.2.*",
|
||||
"starlette >=0.19, <0.47",
|
||||
"uvicorn >=0.16, <0.35",
|
||||
"wsproto == 1.*",
|
||||
]
|
||||
|
||||
extra = []
|
||||
# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
|
||||
extra.append(
|
||||
'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
|
||||
)
|
||||
|
||||
# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
|
||||
try:
|
||||
import ssl # pylint: disable=import-outside-toplevel
|
||||
|
||||
if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
):
|
||||
extra.append("urllib3<2")
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return core + home + extra
|
@@ -89,7 +89,7 @@ def is_serial_port_ready(port, timeout=1):


class SerialPortFinder:
def __init__( # pylint: disable=too-many-arguments
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
self,
board_config=None,
upload_protocol=None,
@@ -163,7 +163,7 @@ class SerialPortFinder:
for item in list_serial_ports(as_objects=True):
if item.vid == device.vid and item.pid == device.pid:
candidates.append(item)
if len(candidates) == 1:
if len(candidates) <= 1:
return device.device
for item in candidates:
if ("GDB" if self.prefer_gdb_port else "UART") in item.description:
||||
|
@ -18,8 +18,6 @@ import re
|
||||
import time
|
||||
from glob import glob
|
||||
|
||||
import zeroconf
|
||||
|
||||
from platformio import __version__, exception, proc
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
|
||||
@ -84,6 +82,16 @@ def list_logical_devices():
|
||||
|
||||
|
||||
def list_mdns_services():
|
||||
try:
|
||||
import zeroconf # pylint: disable=import-outside-toplevel
|
||||
except ImportError:
|
||||
result = proc.exec_command(
|
||||
[proc.get_pythonexe_path(), "-m", "pip", "install", "zeroconf"]
|
||||
)
|
||||
if result.get("returncode") != 0:
|
||||
print(result.get("err"))
|
||||
import zeroconf # pylint: disable=import-outside-toplevel
|
||||
|
||||
class mDNSListener:
|
||||
def __init__(self):
|
||||
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
|
||||
@ -136,9 +144,9 @@ def list_mdns_services():
|
||||
if service.properties:
|
||||
try:
|
||||
properties = {
|
||||
k.decode("utf8"): v.decode("utf8")
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
k.decode("utf8"): (
|
||||
v.decode("utf8") if isinstance(v, bytes) else v
|
||||
)
|
||||
for k, v in service.properties.items()
|
||||
}
|
||||
json.dumps(properties)
|
||||
|
@ -58,7 +58,7 @@ from platformio.project.options import ProjectOptions
|
||||
"--encoding",
|
||||
help=(
|
||||
"Set the encoding for the serial port "
|
||||
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
|
||||
"(e.g. hexlify, Latin-1, UTF-8) [default=%s]"
|
||||
% ProjectOptions["env.monitor_encoding"].default
|
||||
),
|
||||
)
|
||||
@ -104,7 +104,7 @@ from platformio.project.options import ProjectOptions
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True),
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
@ -125,31 +125,33 @@ def device_monitor_cmd(**options):
|
||||
options = apply_project_monitor_options(options, project_options)
|
||||
register_filters(platform=platform, options=options)
|
||||
options["port"] = SerialPortFinder(
|
||||
board_config=platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None,
|
||||
board_config=(
|
||||
platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None
|
||||
),
|
||||
upload_protocol=project_options.get("upload_protocol"),
|
||||
ensure_ready=True,
|
||||
).find(initial_port=options["port"])
|
||||
|
||||
if options["menu_char"] == options["exit_char"]:
|
||||
raise exception.UserSideException(
|
||||
"--exit-char can not be the same as --menu-char"
|
||||
)
|
||||
|
||||
# check for unknown filters
|
||||
if options["filters"]:
|
||||
known_filters = set(get_available_filters())
|
||||
unknown_filters = set(options["filters"]) - known_filters
|
||||
if unknown_filters:
|
||||
options["filters"] = list(known_filters & set(options["filters"]))
|
||||
click.secho(
|
||||
("Warning! Skipping unknown filters `%s`. Known filters are `%s`")
|
||||
% (", ".join(unknown_filters), ", ".join(sorted(known_filters))),
|
||||
fg="yellow",
|
||||
if options["menu_char"] == options["exit_char"]:
|
||||
raise exception.UserSideException(
|
||||
"--exit-char can not be the same as --menu-char"
|
||||
)
|
||||
|
||||
start_terminal(options)
|
||||
# check for unknown filters
|
||||
if options["filters"]:
|
||||
known_filters = set(get_available_filters())
|
||||
unknown_filters = set(options["filters"]) - known_filters
|
||||
if unknown_filters:
|
||||
options["filters"] = list(known_filters & set(options["filters"]))
|
||||
click.secho(
|
||||
("Warning! Skipping unknown filters `%s`. Known filters are `%s`")
|
||||
% (", ".join(unknown_filters), ", ".join(sorted(known_filters))),
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
start_terminal(options)
|
||||
|
||||
|
||||
def get_project_options(environment=None):
|
||||
|
@ -25,11 +25,12 @@ from platformio.project.config import ProjectConfig
|
||||
class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def __init__(self, options=None):
|
||||
"""Called by PlatformIO to pass context"""
|
||||
miniterm.Transform.__init__(self)
|
||||
super().__init__()
|
||||
|
||||
self.options = options or {}
|
||||
self.project_dir = self.options.get("project_dir")
|
||||
self.environment = self.options.get("environment")
|
||||
self._running_terminal = None
|
||||
|
||||
self.config = ProjectConfig.get_instance()
|
||||
if not self.environment:
|
||||
@ -47,6 +48,12 @@ class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def NAME(self):
|
||||
raise NotImplementedError("Please declare NAME attribute for the filter class")
|
||||
|
||||
def set_running_terminal(self, terminal):
|
||||
self._running_terminal = terminal
|
||||
|
||||
def get_running_terminal(self):
|
||||
return self._running_terminal
|
||||
|
||||
|
||||
def register_filters(platform=None, options=None):
|
||||
# project filters
|
||||
|
@@ -24,12 +24,18 @@ class Hexlify(DeviceMonitorFilterBase):
super().__init__(*args, **kwargs)
self._counter = 0

def set_running_terminal(self, terminal):
# force to Latin-1, issue #4732
if terminal.input_encoding == "UTF-8":
terminal.set_rx_encoding("Latin-1")
super().set_running_terminal(terminal)

def rx(self, text):
result = ""
for b in serial.iterbytes(text):
for c in serial.iterbytes(text):
if (self._counter % 16) == 0:
result += "\n{:04X} | ".format(self._counter)
asciicode = ord(b)
asciicode = ord(c)
if asciicode <= 255:
result += "{:02X} ".format(asciicode)
else:
|
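To illustrate the dump format produced by the rx() loop above, a minimal stand-alone reproduction (the sample bytes are arbitrary):

```python
# Mirrors the Hexlify formatting: an "NNNN | " offset header every 16 bytes,
# then two hex digits per byte.
def hexlify_preview(data: bytes) -> str:
    result = ""
    for counter, byte in enumerate(data):
        if counter % 16 == 0:
            result += "\n{:04X} | ".format(counter)
        result += "{:02X} ".format(byte)
    return result


print(hexlify_preview(b"Hello, PlatformIO!"))
# 0000 | 48 65 6C 6C 6F 2C 20 50 6C 61 74 66 6F 72 6D 49
# 0010 | 4F 21
```
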
@ -13,7 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import io
|
||||
import os.path
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase
|
||||
@ -27,8 +27,10 @@ class LogToFile(DeviceMonitorFilterBase):
|
||||
self._log_fp = None
|
||||
|
||||
def __call__(self):
|
||||
log_file_name = "platformio-device-monitor-%s.log" % datetime.now().strftime(
|
||||
"%y%m%d-%H%M%S"
|
||||
if not os.path.isdir("logs"):
|
||||
os.makedirs("logs")
|
||||
log_file_name = os.path.join(
|
||||
"logs", "device-monitor-%s.log" % datetime.now().strftime("%y%m%d-%H%M%S")
|
||||
)
|
||||
print("--- Logging an output to %s" % os.path.abspath(log_file_name))
|
||||
# pylint: disable=consider-using-with
|
||||
|
@ -110,6 +110,12 @@ def new_terminal(options):
|
||||
term.raw = options["raw"]
|
||||
term.set_rx_encoding(options["encoding"])
|
||||
term.set_tx_encoding(options["encoding"])
|
||||
for ts in (term.tx_transformations, term.rx_transformations):
|
||||
for t in ts:
|
||||
try:
|
||||
t.set_running_terminal(term)
|
||||
except AttributeError:
|
||||
pass
|
||||
return term
|
||||
|
||||
|
||||
@ -144,9 +150,8 @@ def new_serial_instance(options): # pylint: disable=too-many-branches
|
||||
except KeyboardInterrupt as exc:
|
||||
click.echo("", err=True)
|
||||
raise UserSideException("User aborted and port is not given") from exc
|
||||
else:
|
||||
if not port:
|
||||
raise UserSideException("Port is not given")
|
||||
if not port:
|
||||
raise UserSideException("Port is not given")
|
||||
try:
|
||||
serial_instance = serial.serial_for_url(
|
||||
port,
|
||||
|
@ -14,7 +14,6 @@
|
||||
|
||||
|
||||
class PlatformioException(Exception):
|
||||
|
||||
MESSAGE = None
|
||||
|
||||
def __str__(self): # pragma: no cover
|
||||
@ -26,7 +25,6 @@ class PlatformioException(Exception):
|
||||
|
||||
|
||||
class ReturnErrorCode(PlatformioException):
|
||||
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
@ -35,7 +33,6 @@ class UserSideException(PlatformioException):
|
||||
|
||||
|
||||
class AbortedByUser(UserSideException):
|
||||
|
||||
MESSAGE = "Aborted by user"
|
||||
|
||||
|
||||
@ -49,7 +46,6 @@ class InvalidUdevRules(UserSideException):
|
||||
|
||||
|
||||
class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Please install `99-platformio-udev.rules`. \nMore details: "
|
||||
"https://docs.platformio.org/en/latest/core/installation/udev-rules.html"
|
||||
@ -57,7 +53,6 @@ class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
|
||||
class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Your `{0}` are outdated. Please update or reinstall them."
|
||||
"\nMore details: "
|
||||
@ -71,32 +66,26 @@ class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
|
||||
class GetSerialPortsError(PlatformioException):
|
||||
|
||||
MESSAGE = "No implementation for your platform ('{0}') available"
|
||||
|
||||
|
||||
class GetLatestVersionError(PlatformioException):
|
||||
|
||||
MESSAGE = "Can not retrieve the latest PlatformIO version"
|
||||
|
||||
|
||||
class InvalidSettingName(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid setting with the name '{0}'"
|
||||
|
||||
|
||||
class InvalidSettingValue(UserSideException):
|
||||
|
||||
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
|
||||
|
||||
|
||||
class InvalidJSONFile(PlatformioException):
|
||||
|
||||
class InvalidJSONFile(ValueError, UserSideException):
|
||||
MESSAGE = "Could not load broken JSON: {0}"
|
||||
|
||||
|
||||
class CIBuildEnvsEmpty(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Can't find PlatformIO build environments.\n"
|
||||
"Please specify `--board` or path to `platformio.ini` with "
|
||||
@ -104,18 +93,7 @@ class CIBuildEnvsEmpty(UserSideException):
|
||||
)
|
||||
|
||||
|
||||
class UpgradeError(PlatformioException):
|
||||
|
||||
MESSAGE = """{0}
|
||||
|
||||
* Upgrade using `pip install -U platformio`
|
||||
* Try different installation/upgrading steps:
|
||||
https://docs.platformio.org/page/installation.html
|
||||
"""
|
||||
|
||||
|
||||
class HomeDirPermissionsError(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"The directory `{0}` or its parent directory is not owned by the "
|
||||
"current user and PlatformIO can not store configuration data.\n"
|
||||
@ -126,7 +104,6 @@ class HomeDirPermissionsError(UserSideException):
|
||||
|
||||
|
||||
class CygwinEnvDetected(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"PlatformIO does not work within Cygwin environment. "
|
||||
"Use native Terminal instead."
|
||||
|
@@ -24,7 +24,7 @@ import sys

import click

from platformio import exception, proc
from platformio import exception
from platformio.compat import IS_WINDOWS

@@ -50,6 +50,10 @@ def get_source_dir():
    return os.path.dirname(curpath)


def get_assets_dir():
    return os.path.join(get_source_dir(), "assets")


def load_json(file_path):
    try:
        with open(file_path, mode="r", encoding="utf8") as f:

@@ -99,7 +103,7 @@ def calculate_folder_size(path):

def get_platformio_udev_rules_path():
    return os.path.abspath(
        os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
        os.path.join(get_assets_dir(), "system", "99-platformio-udev.rules")
    )

@@ -177,7 +181,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
    result = set()
    # correct fs directory separator
    src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
    for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
    for action, pattern in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
        candidates = _find_candidates(pattern)
        if action == "+":
            result |= candidates

@@ -189,26 +193,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):

def to_unix_path(path):
    if not IS_WINDOWS or not path:
        return path
    return re.sub(r"[\\]+", "/", path)


def normalize_path(path):
    path = os.path.abspath(path)
    if not IS_WINDOWS or not path.startswith("\\\\"):
        return path
    try:
        result = proc.exec_command(["net", "use"])
        if result["returncode"] != 0:
            return path
        share_re = re.compile(r"\s([A-Z]\:)\s+(\\\\[^\s]+)")
        for line in result["out"].split("\n"):
            share = share_re.search(line)
            if not share:
                continue
            path = path.replace(share.group(2), share.group(1))
    except OSError:
        pass
    return path
    return path.replace("\\", "/")


def expanduser(path):

@@ -225,7 +210,7 @@ def change_filemtime(path, mtime):


def rmtree(path):
    def _onerror(func, path, __):
    def _onexc(func, path, _):
        try:
            st_mode = os.stat(path).st_mode
            if st_mode & stat.S_IREAD:

@@ -238,4 +223,7 @@ def rmtree(path):
            err=True,
        )

    return shutil.rmtree(path, onerror=_onerror)
    # pylint: disable=unexpected-keyword-arg, deprecated-argument
    if sys.version_info < (3, 12):
        return shutil.rmtree(path, onerror=_onexc)
    return shutil.rmtree(path, onexc=_onexc)
@@ -13,11 +13,13 @@
# limitations under the License.

import mimetypes
import socket

import click

from platformio.home.helpers import is_port_used
from platformio.compat import IS_WINDOWS
from platformio.home.run import run_server
from platformio.package.manager.core import get_core_package_dir


@click.command("home", short_help="GUI to manage PlatformIO")

@@ -48,15 +50,17 @@ from platformio.home.run import run_server
    ),
)
def cli(port, host, no_open, shutdown_timeout, session_id):
    # hook for `platformio-node-helpers`
    if host == "__do_not_start__":
        # download all dependent packages
        get_core_package_dir("contrib-piohome")
        return

    # Ensure PIO Home mimetypes are known
    mimetypes.add_type("text/html", ".html")
    mimetypes.add_type("text/css", ".css")
    mimetypes.add_type("application/javascript", ".js")

    # hook for `platformio-node-helpers`
    if host == "__do_not_start__":
        return

    home_url = "http://%s:%d%s" % (
        host,
        port,

@@ -92,3 +96,23 @@ def cli(port, host, no_open, shutdown_timeout, session_id):
        shutdown_timeout=shutdown_timeout,
        home_url=home_url,
    )


def is_port_used(host, port):
    socket.setdefaulttimeout(1)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if IS_WINDOWS:
        try:
            s.bind((host, port))
            s.close()
            return False
        except (OSError, socket.error):
            pass
    else:
        try:
            s.connect((host, port))
            s.close()
        except socket.error:
            return False

    return True
@@ -1,60 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import socket

from starlette.concurrency import run_in_threadpool

from platformio import util
from platformio.compat import IS_WINDOWS
from platformio.http import HTTPSession
from platformio.proc import where_is_program


class AsyncSession(HTTPSession):
    async def request(  # pylint: disable=signature-differs,invalid-overridden-method
        self, *args, **kwargs
    ):
        func = super().request
        return await run_in_threadpool(func, *args, **kwargs)


@util.memoized(expire="60s")
def requests_session():
    return AsyncSession()


@util.memoized(expire="60s")
def get_core_fullpath():
    return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))


def is_port_used(host, port):
    socket.setdefaulttimeout(1)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if IS_WINDOWS:
        try:
            s.bind((host, port))
            s.close()
            return False
        except (OSError, socket.error):
            pass
    else:
        try:
            s.connect((host, port))
            s.close()
        except socket.error:
            return False

    return True
@@ -15,9 +15,10 @@
from ajsonrpc.core import JSONRPC20DispatchException

from platformio.account.client import AccountClient
from platformio.home.rpc.handlers.base import BaseRPCHandler


class AccountRPC:
class AccountRPC(BaseRPCHandler):
    @staticmethod
    def call_client(method, *args, **kwargs):
        try:

@@ -25,5 +26,5 @@ class AccountRPC:
            return getattr(client, method)(*args, **kwargs)
        except Exception as exc:  # pylint: disable=bare-except
            raise JSONRPC20DispatchException(
                code=4003, message="PIO Account Call Error", data=str(exc)
                code=5000, message="PIO Account Call Error", data=str(exc)
            ) from exc
@@ -12,16 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from pathlib import Path

from platformio import __version__, app, fs, util
from platformio.project.config import ProjectConfig
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.project.helpers import is_platformio_project


class AppRPC:
class AppRPC(BaseRPCHandler):
    IGNORE_STORAGE_KEYS = [
        "cid",
        "coreVersion",

@@ -32,16 +30,11 @@ class AppRPC:
        "projectsDir",
    ]

    @staticmethod
    def get_state_path():
        core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
        if not os.path.isdir(core_dir):
            os.makedirs(core_dir)
        return os.path.join(core_dir, "homestate.json")

    @staticmethod
    def load_state():
        with app.State(AppRPC.get_state_path(), lock=True) as state:
        with app.State(
            app.resolve_state_path("core_dir", "homestate.json"), lock=True
        ) as state:
            storage = state.get("storage", {})

            # base data

@@ -81,7 +74,9 @@ class AppRPC:

    @staticmethod
    def save_state(state):
        with app.State(AppRPC.get_state_path(), lock=True) as s:
        with app.State(
            app.resolve_state_path("core_dir", "homestate.json"), lock=True
        ) as s:
            s.clear()
            s.update(state)
            storage = s.get("storage", {})
platformio/home/rpc/handlers/base.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class BaseRPCHandler:
    factory = None
@@ -18,10 +18,10 @@ from pathlib import Path

from ajsonrpc.core import JSONRPC20DispatchException

from platformio.compat import aio_get_running_loop
from platformio.home.rpc.handlers.base import BaseRPCHandler


class IDERPC:
class IDERPC(BaseRPCHandler):
    COMMAND_TIMEOUT = 1.5  # in seconds

    def __init__(self):

@@ -51,11 +51,12 @@ class IDERPC:

    def on_command_result(self, cmd_id, value):
        if cmd_id not in self._cmd_queue:
            return
            return False
        if self._cmd_queue[cmd_id]["method"] == "get_pio_project_dirs":
            value = [str(Path(p).resolve()) for p in value]
        self._cmd_queue[cmd_id]["future"].set_result(value)
        del self._cmd_queue[cmd_id]
        return True

    def _process_commands(self):
        for cmd_id in list(self._cmd_queue):
@@ -17,10 +17,11 @@ import time

from platformio.cache import ContentCache
from platformio.compat import aio_create_task
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.home.rpc.handlers.os import OSRPC


class MiscRPC:
class MiscRPC(BaseRPCHandler):
    async def load_latest_tweets(self, data_url):
        cache_key = ContentCache.key_from_args(data_url, "tweets")
        cache_valid = "180d"
@ -22,14 +22,25 @@ import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.compat import aio_to_thread
|
||||
from platformio.device.list.util import list_logical_devices
|
||||
from platformio.home import helpers
|
||||
from platformio.http import ensure_internet_on
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
from platformio.http import HTTPSession, ensure_internet_on
|
||||
|
||||
|
||||
class OSRPC:
|
||||
@staticmethod
|
||||
async def fetch_content(uri, data=None, headers=None, cache_valid=None):
|
||||
class HTTPAsyncSession(HTTPSession):
|
||||
async def request( # pylint: disable=signature-differs,invalid-overridden-method
|
||||
self, *args, **kwargs
|
||||
):
|
||||
func = super().request
|
||||
return await aio_to_thread(func, *args, **kwargs)
|
||||
|
||||
|
||||
class OSRPC(BaseRPCHandler):
|
||||
_http_session = None
|
||||
|
||||
@classmethod
|
||||
async def fetch_content(cls, url, data=None, headers=None, cache_valid=None):
|
||||
if not headers:
|
||||
headers = {
|
||||
"User-Agent": (
|
||||
@ -38,7 +49,7 @@ class OSRPC:
|
||||
"Safari/603.3.8"
|
||||
)
|
||||
}
|
||||
cache_key = ContentCache.key_from_args(uri, data) if cache_valid else None
|
||||
cache_key = ContentCache.key_from_args(url, data) if cache_valid else None
|
||||
with ContentCache() as cc:
|
||||
if cache_key:
|
||||
result = cc.get(cache_key)
|
||||
@ -48,11 +59,13 @@ class OSRPC:
|
||||
# check internet before and resolve issue with 60 seconds timeout
|
||||
ensure_internet_on(raise_exception=True)
|
||||
|
||||
session = helpers.requests_session()
|
||||
if not cls._http_session:
|
||||
cls._http_session = HTTPAsyncSession()
|
||||
|
||||
if data:
|
||||
r = await session.post(uri, data=data, headers=headers)
|
||||
r = await cls._http_session.post(url, data=data, headers=headers)
|
||||
else:
|
||||
r = await session.get(uri, headers=headers)
|
||||
r = await cls._http_session.get(url, headers=headers)
|
||||
|
||||
r.raise_for_status()
|
||||
result = r.text
|
||||
@ -64,9 +77,9 @@ class OSRPC:
|
||||
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
|
||||
if uri.startswith("http"):
|
||||
return await self.fetch_content(uri, data, headers, cache_valid)
|
||||
if os.path.isfile(uri):
|
||||
with io.open(uri, encoding="utf-8") as fp:
|
||||
return fp.read()
|
||||
local_path = uri[7:] if uri.startswith("file://") else uri
|
||||
with io.open(local_path, encoding="utf-8") as fp:
|
||||
return fp.read()
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
@ -81,6 +94,14 @@ class OSRPC:
|
||||
def open_file(path):
|
||||
return click.launch(path)
|
||||
|
||||
@staticmethod
|
||||
def call_path_module_func(name, args, **kwargs):
|
||||
return getattr(os.path, name)(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def get_path_separator():
|
||||
return os.sep
|
||||
|
||||
@staticmethod
|
||||
def is_file(path):
|
||||
return os.path.isfile(path)
|
||||
@ -148,9 +169,4 @@ class OSRPC:
|
||||
|
||||
@staticmethod
|
||||
def get_logical_devices():
|
||||
items = []
|
||||
for item in list_logical_devices():
|
||||
if item["name"]:
|
||||
item["name"] = item["name"]
|
||||
items.append(item)
|
||||
return items
|
||||
return list_logical_devices()
|
||||
|
@ -12,6 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
@ -20,11 +22,47 @@ import threading
|
||||
|
||||
import click
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import __main__, __version__, fs, proc
|
||||
from platformio.compat import get_locale_encoding, is_bytes
|
||||
from platformio.home import helpers
|
||||
from platformio import __main__, __version__, app, fs, proc, util
|
||||
from platformio.compat import (
|
||||
IS_WINDOWS,
|
||||
aio_create_task,
|
||||
aio_get_running_loop,
|
||||
aio_to_thread,
|
||||
get_locale_encoding,
|
||||
is_bytes,
|
||||
)
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
|
||||
|
||||
class PIOCoreProtocol(asyncio.SubprocessProtocol):
|
||||
def __init__(self, exit_future, on_data_callback=None):
|
||||
self.exit_future = exit_future
|
||||
self.on_data_callback = on_data_callback
|
||||
self.stdout = ""
|
||||
self.stderr = ""
|
||||
self._is_exited = False
|
||||
self._encoding = get_locale_encoding()
|
||||
|
||||
def pipe_data_received(self, fd, data):
|
||||
data = data.decode(self._encoding, "replace")
|
||||
pipe = ["stdin", "stdout", "stderr"][fd]
|
||||
if pipe == "stdout":
|
||||
self.stdout += data
|
||||
if pipe == "stderr":
|
||||
self.stderr += data
|
||||
if self.on_data_callback:
|
||||
self.on_data_callback(pipe=pipe, data=data)
|
||||
|
||||
def connection_lost(self, exc):
|
||||
self.process_exited()
|
||||
|
||||
def process_exited(self):
|
||||
if self._is_exited:
|
||||
return
|
||||
self.exit_future.set_result(True)
|
||||
self._is_exited = True
|
||||
|
||||
|
||||
class MultiThreadingStdStream:
|
||||
@ -58,11 +96,51 @@ class MultiThreadingStdStream:
|
||||
return result
|
||||
|
||||
|
||||
class PIOCoreRPC:
|
||||
@util.memoized(expire="60s")
|
||||
def get_core_fullpath():
|
||||
return proc.where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
|
||||
|
||||
|
||||
class PIOCoreRPC(BaseRPCHandler):
|
||||
@staticmethod
|
||||
def version():
|
||||
return __version__
|
||||
|
||||
async def exec(self, args, options=None):
|
||||
loop = aio_get_running_loop()
|
||||
exit_future = loop.create_future()
|
||||
data_callback = functools.partial(
|
||||
self._on_exec_data_received, exec_options=options
|
||||
)
|
||||
if args[0] != "--caller" and app.get_session_var("caller_id"):
|
||||
args = ["--caller", app.get_session_var("caller_id")] + args
|
||||
transport, protocol = await loop.subprocess_exec(
|
||||
lambda: PIOCoreProtocol(exit_future, data_callback),
|
||||
get_core_fullpath(),
|
||||
*args,
|
||||
stdin=None,
|
||||
**options.get("spawn", {}),
|
||||
)
|
||||
await exit_future
|
||||
transport.close()
|
||||
return {
|
||||
"stdout": protocol.stdout,
|
||||
"stderr": protocol.stderr,
|
||||
"returncode": transport.get_returncode(),
|
||||
}
|
||||
|
||||
def _on_exec_data_received(self, exec_options, pipe, data):
|
||||
notification_method = exec_options.get(f"{pipe}NotificationMethod")
|
||||
if not notification_method:
|
||||
return
|
||||
aio_create_task(
|
||||
self.factory.notify_clients(
|
||||
method=notification_method,
|
||||
params=[data],
|
||||
actor="frontend",
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def setup_multithreading_std_streams():
|
||||
if isinstance(sys.stdout, MultiThreadingStdStream):
|
||||
@ -94,14 +172,14 @@ class PIOCoreRPC:
|
||||
return PIOCoreRPC._process_result(result, to_json)
|
||||
except Exception as exc: # pylint: disable=bare-except
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4003, message="PIO Core Call Error", data=str(exc)
|
||||
code=5000, message="PIO Core Call Error", data=str(exc)
|
||||
) from exc
|
||||
|
||||
@staticmethod
|
||||
async def _call_subprocess(args, options):
|
||||
result = await run_in_threadpool(
|
||||
result = await aio_to_thread(
|
||||
proc.exec_command,
|
||||
[helpers.get_core_fullpath()] + args,
|
||||
[get_core_fullpath()] + args,
|
||||
cwd=options.get("cwd") or os.getcwd(),
|
||||
)
|
||||
return (result["out"], result["err"], result["returncode"])
|
||||
@ -119,7 +197,7 @@ class PIOCoreRPC:
|
||||
exit_code,
|
||||
)
|
||||
|
||||
return await run_in_threadpool(
|
||||
return await aio_to_thread(
|
||||
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
|
||||
)
|
||||
|
||||
@ -132,7 +210,7 @@ class PIOCoreRPC:
|
||||
err = err.decode(get_locale_encoding())
|
||||
text = ("%s\n\n%s" % (out, err)).strip()
|
||||
if code != 0:
|
||||
raise Exception(text)
|
||||
raise PlatformioException(text)
|
||||
if not to_json:
|
||||
return text
|
||||
try:
|
||||
|
platformio/home/rpc/handlers/platform.py (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os.path
|
||||
|
||||
from platformio.compat import aio_to_thread
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
|
||||
|
||||
class PlatformRPC(BaseRPCHandler):
|
||||
async def fetch_platforms(self, search_query=None, page=0, force_installed=False):
|
||||
if force_installed:
|
||||
return {
|
||||
"items": await aio_to_thread(
|
||||
self._load_installed_platforms, search_query
|
||||
)
|
||||
}
|
||||
|
||||
search_result = await self.factory.manager.dispatcher["registry.call_client"](
|
||||
method="list_packages",
|
||||
query=search_query,
|
||||
qualifiers={
|
||||
"types": ["platform"],
|
||||
},
|
||||
page=page,
|
||||
)
|
||||
return {
|
||||
"page": search_result["page"],
|
||||
"limit": search_result["limit"],
|
||||
"total": search_result["total"],
|
||||
"items": [
|
||||
{
|
||||
"id": item["id"],
|
||||
"ownername": item["owner"]["username"],
|
||||
"name": item["name"],
|
||||
"version": item["version"]["name"],
|
||||
"description": item["description"],
|
||||
"tier": item["tier"],
|
||||
}
|
||||
for item in search_result["items"]
|
||||
],
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _load_installed_platforms(search_query=None):
|
||||
search_query = (search_query or "").strip()
|
||||
|
||||
def _matchSearchQuery(p):
|
||||
content_blocks = [p.name, p.title, p.description]
|
||||
if p.frameworks:
|
||||
content_blocks.append(" ".join(p.frameworks.keys()))
|
||||
for board in p.get_boards().values():
|
||||
board_data = board.get_brief_data()
|
||||
for key in ("id", "mcu", "vendor"):
|
||||
content_blocks.append(board_data.get(key))
|
||||
return search_query in " ".join(content_blocks)
|
||||
|
||||
items = []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
p = PlatformFactory.new(pkg)
|
||||
if search_query and not _matchSearchQuery(p):
|
||||
continue
|
||||
items.append(
|
||||
{
|
||||
"__pkg_path": pkg.path,
|
||||
"ownername": pkg.metadata.spec.owner if pkg.metadata.spec else None,
|
||||
"name": p.name,
|
||||
"version": str(pkg.metadata.version),
|
||||
"title": p.title,
|
||||
"description": p.description,
|
||||
}
|
||||
)
|
||||
return items
|
||||
|
||||
async def fetch_boards(self, platform_spec):
|
||||
spec = PackageSpec(platform_spec)
|
||||
if spec.owner:
|
||||
return await self.factory.manager.dispatcher["registry.call_client"](
|
||||
method="get_package",
|
||||
typex="platform",
|
||||
owner=spec.owner,
|
||||
name=spec.name,
|
||||
extra_path="/boards",
|
||||
)
|
||||
return await aio_to_thread(self._load_installed_boards, spec)
|
||||
|
||||
@staticmethod
|
||||
def _load_installed_boards(platform_spec):
|
||||
p = PlatformFactory.new(platform_spec)
|
||||
return sorted(
|
||||
[b.get_brief_data() for b in p.get_boards().values()],
|
||||
key=lambda item: item["name"],
|
||||
)
|
||||
|
||||
async def fetch_examples(self, platform_spec):
|
||||
spec = PackageSpec(platform_spec)
|
||||
if spec.owner:
|
||||
return await self.factory.manager.dispatcher["registry.call_client"](
|
||||
method="get_package",
|
||||
typex="platform",
|
||||
owner=spec.owner,
|
||||
name=spec.name,
|
||||
extra_path="/examples",
|
||||
)
|
||||
return await aio_to_thread(self._load_installed_examples, spec)
|
||||
|
||||
@staticmethod
|
||||
def _load_installed_examples(platform_spec):
|
||||
platform = PlatformFactory.new(platform_spec)
|
||||
platform_dir = platform.get_dir()
|
||||
parser = ManifestParserFactory.new_from_dir(platform_dir)
|
||||
result = parser.as_dict().get("examples") or []
|
||||
for example in result:
|
||||
example["files"] = [
|
||||
{
|
||||
"path": item,
|
||||
"url": (
|
||||
"file://%s"
|
||||
+ os.path.join(platform_dir, "examples", example["name"], item)
|
||||
),
|
||||
}
|
||||
for item in example["files"]
|
||||
]
|
||||
return result
|
@ -15,13 +15,17 @@
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import semantic_version
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio import exception, fs
|
||||
from platformio import app, exception, fs
|
||||
from platformio.home.rpc.handlers.app import AppRPC
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectError
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
@ -29,14 +33,18 @@ from platformio.project.integration.generator import ProjectGenerator
|
||||
from platformio.project.options import get_config_options_schema
|
||||
|
||||
|
||||
class ProjectRPC:
|
||||
class ProjectRPC(BaseRPCHandler):
|
||||
@staticmethod
|
||||
def config_call(init_kwargs, method, *args):
|
||||
assert isinstance(init_kwargs, dict)
|
||||
assert "path" in init_kwargs
|
||||
project_dir = get_project_dir()
|
||||
if os.path.isfile(init_kwargs["path"]):
|
||||
if os.path.isdir(init_kwargs["path"]):
|
||||
project_dir = init_kwargs["path"]
|
||||
init_kwargs["path"] = os.path.join(init_kwargs["path"], "platformio.ini")
|
||||
elif os.path.isfile(init_kwargs["path"]):
|
||||
project_dir = os.path.dirname(init_kwargs["path"])
|
||||
else:
|
||||
project_dir = get_project_dir()
|
||||
with fs.cd(project_dir):
|
||||
return getattr(ProjectConfig(**init_kwargs), method)(*args)
|
||||
|
||||
@ -184,83 +192,17 @@ class ProjectRPC:
|
||||
|
||||
async def init(self, board, framework, project_dir):
|
||||
assert project_dir
|
||||
state = AppRPC.load_state()
|
||||
if not os.path.isdir(project_dir):
|
||||
os.makedirs(project_dir)
|
||||
args = ["init", "--board", board]
|
||||
args = ["init", "--board", board, "--sample-code"]
|
||||
if framework:
|
||||
args.extend(["--project-option", "framework = %s" % framework])
|
||||
if (
|
||||
state["storage"]["coreCaller"]
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
ide = app.get_session_var("caller_id")
|
||||
if ide in ProjectGenerator.get_supported_ides():
|
||||
args.extend(["--ide", ide])
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
return self._generate_project_main(project_dir, board, framework)
|
||||
|
||||
@staticmethod
|
||||
def _generate_project_main(project_dir, board, framework):
|
||||
main_content = None
|
||||
if framework == "arduino":
|
||||
main_content = "\n".join(
|
||||
[
|
||||
"#include <Arduino.h>",
|
||||
"",
|
||||
"void setup() {",
|
||||
" // put your setup code here, to run once:",
|
||||
"}",
|
||||
"",
|
||||
"void loop() {",
|
||||
" // put your main code here, to run repeatedly:",
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
)
|
||||
elif framework == "mbed":
|
||||
main_content = "\n".join(
|
||||
[
|
||||
"#include <mbed.h>",
|
||||
"",
|
||||
"int main() {",
|
||||
"",
|
||||
" // put your setup code here, to run once:",
|
||||
"",
|
||||
" while(1) {",
|
||||
" // put your main code here, to run repeatedly:",
|
||||
" }",
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
)
|
||||
if not main_content:
|
||||
return project_dir
|
||||
|
||||
is_cpp_project = True
|
||||
pm = PlatformPackageManager()
|
||||
try:
|
||||
board = pm.board_config(board)
|
||||
platforms = board.get("platforms", board.get("platform"))
|
||||
if not isinstance(platforms, list):
|
||||
platforms = [platforms]
|
||||
c_based_platforms = ["intel_mcs51", "ststm8"]
|
||||
is_cpp_project = not set(platforms) & set(c_based_platforms)
|
||||
except exception.PlatformioException:
|
||||
pass
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig()
|
||||
src_dir = config.get("platformio", "src_dir")
|
||||
main_path = os.path.join(
|
||||
src_dir, "main.%s" % ("cpp" if is_cpp_project else "c")
|
||||
)
|
||||
if os.path.isfile(main_path):
|
||||
return project_dir
|
||||
if not os.path.isdir(src_dir):
|
||||
os.makedirs(src_dir)
|
||||
with open(main_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(main_content.strip())
|
||||
return project_dir
|
||||
|
||||
@staticmethod
|
||||
@ -296,11 +238,9 @@ class ProjectRPC:
|
||||
args.extend(
|
||||
["--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries"]
|
||||
)
|
||||
if (
|
||||
state["storage"]["coreCaller"]
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
ide = app.get_session_var("caller_id")
|
||||
if ide in ProjectGenerator.get_supported_ides():
|
||||
args.extend(["--ide", ide])
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
@ -324,14 +264,127 @@ class ProjectRPC:
|
||||
)
|
||||
shutil.copytree(project_dir, new_project_dir, symlinks=True)
|
||||
|
||||
state = AppRPC.load_state()
|
||||
args = ["init"]
|
||||
if (
|
||||
state["storage"]["coreCaller"]
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
ide = app.get_session_var("caller_id")
|
||||
if ide in ProjectGenerator.get_supported_ides():
|
||||
args.extend(["--ide", ide])
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": new_project_dir, "force_subprocess": True}
|
||||
)
|
||||
return new_project_dir
|
||||
|
||||
async def init_v2(self, configuration, options=None):
|
||||
project_dir = os.path.join(configuration["location"], configuration["name"])
|
||||
if not os.path.isdir(project_dir):
|
||||
os.makedirs(project_dir)
|
||||
|
||||
envclone = os.environ.copy()
|
||||
envclone["PLATFORMIO_FORCE_ANSI"] = "true"
|
||||
options = options or {}
|
||||
options["spawn"] = {"env": envclone, "cwd": project_dir}
|
||||
|
||||
args = ["project", "init"]
|
||||
ide = app.get_session_var("caller_id")
|
||||
if ide in ProjectGenerator.get_supported_ides():
|
||||
args.extend(["--ide", ide])
|
||||
|
||||
if configuration.get("example"):
|
||||
await self.factory.notify_clients(
|
||||
method=options.get("stdoutNotificationMethod"),
|
||||
params=["Copying example files...\n"],
|
||||
actor="frontend",
|
||||
)
|
||||
await self._pre_init_example(configuration, project_dir)
|
||||
else:
|
||||
args.extend(self._pre_init_empty(configuration))
|
||||
|
||||
return await self.factory.manager.dispatcher["core.exec"](args, options=options)
|
||||
|
||||
@staticmethod
|
||||
def _pre_init_empty(configuration):
|
||||
project_options = []
|
||||
platform = configuration["platform"]
|
||||
board_id = configuration.get("board", {}).get("id")
|
||||
env_name = board_id or platform["name"]
|
||||
if configuration.get("description"):
|
||||
project_options.append(("description", configuration.get("description")))
|
||||
try:
|
||||
v = semantic_version.Version(platform.get("version"))
|
||||
assert not v.prerelease
|
||||
project_options.append(
|
||||
("platform", "{name} @ ^{version}".format(**platform))
|
||||
)
|
||||
except (AssertionError, ValueError):
|
||||
project_options.append(
|
||||
("platform", "{name} @ {version}".format(**platform))
|
||||
)
|
||||
if board_id:
|
||||
project_options.append(("board", board_id))
|
||||
if configuration.get("framework"):
|
||||
project_options.append(("framework", configuration["framework"]["name"]))
|
||||
|
||||
args = ["-e", env_name, "--sample-code"]
|
||||
for name, value in project_options:
|
||||
args.extend(["-O", f"{name}={value}"])
|
||||
return args
|
||||
|
||||
async def _pre_init_example(self, configuration, project_dir):
|
||||
for item in configuration["example"]["files"]:
|
||||
p = Path(project_dir).joinpath(item["path"])
|
||||
if not p.parent.is_dir():
|
||||
p.parent.mkdir(parents=True)
|
||||
p.write_text(
|
||||
await self.factory.manager.dispatcher["os.request_content"](
|
||||
item["url"]
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def configuration(project_dir, env):
|
||||
assert is_platformio_project(project_dir)
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
|
||||
platform = PlatformFactory.from_env(env, autoinstall=True)
|
||||
platform_pkg = PlatformPackageManager().get_package(platform.get_dir())
|
||||
board_id = config.get(f"env:{env}", "board", None)
|
||||
|
||||
# frameworks
|
||||
frameworks = []
|
||||
for name in config.get(f"env:{env}", "framework", []):
|
||||
if name not in platform.frameworks:
|
||||
continue
|
||||
f_pkg_name = platform.frameworks[name].get("package")
|
||||
if not f_pkg_name:
|
||||
continue
|
||||
f_pkg = platform.get_package(f_pkg_name)
|
||||
if not f_pkg:
|
||||
continue
|
||||
f_manifest = platform.pm.load_manifest(f_pkg)
|
||||
frameworks.append(
|
||||
dict(
|
||||
name=name,
|
||||
title=f_manifest.get("title"),
|
||||
version=str(f_pkg.metadata.version),
|
||||
)
|
||||
)
|
||||
|
||||
return dict(
|
||||
platform=dict(
|
||||
ownername=(
|
||||
platform_pkg.metadata.spec.owner
|
||||
if platform_pkg.metadata.spec
|
||||
else None
|
||||
),
|
||||
name=platform.name,
|
||||
title=platform.title,
|
||||
version=str(platform_pkg.metadata.version),
|
||||
),
|
||||
board=(
|
||||
platform.board_config(board_id).get_brief_data()
|
||||
if board_id
|
||||
else None
|
||||
),
|
||||
frameworks=frameworks or None,
|
||||
)
|
||||
|
platformio/home/rpc/handlers/registry.py (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio.compat import aio_to_thread
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
from platformio.registry.client import RegistryClient
|
||||
|
||||
|
||||
class RegistryRPC(BaseRPCHandler):
|
||||
@staticmethod
|
||||
async def call_client(method, *args, **kwargs):
|
||||
try:
|
||||
client = RegistryClient()
|
||||
return await aio_to_thread(getattr(client, method), *args, **kwargs)
|
||||
except Exception as exc: # pylint: disable=bare-except
|
||||
raise JSONRPC20DispatchException(
|
||||
code=5000, message="Registry Call Error", data=str(exc)
|
||||
) from exc
|
@ -12,17 +12,25 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from urllib.parse import parse_qs
|
||||
|
||||
import ajsonrpc.utils
|
||||
import click
|
||||
from ajsonrpc.core import JSONRPC20Error, JSONRPC20Request
|
||||
from ajsonrpc.dispatcher import Dispatcher
|
||||
from ajsonrpc.manager import AsyncJSONRPCResponseManager
|
||||
from ajsonrpc.manager import AsyncJSONRPCResponseManager, JSONRPC20Response
|
||||
from starlette.endpoints import WebSocketEndpoint
|
||||
|
||||
from platformio.compat import aio_create_task, aio_get_running_loop
|
||||
from platformio.http import InternetConnectionError
|
||||
from platformio.proc import force_exit
|
||||
|
||||
# Remove this line when PR is merged
|
||||
# https://github.com/pavlov99/ajsonrpc/pull/22
|
||||
ajsonrpc.utils.is_invalid_params = lambda: False
|
||||
|
||||
|
||||
class JSONRPCServerFactoryBase:
|
||||
|
||||
connection_nums = 0
|
||||
shutdown_timer = None
|
||||
|
||||
@ -31,20 +39,25 @@ class JSONRPCServerFactoryBase:
|
||||
self.manager = AsyncJSONRPCResponseManager(
|
||||
Dispatcher(), is_server_error_verbose=True
|
||||
)
|
||||
self._clients = {}
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
def add_object_handler(self, handler, namespace):
|
||||
handler.factory = self
|
||||
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
|
||||
|
||||
def on_client_connect(self):
|
||||
def on_client_connect(self, connection, actor=None):
|
||||
self._clients[connection] = {"actor": actor}
|
||||
self.connection_nums += 1
|
||||
if self.shutdown_timer:
|
||||
self.shutdown_timer.cancel()
|
||||
self.shutdown_timer = None
|
||||
|
||||
def on_client_disconnect(self):
|
||||
def on_client_disconnect(self, connection):
|
||||
if connection in self._clients:
|
||||
del self._clients[connection]
|
||||
self.connection_nums -= 1
|
||||
if self.connection_nums < 1:
|
||||
self.connection_nums = 0
|
||||
@ -67,6 +80,14 @@ class JSONRPCServerFactoryBase:
|
||||
self.shutdown_timeout, _auto_shutdown_server
|
||||
)
|
||||
|
||||
async def notify_clients(self, method, params=None, actor=None):
|
||||
for client, options in self._clients.items():
|
||||
if actor and options["actor"] != actor:
|
||||
continue
|
||||
request = JSONRPC20Request(method, params, is_notification=True)
|
||||
await client.send_text(self.manager.serialize(request.body))
|
||||
return True
|
||||
|
||||
|
||||
class WebSocketJSONRPCServerFactory(JSONRPCServerFactoryBase):
|
||||
def __call__(self, *args, **kwargs):
|
||||
@ -81,17 +102,30 @@ class WebSocketJSONRPCServer(WebSocketEndpoint):
|
||||
|
||||
async def on_connect(self, websocket):
|
||||
await websocket.accept()
|
||||
self.factory.on_client_connect() # pylint: disable=no-member
|
||||
qs = parse_qs(self.scope.get("query_string", b""))
|
||||
actors = qs.get(b"actor")
|
||||
self.factory.on_client_connect( # pylint: disable=no-member
|
||||
websocket, actor=actors[0].decode() if actors else None
|
||||
)
|
||||
|
||||
async def on_receive(self, websocket, data):
|
||||
aio_create_task(self._handle_rpc(websocket, data))
|
||||
|
||||
async def on_disconnect(self, websocket, close_code):
|
||||
self.factory.on_client_disconnect() # pylint: disable=no-member
|
||||
self.factory.on_client_disconnect(websocket) # pylint: disable=no-member
|
||||
|
||||
async def _handle_rpc(self, websocket, data):
|
||||
# pylint: disable=no-member
|
||||
response = await self.factory.manager.get_response_for_payload(data)
|
||||
if response.error and response.error.data:
|
||||
click.secho("Error: %s" % response.error.data, fg="red", err=True)
|
||||
if InternetConnectionError.MESSAGE in response.error.data:
|
||||
response = JSONRPC20Response(
|
||||
id=response.id,
|
||||
error=JSONRPC20Error(
|
||||
code=4008,
|
||||
message="No Internet Connection",
|
||||
data=response.error.data,
|
||||
),
|
||||
)
|
||||
await websocket.send_text(self.factory.manager.serialize(response.body))
|
||||
|
@ -32,7 +32,9 @@ from platformio.home.rpc.handlers.ide import IDERPC
|
||||
from platformio.home.rpc.handlers.misc import MiscRPC
|
||||
from platformio.home.rpc.handlers.os import OSRPC
|
||||
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.home.rpc.handlers.platform import PlatformRPC
|
||||
from platformio.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.home.rpc.handlers.registry import RegistryRPC
|
||||
from platformio.home.rpc.server import WebSocketJSONRPCServerFactory
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import force_exit
|
||||
@ -43,7 +45,7 @@ class ShutdownMiddleware:
|
||||
self.app = app
|
||||
|
||||
async def __call__(self, scope, receive, send):
|
||||
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", {}):
|
||||
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", ""):
|
||||
await shutdown_server()
|
||||
await self.app(scope, receive, send)
|
||||
|
||||
@ -72,6 +74,8 @@ def run_server(host, port, no_open, shutdown_timeout, home_url):
|
||||
ws_rpc_factory.add_object_handler(OSRPC(), namespace="os")
|
||||
ws_rpc_factory.add_object_handler(PIOCoreRPC(), namespace="core")
|
||||
ws_rpc_factory.add_object_handler(ProjectRPC(), namespace="project")
|
||||
ws_rpc_factory.add_object_handler(PlatformRPC(), namespace="platform")
|
||||
ws_rpc_factory.add_object_handler(RegistryRPC(), namespace="registry")
|
||||
|
||||
path = urlparse(home_url).path
|
||||
routes = [
|
||||
|
@ -13,21 +13,21 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests.adapters
|
||||
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
|
||||
from urllib3.util.retry import Retry
|
||||
|
||||
from platformio import __check_internet_hosts__, app, util
|
||||
from platformio.cache import ContentCache, cleanup_content_cache
|
||||
from platformio.compat import is_proxy_set
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
|
||||
class HTTPClientError(PlatformioException):
|
||||
class HTTPClientError(UserSideException):
|
||||
def __init__(self, message, response=None):
|
||||
super().__init__()
|
||||
self.message = message
|
||||
@ -37,8 +37,7 @@ class HTTPClientError(PlatformioException):
|
||||
return self.message
|
||||
|
||||
|
||||
class InternetIsOffline(UserSideException):
|
||||
|
||||
class InternetConnectionError(UserSideException):
|
||||
MESSAGE = (
|
||||
"You are not connected to the Internet.\n"
|
||||
"PlatformIO needs the Internet connection to"
|
||||
@ -51,7 +50,10 @@ class HTTPSession(requests.Session):
|
||||
self._x_base_url = kwargs.pop("x_base_url") if "x_base_url" in kwargs else None
|
||||
super().__init__(*args, **kwargs)
|
||||
self.headers.update({"User-Agent": app.get_user_agent()})
|
||||
self.verify = app.get_setting("enable_proxy_strict_ssl")
|
||||
try:
|
||||
self.verify = app.get_setting("enable_proxy_strict_ssl")
|
||||
except PlatformioException:
|
||||
self.verify = True
|
||||
|
||||
def request( # pylint: disable=signature-differs,arguments-differ
|
||||
self, method, url, *args, **kwargs
|
||||
@ -61,9 +63,11 @@ class HTTPSession(requests.Session):
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
return super().request(
|
||||
method,
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url),
|
||||
(
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url)
|
||||
),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
@ -155,7 +159,10 @@ class HTTPClient:
|
||||
with ContentCache("http") as cc:
|
||||
result = cc.get(cache_key)
|
||||
if result is not None:
|
||||
return json.loads(result)
|
||||
try:
|
||||
return json.loads(result)
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
response = self.send_request(method, path, **kwargs)
|
||||
data = self._parse_json_response(response)
|
||||
cc.set(cache_key, response.text, cache_valid)
|
||||
@ -183,12 +190,11 @@ class HTTPClient:
|
||||
@util.memoized(expire="10s")
|
||||
def _internet_on():
|
||||
timeout = 2
|
||||
use_proxy = is_proxy_set()
|
||||
socket.setdefaulttimeout(timeout)
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
|
||||
if not os.getenv(var) and not os.getenv(var.lower()):
|
||||
continue
|
||||
if use_proxy:
|
||||
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
return True
|
||||
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
|
||||
@ -198,13 +204,22 @@ def _internet_on():
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# falling back to HTTPs, issue #4980
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
requests.get("https://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
except requests.exceptions.RequestException:
|
||||
pass
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def ensure_internet_on(raise_exception=False):
|
||||
result = _internet_on()
|
||||
if raise_exception and not result:
|
||||
raise InternetIsOffline()
|
||||
raise InternetConnectionError()
|
||||
return result
|
||||
|
||||
|
||||
|
@ -22,27 +22,23 @@ import semantic_version
|
||||
from platformio import __version__, app, exception, fs, telemetry
|
||||
from platformio.cache import cleanup_content_cache
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.http import HTTPClientError, InternetIsOffline, ensure_internet_on
|
||||
from platformio.http import HTTPClientError, InternetConnectionError, ensure_internet_on
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.system.prune import calculate_unnecessary_system_data
|
||||
|
||||
|
||||
def on_platformio_start(ctx, caller):
|
||||
def on_cmd_start(ctx, caller):
|
||||
app.set_session_var("command_ctx", ctx)
|
||||
set_caller(caller)
|
||||
telemetry.on_command()
|
||||
|
||||
telemetry.on_cmd_start(ctx)
|
||||
if PlatformioCLI.in_silence():
|
||||
return
|
||||
after_upgrade(ctx)
|
||||
|
||||
|
||||
def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
def on_cmd_end():
|
||||
if PlatformioCLI.in_silence():
|
||||
return
|
||||
|
||||
@ -51,7 +47,7 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
check_prune_system()
|
||||
except (
|
||||
HTTPClientError,
|
||||
InternetIsOffline,
|
||||
InternetConnectionError,
|
||||
exception.GetLatestVersionError,
|
||||
):
|
||||
click.secho(
|
||||
@ -61,31 +57,33 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
)
|
||||
|
||||
|
||||
def on_platformio_exception(e):
|
||||
telemetry.on_exception(e)
|
||||
def on_platformio_exception(exc):
|
||||
telemetry.log_exception(exc)
|
||||
|
||||
|
||||
def on_platformio_exit():
|
||||
telemetry.on_exit()
|
||||
|
||||
|
||||
def set_caller(caller=None):
|
||||
caller = caller or os.getenv("PLATFORMIO_CALLER")
|
||||
if not caller:
|
||||
if os.getenv("CODESPACES"):
|
||||
caller = "codespaces"
|
||||
elif os.getenv("VSCODE_PID") or os.getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif os.getenv("GITPOD_WORKSPACE_ID") or os.getenv("GITPOD_WORKSPACE_URL"):
|
||||
caller = "gitpod"
|
||||
if caller:
|
||||
return app.set_session_var("caller_id", caller)
|
||||
if os.getenv("CODESPACES"):
|
||||
caller = "codespaces"
|
||||
elif os.getenv("VSCODE_PID") or os.getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif os.getenv("GITPOD_WORKSPACE_ID") or os.getenv("GITPOD_WORKSPACE_URL"):
|
||||
caller = "gitpod"
|
||||
return app.set_session_var("caller_id", caller)
|
||||
app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
class Upgrader:
|
||||
def __init__(self, from_version, to_version):
|
||||
self.from_version = pepver_to_semver(from_version)
|
||||
self.to_version = pepver_to_semver(to_version)
|
||||
|
||||
self.from_version = from_version
|
||||
self.to_version = to_version
|
||||
self._upgraders = [
|
||||
(semantic_version.Version("3.5.0-a.2"), self._update_dev_platforms),
|
||||
(semantic_version.Version("4.4.0-a.8"), self._update_pkg_metadata),
|
||||
(semantic_version.Version("6.1.8-a.1"), self._appstate_migration),
|
||||
]
|
||||
|
||||
def run(self, ctx):
|
||||
@ -101,42 +99,43 @@ class Upgrader:
|
||||
return all(result)
|
||||
|
||||
@staticmethod
|
||||
def _update_dev_platforms(ctx):
|
||||
ctx.invoke(cmd_platform_update)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _update_pkg_metadata(_):
|
||||
pm = ToolPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
if not pkg.metadata or pkg.metadata.spec.external or pkg.metadata.spec.id:
|
||||
continue
|
||||
result = pm.search_registry_packages(PackageSpec(name=pkg.metadata.name))
|
||||
if len(result) != 1:
|
||||
continue
|
||||
result = result[0]
|
||||
pkg.metadata.spec = PackageSpec(
|
||||
id=result["id"],
|
||||
owner=result["owner"]["username"],
|
||||
name=result["name"],
|
||||
def _appstate_migration(_):
|
||||
state_path = app.resolve_state_path("core_dir", "appstate.json")
|
||||
if not os.path.isfile(state_path):
|
||||
return True
|
||||
app.delete_state_item("telemetry")
|
||||
created_at = app.get_state_item("created_at", None)
|
||||
if not created_at:
|
||||
state_stat = os.stat(state_path)
|
||||
app.set_state_item(
|
||||
"created_at",
|
||||
int(
|
||||
state_stat.st_birthtime
|
||||
if hasattr(state_stat, "st_birthtime")
|
||||
else state_stat.st_ctime
|
||||
),
|
||||
)
|
||||
pkg.dump_meta()
|
||||
return True
|
||||
|
||||
|
||||
def after_upgrade(ctx):
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
last_version = app.get_state_item("last_version", "0.0.0")
|
||||
if last_version == __version__:
|
||||
return
|
||||
last_version_str = app.get_state_item("last_version", "0.0.0")
|
||||
if last_version_str == __version__:
|
||||
return None
|
||||
|
||||
if last_version == "0.0.0":
|
||||
if last_version_str == "0.0.0":
|
||||
app.set_state_item("last_version", __version__)
|
||||
elif pepver_to_semver(last_version) > pepver_to_semver(__version__):
|
||||
return print_welcome_banner()
|
||||
|
||||
last_version = pepver_to_semver(last_version_str)
|
||||
current_version = pepver_to_semver(__version__)
|
||||
|
||||
if last_version > current_version and not last_version.prerelease:
|
||||
click.secho("*" * terminal_width, fg="yellow")
|
||||
click.secho(
|
||||
"Obsolete PIO Core v%s is used (previous was %s)"
|
||||
% (__version__, last_version),
|
||||
% (__version__, last_version_str),
|
||||
fg="yellow",
|
||||
)
|
||||
click.secho("Please remove multiple PIO Cores from a system:", fg="yellow")
|
||||
@ -146,45 +145,50 @@ def after_upgrade(ctx):
|
||||
fg="cyan",
|
||||
)
|
||||
click.secho("*" * terminal_width, fg="yellow")
|
||||
return
|
||||
else:
|
||||
click.secho("Please wait while upgrading PlatformIO...", fg="yellow")
|
||||
return None
|
||||
|
||||
# Update PlatformIO's Core packages
|
||||
cleanup_content_cache("http")
|
||||
update_core_packages()
|
||||
click.secho("Please wait while upgrading PlatformIO...", fg="yellow")
|
||||
|
||||
u = Upgrader(last_version, __version__)
|
||||
if u.run(ctx):
|
||||
app.set_state_item("last_version", __version__)
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s!\n" % __version__,
|
||||
fg="green",
|
||||
)
|
||||
telemetry.send_event(
|
||||
category="Auto",
|
||||
action="Upgrade",
|
||||
label="%s > %s" % (last_version, __version__),
|
||||
)
|
||||
else:
|
||||
raise exception.UpgradeError("Auto upgrading...")
|
||||
# Update PlatformIO's Core packages
|
||||
cleanup_content_cache("http")
|
||||
update_core_packages()
|
||||
|
||||
# PlatformIO banner
|
||||
u = Upgrader(last_version, current_version)
|
||||
if u.run(ctx):
|
||||
app.set_state_item("last_version", __version__)
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s!\n" % __version__,
|
||||
fg="green",
|
||||
)
|
||||
telemetry.log_event(
|
||||
"pio_upgrade_core",
|
||||
{
|
||||
"label": "%s > %s" % (last_version_str, __version__),
|
||||
"from_version": last_version_str,
|
||||
"to_version": __version__,
|
||||
},
|
||||
)
|
||||
|
||||
return print_welcome_banner()
|
||||
|
||||
|
||||
def print_welcome_banner():
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
click.echo("*" * terminal_width)
|
||||
click.echo("If you like %s, please:" % (click.style("PlatformIO", fg="cyan")))
|
||||
click.echo(
|
||||
"- %s us on Twitter to stay up-to-date "
|
||||
"on the latest project news > %s"
|
||||
% (
|
||||
click.style("follow", fg="cyan"),
|
||||
click.style("https://twitter.com/PlatformIO_Org", fg="cyan"),
|
||||
)
|
||||
)
|
||||
click.echo(
|
||||
"- %s it on GitHub > %s"
|
||||
% (
|
||||
click.style("star", fg="cyan"),
|
||||
click.style("https://github.com/platformio/platformio", fg="cyan"),
|
||||
click.style("https://github.com/platformio/platformio-core", fg="cyan"),
|
||||
)
|
||||
)
|
||||
click.echo(
|
||||
"- %s us on LinkedIn to stay up-to-date "
|
||||
"on the latest project news > %s"
|
||||
% (
|
||||
click.style("follow", fg="cyan"),
|
||||
click.style("https://www.linkedin.com/company/platformio/", fg="cyan"),
|
||||
)
|
||||
)
|
||||
if not os.getenv("PLATFORMIO_IDE"):
|
||||
@ -237,7 +241,7 @@ def check_platformio_upgrade():
|
||||
else:
|
||||
click.secho("platformio upgrade", fg="cyan", nl=False)
|
||||
click.secho("` or `", fg="yellow", nl=False)
|
||||
click.secho("pip install -U platformio", fg="cyan", nl=False)
|
||||
click.secho("python -m pip install -U platformio", fg="cyan", nl=False)
|
||||
click.secho("` command.", fg="yellow")
|
||||
click.secho("Changes: ", fg="yellow", nl=False)
|
||||
click.secho("https://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
|
@ -20,7 +20,7 @@ import click
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.exception import ReturnErrorCode, UserSideException
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.proc import get_pythonexe_path, where_is_program
|
||||
|
||||
|
||||
@click.command("exec", short_help="Run command from package tool")
|
||||
@ -52,9 +52,13 @@ def package_exec_cmd(obj, package, call, args):
|
||||
|
||||
inject_pkg_to_environ(pkg)
|
||||
os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()
|
||||
|
||||
# inject current python interpreter on Windows
|
||||
if IS_WINDOWS and args and args[0].endswith(".py"):
|
||||
if args and args[0].endswith(".py"):
|
||||
args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
|
||||
if not os.path.exists(args[1]):
|
||||
args[1] = where_is_program(args[1])
|
||||
|
||||
result = None
|
||||
try:
|
||||
run_options = dict(shell=call is not None, env=os.environ)
|
||||
|
@ -20,6 +20,7 @@ import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
@ -39,7 +40,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True),
|
||||
)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True)
|
||||
@ -55,7 +56,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
@click.option(
|
||||
"--storage-dir",
|
||||
default=None,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True),
|
||||
help="Custom Package Manager storage for global packages",
|
||||
)
|
||||
@click.option("-f", "--force", is_flag=True, help="Reinstall package if it exists")
|
||||
@ -120,7 +121,7 @@ def install_project_env_dependencies(project_env, options=None):
|
||||
# custom tools
|
||||
if options.get("tools"):
|
||||
installed_conds.append(_install_project_env_custom_tools(project_env, options))
|
||||
# custom ibraries
|
||||
# custom libraries
|
||||
if options.get("libraries"):
|
||||
installed_conds.append(
|
||||
_install_project_env_custom_libraries(project_env, options)
|
||||
@ -152,6 +153,8 @@ def _install_project_env_platform(project_env, options):
|
||||
skip_dependencies=options.get("skip_dependencies"),
|
||||
force=options.get("force"),
|
||||
)
|
||||
# ensure SCons is installed
|
||||
get_core_package_dir("tool-scons")
|
||||
return not already_up_to_date
|
||||
|
||||
|
||||
@@ -206,7 +209,7 @@ def _install_project_env_libraries(project_env, options):
     config = ProjectConfig.get_instance()

     compatibility_qualifiers = {}
-    if config.get(f"env:{project_env}", "platform"):
+    if config.get(f"env:{project_env}", "platform", None):
         try:
             p = PlatformFactory.new(config.get(f"env:{project_env}", "platform"))
             compatibility_qualifiers["platforms"] = [p.name]
@@ -219,9 +222,11 @@ def _install_project_env_libraries(project_env, options):

     env_lm = LibraryPackageManager(
         os.path.join(config.get("platformio", "libdeps_dir"), project_env),
-        compatibility=PackageCompatibility(**compatibility_qualifiers)
-        if compatibility_qualifiers
-        else None,
+        compatibility=(
+            PackageCompatibility(**compatibility_qualifiers)
+            if compatibility_qualifiers
+            else None
+        ),
     )
     private_lm = LibraryPackageManager(
         os.path.join(config.get("platformio", "lib_dir"))
@@ -292,7 +297,11 @@ def _install_project_private_library_deps(private_pkg, private_lm, env_lm, optio
         if not spec.external and not spec.owner:
             continue
         pkg = private_lm.get_package(spec)
-        if not pkg and not env_lm.get_package(spec):
+        if (
+            not pkg
+            and not private_lm.get_package(spec)
+            and not env_lm.get_package(spec)
+        ):
             pkg = env_lm.install(
                 spec,
                 skip_dependencies=True,
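The @@ -219 hunk above only re-wraps the conditional compatibility argument in parentheses (the style newer formatters prefer), while @@ -292 widens the guard before installing a private dependency. For the optional-keyword pattern itself, here is a small self-contained sketch; Compatibility and build_manager_kwargs are illustrative stand-ins, not PlatformIO APIs:

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Compatibility:
    # illustrative stand-in for PackageCompatibility
    platforms: List[str]


def build_manager_kwargs(storage_dir: str, qualifiers: dict) -> dict:
    # pass a compatibility object only when qualifiers were collected,
    # otherwise fall back to None, the same shape as the hunk above
    return dict(
        storage_dir=storage_dir,
        compatibility=(
            Compatibility(**qualifiers) if qualifiers else None
        ),
    )


print(build_manager_kwargs("/tmp/libdeps", {"platforms": ["atmelavr"]}))
print(build_manager_kwargs("/tmp/libdeps", {}))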
@@ -22,6 +22,7 @@ from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.manager.platform import PlatformPackageManager
 from platformio.package.manager.tool import ToolPackageManager
 from platformio.package.meta import PackageItem, PackageSpec
+from platformio.platform.exception import UnknownPlatform
 from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig

@@ -31,7 +32,7 @@ from platformio.project.config import ProjectConfig
     "-d",
     "--project-dir",
     default=os.getcwd,
-    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
 )
 @click.option("-e", "--environment", "environments", multiple=True)
 @click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True)
@@ -41,7 +42,7 @@ from platformio.project.config import ProjectConfig
 @click.option(
     "--storage-dir",
     default=None,
-    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
     help="Custom Package Manager storage for global packages",
 )
 @click.option("--only-platforms", is_flag=True, help="List only platform packages")
@@ -59,7 +60,8 @@ def humanize_package(pkg, spec=None, verbose=False):
     if spec and not isinstance(spec, PackageSpec):
         spec = PackageSpec(spec)
     data = [
-        click.style("{name} @ {version}".format(**pkg.metadata.as_dict()), fg="cyan")
+        click.style(pkg.metadata.name, fg="cyan"),
+        click.style(f"@ {str(pkg.metadata.version)}", bold=True),
     ]
     extra_data = ["required: %s" % (spec.humanize() if spec else "Any")]
     if verbose:
@@ -84,10 +86,16 @@ def print_dependency_tree(pm, specs=None, filter_specs=None, level=0, verbose=Fa
     if not candidates:
         return
     candidates = sorted(candidates.values(), key=lambda item: item[0].metadata.name)
+
     for index, (pkg, spec) in enumerate(candidates):
         if filtered_pkgs and not _pkg_tree_contains(pm, pkg, filtered_pkgs):
             continue
-        dependencies = pm.get_pkg_dependencies(pkg)
+        printed_pkgs = pm.memcache_get("__printed_pkgs", [])
+        if printed_pkgs and pkg.path in printed_pkgs:
+            continue
+        printed_pkgs.append(pkg.path)
+        pm.memcache_set("__printed_pkgs", printed_pkgs)
+
         click.echo(
             "%s%s %s"
             % (
@@ -100,6 +108,8 @@ def print_dependency_tree(pm, specs=None, filter_specs=None, level=0, verbose=Fa
                 ),
             )
         )
+
+        dependencies = pm.get_pkg_dependencies(pkg)
         if dependencies:
             print_dependency_tree(
                 pm,
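The print_dependency_tree hunks above skip packages whose path was already printed, using the package manager's memcache as the "seen" list, and only then fetch dependencies. A small self-contained sketch of the same de-duplicated tree walk, with a plain dict standing in for the memcache:

def print_tree(pkg, get_deps, cache=None, level=0):
    """Print each node of a dependency tree once (sketch; PlatformIO keeps
    the 'already printed' list in the package manager's memcache instead)."""
    cache = cache if cache is not None else {"printed": []}
    if pkg in cache["printed"]:
        return
    cache["printed"].append(pkg)

    print("%s%s" % ("    " * level, pkg))

    for dep in get_deps(pkg) or []:
        print_tree(dep, get_deps, cache, level + 1)


# usage: a toy dependency graph; the shared node "c" is printed only once
graph = {"a": ["b", "c"], "b": ["c"], "c": []}
print_tree("a", graph.get)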
@@ -127,20 +137,20 @@ def list_global_packages(options):
         ("libraries", LibraryPackageManager(options.get("storage_dir"))),
     ]
     only_packages = any(
-        options.get(type_) or options.get(f"only_{type_}") for (type_, _) in data
+        options.get(typex) or options.get(f"only_{typex}") for (typex, _) in data
     )
-    for (type_, pm) in data:
+    for typex, pm in data:
         skip_conds = [
             only_packages
-            and not options.get(type_)
-            and not options.get(f"only_{type_}"),
+            and not options.get(typex)
+            and not options.get(f"only_{typex}"),
             not pm.get_installed(),
         ]
         if any(skip_conds):
             continue
-        click.secho(type_.capitalize(), bold=True)
+        click.secho(typex.capitalize(), bold=True)
         print_dependency_tree(
-            pm, filter_specs=options.get(type_), verbose=options.get("verbose")
+            pm, filter_specs=options.get(typex), verbose=options.get("verbose")
         )
         click.echo()

@@ -148,12 +158,12 @@ def list_global_packages(options):
 def list_project_packages(options):
     environments = options["environments"]
     only_packages = any(
-        options.get(type_) or options.get(f"only_{type_}")
-        for type_ in ("platforms", "tools", "libraries")
+        options.get(typex) or options.get(f"only_{typex}")
+        for typex in ("platforms", "tools", "libraries")
     )
     only_platform_packages = any(
-        options.get(type_) or options.get(f"only_{type_}")
-        for type_ in ("platforms", "tools")
+        options.get(typex) or options.get(f"only_{typex}")
+        for typex in ("platforms", "tools")
     )
     only_library_packages = options.get("libraries") or options.get("only_libraries")

@@ -178,20 +188,20 @@ def list_project_packages(options):


 def print_project_env_platform_packages(project_env, options):
-    config = ProjectConfig.get_instance()
-    platform = config.get(f"env:{project_env}", "platform")
-    if not platform:
-        return None
-    pkg = PlatformPackageManager().get_package(platform)
-    if not pkg:
+    try:
+        p = PlatformFactory.from_env(project_env)
+    except UnknownPlatform:
         return None
     click.echo(
         "Platform %s"
-        % (humanize_package(pkg, platform, verbose=options.get("verbose")))
+        % (
+            humanize_package(
+                PlatformPackageManager().get_package(p.get_dir()),
+                p.config.get(f"env:{project_env}", "platform"),
+                verbose=options.get("verbose"),
+            )
+        )
     )
-    p = PlatformFactory.new(pkg)
     if project_env:
         p.configure_project_packages(project_env)
     print_dependency_tree(
         p.pm,
         specs=[p.get_package_spec(name) for name in p.packages],
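The list_global_packages hunk above renames the loop variable type_ to typex but keeps the filtering idea: a section is printed only when it was explicitly requested (or nothing specific was requested) and the manager actually has something installed. A reduced sketch with plain lists in place of the package managers (names and data are illustrative):

def list_sections(options, data):
    """data is a list of (section_name, installed_items) pairs (sketch)."""
    only_packages = any(
        options.get(typex) or options.get(f"only_{typex}") for (typex, _) in data
    )
    for typex, installed in data:
        skip_conds = [
            only_packages
            and not options.get(typex)
            and not options.get(f"only_{typex}"),
            not installed,
        ]
        if any(skip_conds):
            continue
        print(typex.capitalize())
        for item in installed:
            print("  -", item)


# usage: only the "tools" section is requested, so "libraries" is skipped
list_sections(
    {"only_tools": True},
    [("tools", ["tool-scons"]), ("libraries", ["ArduinoJson"])],
)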
@@ -58,14 +58,13 @@ class OutdatedCandidate:
     "-d",
     "--project-dir",
     default=os.getcwd,
-    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
 )
 @click.option("-e", "--environment", "environments", multiple=True)
 def package_outdated_cmd(project_dir, environments):
-    candidates = fetch_outdated_candidates(
-        project_dir, environments, with_progress=True
-    )
-    print_outdated_candidates(candidates)
+    with fs.cd(project_dir):
+        candidates = fetch_outdated_candidates(environments, with_progress=True)
+        print_outdated_candidates(candidates)


 def print_outdated_candidates(candidates):
@@ -126,8 +125,10 @@ def get_candidate_update_color(outdated):
     return None


-def fetch_outdated_candidates(project_dir, environments, with_progress=False):
+def fetch_outdated_candidates(environments, with_progress=False):
     candidates = []
+    config = ProjectConfig.get_instance()
+    config.validate(environments)

     def _add_candidate(data):
         new_candidate = OutdatedCandidate(
@@ -139,20 +140,16 @@ def fetch_outdated_candidates(project_dir, environments, with_progress=False):
                 return
         candidates.append(new_candidate)

-    with fs.cd(project_dir):
-        config = ProjectConfig.get_instance()
-        config.validate(environments)
-        # platforms
-        for item in find_platform_candidates(config, environments):
-            _add_candidate(item)
-            # platform package dependencies
-            for dep_item in find_platform_dependency_candidates(item["env"]):
-                _add_candidate(dep_item)
+    # platforms
+    for item in find_platform_candidates(config, environments):
+        _add_candidate(item)
+        # platform package dependencies
+        for dep_item in find_platform_dependency_candidates(item):
+            _add_candidate(dep_item)

-        # libraries
-        for item in find_library_candidates(config, environments):
-            _add_candidate(item)
+    # libraries
+    for item in find_library_candidates(config, environments):
+        _add_candidate(item)

     result = []
     if not with_progress:
@@ -172,7 +169,7 @@ def find_platform_candidates(config, environments):
     result = []
     pm = PlatformPackageManager()
     for env in config.envs():
-        platform = config.get(f"env:{env}", "platform")
+        platform = config.get(f"env:{env}", "platform", None)
         if not platform or (environments and env not in environments):
             continue
         spec = PackageSpec(platform)
@@ -183,14 +180,13 @@ def find_platform_candidates(config, environments):
     return result


-def find_platform_dependency_candidates(platform_candidate):
+def find_platform_dependency_candidates(env):
     result = []
-    p = PlatformFactory.new(platform_candidate["spec"])
-    p.configure_project_packages(platform_candidate["env"])
+    p = PlatformFactory.from_env(env)
     for pkg in p.get_installed_packages():
         result.append(
             dict(
-                env=platform_candidate["env"],
+                env=env,
                 pm=p.pm,
                 pkg=pkg,
                 spec=p.get_package_spec(pkg.metadata.name),
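The outdated command now wraps its work in fs.cd(project_dir), so ProjectConfig.get_instance() and config.validate() run relative to the project instead of threading project_dir through every helper. PlatformIO ships its own fs.cd; the sketch below is only an illustration of such a change-directory context manager:

import os
from contextlib import contextmanager


@contextmanager
def cd(path):
    """Temporarily change the working directory, restoring it on exit."""
    prev_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(prev_cwd)


# usage: everything inside the block sees the target directory as CWD
with cd("/tmp"):
    print(os.getcwd())
print(os.getcwd())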
@@ -26,7 +26,7 @@ from platformio.package.pack import PackagePacker
     "package",
     default=os.getcwd,
     metavar="<source directory, tar.gz or zip>",
-    type=click.Path(exists=True, file_okay=True, dir_okay=True, resolve_path=True),
+    type=click.Path(exists=True, file_okay=True, dir_okay=True),
 )
 @click.option(
     "-o", "--output", help="A destination path (folder or a full path to file)"
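Here, as in the other commands, resolve_path=True is dropped, so click no longer canonicalizes the argument; the publish hunks below compensate with an explicit os.path.abspath(package). A minimal sketch of that combination (the command name is made up):

import os

import click


@click.command()
@click.argument(
    "package",
    default=os.getcwd,
    type=click.Path(exists=True, file_okay=True, dir_okay=True),
)
def pack(package):
    # resolve_path=True would have done this for us; doing it by hand keeps
    # the raw user input available and still yields an absolute path
    package = os.path.abspath(package)
    click.echo(package)


if __name__ == "__main__":
    pack()  # pylint: disable=no-value-for-parameter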
@@ -47,7 +47,7 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
     "package",
     default=os.getcwd,
     metavar="<source directory, tar.gz or zip>",
-    type=click.Path(exists=True, file_okay=True, dir_okay=True, resolve_path=True),
+    type=click.Path(exists=True, file_okay=True, dir_okay=True),
 )
 @click.option(
     "--owner",
@@ -56,7 +56,7 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
 )
 @click.option(
     "--type",
-    "type_",
+    "typex",
     type=click.Choice(list(PackageType.items().values())),
     help="Custom package type",
 )
@@ -82,10 +82,11 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
     help="Do not show interactive prompt",
     hidden=True,
 )
-def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
-    package, owner, type_, released_at, private, notify, no_interactive, non_interactive
+def package_publish_cmd( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals
+    package, owner, typex, released_at, private, notify, no_interactive, non_interactive
 ):
     click.secho("Preparing a package...", fg="cyan")
+    package = os.path.abspath(package)
     no_interactive = no_interactive or non_interactive
     owner = owner or AccountClient().get_logged_username()
     do_not_pack = (
@@ -103,14 +104,14 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
     p = PackagePacker(package)
     archive_path = p.pack()

-    type_ = type_ or PackageType.from_archive(archive_path)
+    typex = typex or PackageType.from_archive(archive_path)
     manifest = ManifestSchema().load_manifest(
         ManifestParserFactory.new_from_archive(archive_path).as_dict()
     )
     name = manifest.get("name")
     version = manifest.get("version")
     data = [
-        ("Type:", type_),
+        ("Type:", typex),
         ("Owner:", owner),
         ("Name:", name),
         ("Version:", version),
@@ -124,13 +125,13 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
     check_archive_file_names(archive_path)

     # look for duplicates
-    check_package_duplicates(owner, type_, name, version, manifest.get("system"))
+    check_package_duplicates(owner, typex, name, version, manifest.get("system"))

     if not no_interactive:
         click.confirm(
             "Are you sure you want to publish the %s %s to the registry?\n"
             % (
-                type_,
+                typex,
                 click.style(
                     "%s/%s@%s" % (owner, name, version),
                     fg="cyan",
@@ -146,7 +147,7 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
     )
     click.echo("Publishing...")
     response = RegistryClient().publish_package(
-        owner, type_, archive_path, released_at, private, notify
+        owner, typex, archive_path, released_at, private, notify
     )
     if not do_not_pack:
         os.remove(archive_path)
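The publish hunks rename the option's target variable from type_ to typex; with click that is just the extra name passed to @click.option, so the command-line flag stays --type. A short illustrative sketch (the choices here are examples, not the registry's actual PackageType values):

import click


@click.command()
@click.option(
    "--type",
    "typex",  # expose the value as "typex" so it never shadows the builtin "type"
    type=click.Choice(["library", "tool", "platform"]),
    help="Custom package type",
)
def publish(typex):
    click.echo(f"type={typex}")


if __name__ == "__main__":
    publish()  # pylint: disable=no-value-for-parameter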
Some files were not shown because too many files have changed in this diff.