Mirror of https://github.com/platformio/platformio-core.git, synced 2025-12-23 15:18:03 +01:00
Compare commits
329 Commits
.github/workflows/core.yml (vendored, 10 lines changed)

@@ -8,14 +8,19 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [3.6, 3.7, 3.8, 3.9]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
exclude:
- os: macos-latest
python-version: "3.6"
- os: windows-latest
python-version: "3.10"
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies

@@ -42,3 +47,4 @@ jobs:
job_name: '*Core*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
.github/workflows/docs.yml (vendored, 81 lines changed)

@@ -4,13 +4,14 @@ on: [push, pull_request]

jobs:
build:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies

@@ -29,4 +30,80 @@ jobs:
type: ${{ job.status }}
job_name: '*Docs*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}

- name: Preserve Docs
if: ${{ github.event_name == 'push' }}
run: |
tar -czvf docs.tar.gz -C docs/_build html rtdpage

- name: Save artifact
if: ${{ github.event_name == 'push' }}
uses: actions/upload-artifact@v2
with:
name: docs
path: ./docs.tar.gz

deploy:
name: Deploy Docs
needs: build
runs-on: ubuntu-latest
env:
DOCS_REPO: platformio/platformio-docs
DOCS_DIR: platformio-docs
LATEST_DOCS_DIR: latest-docs
RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
with:
name: docs
- name: Unpack artifact
run: |
mkdir ./${{ env.LATEST_DOCS_DIR }}
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
- name: Delete Artifact
uses: geekyeggo/delete-artifact@v1
with:
name: docs
- name: Select Docs type
id: get-destination-dir
run: |
if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
echo "::set-output name=dst_dir::stable"
else
echo "::set-output name=dst_dir::latest"
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v2
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}
ref: gh-pages
- name: Synchronize Docs
run: |
rm -rf ${{ env.DOCS_DIR }}/.git
rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
rm -rf ${{ env.DOCS_DIR }}/page
mkdir -p ${{ env.DOCS_DIR }}/page
cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
fi
- name: Validate Docs
run: |
if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
echo "Docs folder is empty. Aborting!"
exit 1
fi
- name: Deploy to Github Pages
uses: peaceiris/actions-gh-pages@v3
with:
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
external_repository: ${{ env.DOCS_REPO }}
publish_dir: ./${{ env.DOCS_DIR }}
commit_message: Sync Docs
.github/workflows/examples.yml (vendored, 3 lines changed)

@@ -7,7 +7,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-16.04, windows-latest, macos-latest]
os: [ubuntu-18.04, windows-latest, macos-latest]
python-version: [3.7]
runs-on: ${{ matrix.os }}
steps:

@@ -65,3 +65,4 @@ jobs:
job_name: '*Examples*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
.pylintrc

@@ -15,6 +15,8 @@ disable=
useless-object-inheritance,
useless-import-alias,
bad-option-value,
consider-using-dict-items,
consider-using-f-string,

; PY2 Compat
super-with-arguments,
.readthedocs.yml (file removed)

@@ -1,12 +0,0 @@
# See https://docs.readthedocs.io/en/stable/config-file/index.html

version: 2

sphinx:
configuration: docs/conf.py

formats:
- pdf

submodules:
include: all
CONTRIBUTING.md

@@ -3,7 +3,7 @@ Contributing

To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.

1. Fork the repository on GitHub.
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`

@@ -18,4 +18,4 @@ To get started, <a href="https://cla-assistant.io/platformio/platformio-core">si
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub.
11. Submit a Pull Request on GitHub
HISTORY.rst (193 lines changed)

@@ -8,11 +8,114 @@ PlatformIO Core 5

**A professional collaborative platform for embedded development**

5.2.5 (2022-02-10)
~~~~~~~~~~~~~~~~~~

- Improved support for private packages in `PlatformIO Registry <https://registry.platformio.org/>`__
- Improved checking of available Internet connection for IPv6-only workstations (`pull #4151 <https://github.com/platformio/platformio-core/pull/4151>`_)
- Better detecting of default PlatformIO project directory on Linux OS (`pull #4158 <https://github.com/platformio/platformio-core/pull/4158>`_)
- Respect disabling debugging server from "platformio.ini" passing an empty value to the `debug_server <https://docs.platformio.org/en/latest/projectconf/section_env_debug.html#debug-server>`__ option
- Fixed a "module 'asyncio' has no attribute 'run'" error when launching PIO Home using Python 3.6 (`issue #4169 <https://github.com/platformio/platformio-core/issues/4169>`_)
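The 5.2.5 entry above notes that the debugging server can be disabled by giving ``debug_server`` an empty value in "platformio.ini". A minimal sketch of that pattern; the environment name, platform, board, and debug tool below are illustrative assumptions, not taken from this changelog:

```ini
; hypothetical environment; only the empty debug_server value reflects the entry above
[env:disabled_server_example]
platform = ststm32          ; assumed platform
board = nucleo_f401re       ; assumed board
debug_tool = stlink         ; assumed debug probe
debug_server =              ; empty value: do not launch an external debugging server
```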
5.2.4 (2021-12-15)
~~~~~~~~~~~~~~~~~~

- Added support for a new ``headers`` field in `library.json <https://docs.platformio.org/en/latest/librarymanager/config.html>`__ (declare a list of header files that can be included in a project source files using ``#include <...>`` directive)
- Improved tab completion support for Bash, ZSH, and Fish shells (`issue #4114 <https://github.com/platformio/platformio-core/issues/4114>`_)
- Improved support for projects located on a network share (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_, `issue #3926 <https://github.com/platformio/platformio-core/issues/3926>`_, `issue #4099 <https://github.com/platformio/platformio-core/issues/4099>`_)
- Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBon, etc) (`issue #3865 <https://github.com/platformio/platformio-core/issues/3865>`_)
- Upgraded build engine to the SCons 4.3 (`release notes <https://github.com/SCons/scons/blob/rel_4.3.0/CHANGES.txt>`__)
- Fixed an issue with the CLion project generator when a macro contains a space (`issue #4102 <https://github.com/platformio/platformio-core/issues/4102>`_)
- Fixed an issue with the NetBeans project generator when the path to PlatformIO contains a space (`issue #4096 <https://github.com/platformio/platformio-core/issues/4096>`_)
- Fixed an issue when the system environment variable does not override a project configuration option (`issue #4125 <https://github.com/platformio/platformio-core/issues/4125>`_)
- Fixed an issue when referencing ``*_dir`` option from a custom project configuration environment (`issue #4110 <https://github.com/platformio/platformio-core/issues/4110>`_)
- Fixed an issue with the CLion template that generated a broken CMake file if user's home directory contained an unescaped backslash (`issue #4071 <https://github.com/platformio/platformio-core/issues/4071>`_)
- Fixed an issue with wrong detecting Windows architecture when Python 32bit is used (`issue #4134 <https://github.com/platformio/platformio-core/issues/4134>`_)

5.2.3 (2021-11-05)
~~~~~~~~~~~~~~~~~~

- Automatically synchronize active projects between IDE and `PlatformIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
- Added support for custom `device monitor filters <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #3924 <https://github.com/platformio/platformio-core/issues/3924>`_)
- Show human-readable message when infinite recursion is detected while processing `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__ (`issue #3883 <https://github.com/platformio/platformio-core/issues/3883>`_)
- Improved directory interpolation (``${platformio.***_dir}``) in `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ configuration file (`issue #3934 <https://github.com/platformio/platformio-core/issues/3934>`_)
- Ignore resolving of SCons variables (e.g., ``${(SOURCE.get_abspath())}``) when preprocessing interpolations (`issue #3933 <https://github.com/platformio/platformio-core/issues/3933>`_)
- Added "inc" as a sign that it's the root of the library (`issue #4093 <https://github.com/platformio/platformio-core/issues/4093>`_)
- Fixed an issue when the ``$PROJECT_DIR`` variable was not properly replaced in the `debug_server <https://docs.platformio.org/en/latest/projectconf/section_env_debug.html#debug-server>`__ option (`issue #4086 <https://github.com/platformio/platformio-core/issues/4086>`_)
- Fixed an issue when `PIO Remote <https://docs.platformio.org/en/latest/plus/pio-remote.html>`__ device monitor crashes on the first keypress (`issue #3832 <https://github.com/platformio/platformio-core/issues/3832>`_)
- Fixed "Do not know how to make File target 'debug'" issue when debugging project using `CLion IDE <https://docs.platformio.org/en/latest/integration/ide/clion.html>`__ (`pull #4089 <https://github.com/platformio/platformio-core/issues/4089>`_)
- Fixed "UnicodeEncodeError" when a build output contains non-ASCII characters (`issue #3971 <https://github.com/platformio/platformio-core/issues/3971>`_)
- Fixed an issue when VSCode's debugger does not the honor default environment (`issue #4098 <https://github.com/platformio/platformio-core/issues/4098>`_)
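Several 5.2.3 entries above concern interpolation of ``${platformio.***_dir}`` values inside "platformio.ini". A hedged sketch of what such interpolation typically looks like; the paths and option values are assumptions for illustration only:

```ini
[platformio]
; reuse a built-in directory option via interpolation (cache path assumed)
build_cache_dir = ${platformio.workspace_dir}/.cache

[env:native]
platform = native
; pass another interpolated directory into the build (macro name assumed)
build_flags = -DDATA_ROOT='"${platformio.data_dir}"'
```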
5.2.2 (2021-10-20)
~~~~~~~~~~~~~~~~~~

- Override debugging firmware loading mode using ``--load-mode`` option for `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ command
- Added support for CLion IDE 2021.3 (`pull #4085 <https://github.com/platformio/platformio-core/issues/4085>`_)
- Removed debugging "legacy Click" message from CLI (`issue #4083 <https://github.com/platformio/platformio-core/issues/4083>`_)
- Fixed a "TypeError: sequence item 1: expected str instance, list found" issue when extending configuration option in `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ with the multi-line default value (`issue #4082 <https://github.com/platformio/platformio-core/issues/4082>`_)
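The last 5.2.2 fix refers to extending a "platformio.ini" option that carries a multi-line value. A minimal sketch of that pattern; the section and flag names are illustrative assumptions:

```ini
; assumed base section reused by concrete environments
[common]
build_flags =
    -Wall
    -D FIRMWARE_REV=1

[env:board_a]
extends = common
; extend the inherited multi-line option with an extra flag (flag name assumed)
build_flags =
    ${common.build_flags}
    -D BOARD_A_SPECIFIC
```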
5.2.1 (2021-10-11)
~~~~~~~~~~~~~~~~~~

- Clean a build environment and installed library dependencies using a new ``cleanall`` target (`issue #4062 <https://github.com/platformio/platformio-core/issues/4062>`_)
- Override a default library builder via a new ``builder`` field in a ``build`` group of `library.json <https://docs.platformio.org/en/latest/librarymanager/config.html#build>`__ manifest (`issue #3957 <https://github.com/platformio/platformio-core/issues/3957>`_)
- Updated `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.6 with new checks, increased reliability of advanced addons (MISRA/CERT) and various improvements
- Handle the "test" folder as a part of CLion project (`issue #4005 <https://github.com/platformio/platformio-core/issues/4005>`_)
- Improved handling of a library root based on "Conan" or "CMake" build systems (`issue #3887 <https://github.com/platformio/platformio-core/issues/3887>`_)
- Fixed a "KeyError: Invalid board option 'build.cpu'" when using a precompiled library with a board that does not have a CPU field in the manifest (`issue #4056 <https://github.com/platformio/platformio-core/issues/4056>`_)
- Fixed a "FileExist" error when the `platformio ci <https://docs.platformio.org/en/latest/userguide/cmd_ci.html>`__ command is used in pair with the ``--keep-build-dir`` option (`issue #4011 <https://github.com/platformio/platformio-core/issues/4011>`_)
- Fixed an issue with draft values of C++ language standards that broke static analysis via Cppcheck (`issue #3944 <https://github.com/platformio/platformio-core/issues/3944>`_)

5.2.0 (2021-09-13)
~~~~~~~~~~~~~~~~~~

* **PlatformIO Debugging**

- Boosted `PlatformIO Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
- `Debug unit tests <https://docs.platformio.org/en/latest/plus/debugging.html#debug-unit-tests>`__ created with `PlatformIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html>`__ solution (`issue #948 <https://github.com/platformio/platformio-core/issues/948>`_)
- Debug native (desktop) applications on a host machine (`issue #980 <https://github.com/platformio/platformio-core/issues/980>`_)
- Support debugging on Windows using Windows CMD/CLI (`pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__) (`issue #3793 <https://github.com/platformio/platformio-core/issues/3793>`_)
- Configure a custom pattern to determine when debugging server is started with a new `debug_server_ready_pattern <https://docs.platformio.org/en/latest/projectconf/section_env_debug.html#debug-server-ready-pattern>`__ option
- Fixed an issue with silent hanging when a custom debug server is not found (`issue #3756 <https://github.com/platformio/platformio-core/issues/3756>`_)

* **Package Management**

- Improved a package publishing process:

* Show package details
* Check for conflicting names in the PlatformIO Trusted Registry
* Check for duplicates and used version
* Validate package manifest

- Added a new option ``--non-interactive`` to `pio package publish <https://docs.platformio.org/en/latest/core/userguide/package/cmd_publish.html>`__ command

* **Build System**

- Process "precompiled" and "ldflags" properties of the "library.properties" manifest (`issue #3994 <https://github.com/platformio/platformio-core/issues/3994>`_)
- Upgraded build engine to the SCons 4.2 (`release notes <https://github.com/SCons/scons/blob/rel_4.2.0/CHANGES.txt>`__)
- Fixed an issue with broken binary file extension when a custom ``PROGNAME`` contains dot symbols (`issue #3906 <https://github.com/platformio/platformio-core/issues/3906>`_)
- Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files (`issue #4019 <https://github.com/platformio/platformio-core/issues/4019>`_)

* **Static Code Analysis**

- Updated analysis tools:

* `Clang-Tidy <https://docs.platformio.org/en/latest/plus/check-tools/clang-tidy.html>`__ v12.0.1 with new modules and extended checks list
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.5.0 with improved code analysis and MISRA improvements
* `PVS-Studio <https://docs.platformio.org/en/latest/plus/check-tools/pvs-studio.html>`__ v7.14 with support for intermodular analysis, improved MISRA support and new diagnostics

* **Miscellaneous**

- Ensure that a serial port is ready before running unit tests on a remote target (`issue #3742 <https://github.com/platformio/platformio-core/issues/3742>`_)
- Fixed an error "Unknown development platform" when running unit tests on a clean machine (`issue #3901 <https://github.com/platformio/platformio-core/issues/3901>`_)
- Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms (`issue #3872 <https://github.com/platformio/platformio-core/issues/3872>`_)

5.1.1 (2021-03-17)
~~~~~~~~~~~~~~~~~~

* Fixed a "The command line is too long" issue with a linking process on Windows (`issue #3827 <https://github.com/platformio/platformio-core/issues/3827>`_)
* Fixed an issue with `device monitor <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html>`__ when the "send_on_enter" filter didn't send EOL chars (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
* Fixed an issue with `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ when the "send_on_enter" filter didn't send EOL chars (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
* Fixed an issue with silent mode when unwanted data is printed to stdout (`issue #3837 <https://github.com/platformio/platformio-core/issues/3837>`_)
* Fixed an issue when code inspection fails with "Bad JSON" (`issue #3790 <https://github.com/platformio/platformio-core/issues/3790>`_)
* Fixed an issue with overriding user-specified debugging configuration information in VSCode (`issue #3824 <https://github.com/platformio/platformio-core/issues/3824>`_)
@@ -22,8 +125,8 @@ PlatformIO Core 5

* **PlatformIO Home**

- Boosted `PlatformIO Home <https://docs.platformio.org/page/home/index.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
- Added a new ``--session-id`` option to `pio home <https://docs.platformio.org/page/core/userguide/cmd_home.html>`__ command that helps to keep PlatformIO Home isolated from other instances and protect from 3rd party access (`issue #3397 <https://github.com/platformio/platformio-core/issues/3397>`_)
- Boosted `PlatformIO Home <https://docs.platformio.org/en/latest/home/index.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
- Added a new ``--session-id`` option to `pio home <https://docs.platformio.org/en/latest/core/userguide/cmd_home.html>`__ command that helps to keep PlatformIO Home isolated from other instances and protect from 3rd party access (`issue #3397 <https://github.com/platformio/platformio-core/issues/3397>`_)

* **Build System**

@@ -33,28 +136,28 @@ PlatformIO Core 5

* **Package Management**

- New options for `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command:
- New options for `pio system prune <https://docs.platformio.org/en/latest/core/userguide/system/cmd_prune.html>`__ command:

+ ``--dry-run`` option to show data that will be removed
+ ``--core-packages`` option to remove unnecessary core packages
+ ``--platform-packages`` option to remove unnecessary development platform packages (`issue #923 <https://github.com/platformio/platformio-core/issues/923>`_)

- Added new `check_prune_system_threshold <https://docs.platformio.org/page/core/userguide/cmd_settings.html#check-prune-system-threshold>`__ setting
- Added new `check_prune_system_threshold <https://docs.platformio.org/en/latest/core/userguide/cmd_settings.html#check-prune-system-threshold>`__ setting
- Disabled automatic removal of unnecessary development platform packages (`issue #3708 <https://github.com/platformio/platformio-core/issues/3708>`_, `issue #3770 <https://github.com/platformio/platformio-core/issues/3770>`_)
- Fixed an issue when unnecessary packages were removed in ``update --dry-run`` mode (`issue #3809 <https://github.com/platformio/platformio-core/issues/3809>`_)
- Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source (`issue #3816 <https://github.com/platformio/platformio-core/issues/3816>`_)

* **Debugging**

- Configure a custom debug adapter speed using a new `debug_speed <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-speed>`__ option (`issue #3799 <https://github.com/platformio/platformio-core/issues/3799>`_)
- Configure a custom debug adapter speed using a new `debug_speed <https://docs.platformio.org/en/latest/projectconf/section_env_debug.html#debug-speed>`__ option (`issue #3799 <https://github.com/platformio/platformio-core/issues/3799>`_)
- Handle debugging server's "ready_pattern" in "stderr" output
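The ``debug_speed`` option mentioned in the Debugging block above sets a custom debug adapter speed from "platformio.ini". A hedged example; the platform, board, tool, and speed value are assumptions, and the unit of the value is adapter-specific:

```ini
[env:debug_probe_example]
platform = nordicnrf52      ; assumed platform
board = nrf52840_dk         ; assumed board
debug_tool = jlink          ; assumed debug adapter
debug_speed = 4000          ; assumed value, interpreted by the selected adapter
```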
* **Miscellaneous**

- Improved listing of `multicast DNS services <https://docs.platformio.org/page/core/userguide/device/cmd_list.html>`_
- Improved listing of `multicast DNS services <https://docs.platformio.org/en/latest/core/userguide/device/cmd_list.html>`_
- Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" when using J-Link for firmware uploading on Linux (`issue #3804 <https://github.com/platformio/platformio-core/issues/3804>`_)
- Fixed an issue with a compiler driver for ".ccls" language server (`issue #3808 <https://github.com/platformio/platformio-core/issues/3808>`_)
- Fixed an issue when `pio device monitor --eol <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-eol>`__ and "send_on_enter" filter do not work properly (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
- Fixed an issue when `pio device monitor --eol <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-eol>`__ and "send_on_enter" filter do not work properly (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)

5.0.4 (2020-12-30)
~~~~~~~~~~~~~~~~~~

@@ -63,8 +166,8 @@ PlatformIO Core 5

- Improved ".ccls" configuration file for Emacs, Vim, and Sublime Text integrations
- Updated analysis tools:

* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.3 with improved C++ parser and several new MISRA rules
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.11 with new diagnostics and updated mass suppression mechanism
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.3 with improved C++ parser and several new MISRA rules
* `PVS-Studio <https://docs.platformio.org/en/latest/plus/check-tools/pvs-studio.html>`__ v7.11 with new diagnostics and updated mass suppression mechanism

- Show a warning message about deprecated support for Python 2 and Python 3.5
- Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension

@@ -74,12 +177,12 @@ PlatformIO Core 5

5.0.3 (2020-11-12)
~~~~~~~~~~~~~~~~~~

- Added an error selector for `Sublime Text <https://docs.platformio.org/page/integration/ide/sublimetext.html>`__ build runner (`issue #3733 <https://github.com/platformio/platformio-core/issues/3733>`_)
- Added an error selector for `Sublime Text <https://docs.platformio.org/en/latest/integration/ide/sublimetext.html>`__ build runner (`issue #3733 <https://github.com/platformio/platformio-core/issues/3733>`_)
- Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode
- Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used
- Print ignored test suites and environments in the test summary report only in verbose mode (`issue #3726 <https://github.com/platformio/platformio-core/issues/3726>`_)
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
- Fixed an issue when `pio package pack <https://docs.platformio.org/page/core/userguide/package/cmd_pack.html>`__ ignores some folders (`issue #3730 <https://github.com/platformio/platformio-core/issues/3730>`_)
- Fixed an issue when `pio package pack <https://docs.platformio.org/en/latest/core/userguide/package/cmd_pack.html>`__ ignores some folders (`issue #3730 <https://github.com/platformio/platformio-core/issues/3730>`_)

5.0.2 (2020-10-30)
~~~~~~~~~~~~~~~~~~

@@ -87,21 +190,21 @@ PlatformIO Core 5

- Initialize a new project or update the existing passing working environment name and its options (`issue #3686 <https://github.com/platformio/platformio-core/issues/3686>`_)
- Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missed in the registry (`issue #3700 <https://github.com/platformio/platformio-core/issues/3700>`_)
- Improved "core.call" RPC for PlatformIO Home (`issue #3671 <https://github.com/platformio/platformio-core/issues/3671>`_)
- Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with `lib_deps <https://docs.platformio.org/page/projectconf/section_env_library.html#lib-deps>`__ option (`issue #3664 <https://github.com/platformio/platformio-core/issues/3664>`_)
- Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with `lib_deps <https://docs.platformio.org/en/latest/projectconf/section_env_library.html#lib-deps>`__ option (`issue #3664 <https://github.com/platformio/platformio-core/issues/3664>`_)
- Fixed a "KeyError: 'versions'" when dependency does not exist in the registry (`issue #3666 <https://github.com/platformio/platformio-core/issues/3666>`_)
- Fixed an issue with GCC linker when "native" dev-platform is used in pair with library dependencies (`issue #3669 <https://github.com/platformio/platformio-core/issues/3669>`_)
- Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses (`issue #3677 <https://github.com/platformio/platformio-core/issues/3677>`_)
- Fixed an issue when `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command removes original archive after submitting to the registry (`issue #3716 <https://github.com/platformio/platformio-core/issues/3716>`_)
- Fixed an issue when multiple `pio lib install <https://docs.platformio.org/page/core/userguide/lib/cmd_install.html>`__ command with the same local library results in duplicates in ``lib_deps`` (`issue #3715 <https://github.com/platformio/platformio-core/issues/3715>`_)
- Fixed an issue with a "wrong" timestamp in device monitor output using `"time" filter <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #3712 <https://github.com/platformio/platformio-core/issues/3712>`_)
- Fixed an issue when `pio package publish <https://docs.platformio.org/en/latest/core/userguide/package/cmd_publish.html>`__ command removes original archive after submitting to the registry (`issue #3716 <https://github.com/platformio/platformio-core/issues/3716>`_)
- Fixed an issue when multiple `pio lib install <https://docs.platformio.org/en/latest/core/userguide/lib/cmd_install.html>`__ command with the same local library results in duplicates in ``lib_deps`` (`issue #3715 <https://github.com/platformio/platformio-core/issues/3715>`_)
- Fixed an issue with a "wrong" timestamp in device monitor output using `"time" filter <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #3712 <https://github.com/platformio/platformio-core/issues/3712>`_)

5.0.1 (2020-09-10)
~~~~~~~~~~~~~~~~~~

- Added support for "owner" requirement when declaring ``dependencies`` using `library.json <https://docs.platformio.org/page/librarymanager/config.html#dependencies>`__
- Fixed an issue when using a custom git/ssh package with `platform_packages <https://docs.platformio.org/page/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3624 <https://github.com/platformio/platformio-core/issues/3624>`_)
- Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html>`__ on RaspberryPi device (`issue #3652 <https://github.com/platformio/platformio-core/issues/3652>`_)
- Fixed an issue when `pio package unpublish <https://docs.platformio.org/page/core/userguide/package/cmd_unpublish.html>`__ command crashes (`issue #3660 <https://github.com/platformio/platformio-core/issues/3660>`_)
- Added support for "owner" requirement when declaring ``dependencies`` using `library.json <https://docs.platformio.org/en/latest/librarymanager/config.html#dependencies>`__
- Fixed an issue when using a custom git/ssh package with `platform_packages <https://docs.platformio.org/en/latest/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3624 <https://github.com/platformio/platformio-core/issues/3624>`_)
- Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development <https://docs.platformio.org/en/latest/plus/pio-remote.html>`__ on RaspberryPi device (`issue #3652 <https://github.com/platformio/platformio-core/issues/3652>`_)
- Fixed an issue when `pio package unpublish <https://docs.platformio.org/en/latest/core/userguide/package/cmd_unpublish.html>`__ command crashes (`issue #3660 <https://github.com/platformio/platformio-core/issues/3660>`_)
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
- Fixed an issue with incorrect value for C++ language standard in IDE projects when an in-progress language standard is used (`issue #3653 <https://github.com/platformio/platformio-core/issues/3653>`_)
- Fixed an issue with "Invalid simple block (semantic_version)" from library dependency that refs to an external source (repository, ZIP/Tar archives) (`issue #3658 <https://github.com/platformio/platformio-core/issues/3658>`_)
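The 5.0.1 entries above mention pointing ``platform_packages`` at a custom git/ssh source. A sketch of that option; the package name, repository URL, and ref below are illustrative assumptions:

```ini
[env:custom_package_example]
platform = espressif32      ; assumed platform
board = esp32dev            ; assumed board
framework = arduino
platform_packages =
    ; assumed package spec pulled from a custom git source instead of the registry
    framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32.git#master
```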
@@ -110,7 +213,7 @@ PlatformIO Core 5

5.0.0 (2020-09-03)
~~~~~~~~~~~~~~~~~~

Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__.
Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/en/latest/core/migration.html>`__.

* Integration with the new **PlatformIO Trusted Registry**

@@ -125,19 +228,19 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/

- Built-in fine-grained access control (role-based, teams, organizations)
- New CLI commands:

* `pio package <https://docs.platformio.org/page/core/userguide/package/index.html>`__ – manage packages in the registry
* `pio access <https://docs.platformio.org/page/core/userguide/access/index.html>`__ – manage package access for users, teams, and maintainers
* `pio package <https://docs.platformio.org/en/latest/core/userguide/package/index.html>`__ – manage packages in the registry
* `pio access <https://docs.platformio.org/en/latest/core/userguide/access/index.html>`__ – manage package access for users, teams, and maintainers

* Integration with the new **Account Management System**

- `Manage organizations <https://docs.platformio.org/page/core/userguide/org/index.html>`__
- `Manage teams and team memberships <https://docs.platformio.org/page/core/userguide/team/index.html>`__
- `Manage organizations <https://docs.platformio.org/en/latest/core/userguide/org/index.html>`__
- `Manage teams and team memberships <https://docs.platformio.org/en/latest/core/userguide/team/index.html>`__

* New **Package Management System**

- Integrated PlatformIO Core with the new PlatformIO Registry
- Support for owner-based dependency declaration (resolves name conflicts) (`issue #1824 <https://github.com/platformio/platformio-core/issues/1824>`_)
- Automatically save dependencies to `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__ when installing using PlatformIO CLI (`issue #2964 <https://github.com/platformio/platformio-core/issues/2964>`_)
- Automatically save dependencies to `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ when installing using PlatformIO CLI (`issue #2964 <https://github.com/platformio/platformio-core/issues/2964>`_)
- Follow SemVer complaint version constraints when checking library updates `issue #1281 <https://github.com/platformio/platformio-core/issues/1281>`_)
- Dropped support for "packageRepositories" section in "platform.json" manifest (please publish packages directly to the registry)
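Owner-based dependency declaration, listed in the Package Management System block above, qualifies a library name with its registry owner so that name conflicts are resolved and the dependency can be saved back to "platformio.ini". A hedged sketch; the owners, library names, and version constraints are illustrative assumptions:

```ini
[env:deps_example]
platform = atmelavr         ; assumed platform
board = uno                 ; assumed board
framework = arduino
lib_deps =
    bblanchon/ArduinoJson @ ^6.0.0    ; assumed owner/name with a version constraint
    adafruit/Adafruit GFX Library     ; assumed owner-qualified library name
```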
@@ -145,29 +248,29 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/

- Upgraded build engine to the `SCons 4.0 - a next-generation software construction tool <https://scons.org/>`__

* `Configuration files are Python scripts <https://docs.platformio.org/page/projectconf/advanced_scripting.html>`__ – use the power of a real programming language to solve build problems
* `Configuration files are Python scripts <https://docs.platformio.org/en/latest/projectconf/advanced_scripting.html>`__ – use the power of a real programming language to solve build problems
* Built-in reliable and automatic dependency analysis
* Improved support for parallel builds
* Ability to `share built files in a cache <https://docs.platformio.org/page/projectconf/section_platformio.html#projectconf-pio-build-cache-dir>`__ to speed up multiple builds
* Ability to `share built files in a cache <https://docs.platformio.org/en/latest/projectconf/section_platformio.html#projectconf-pio-build-cache-dir>`__ to speed up multiple builds

- New `Custom Targets <https://docs.platformio.org/page/projectconf/advanced_scripting.html#custom-targets>`__
- New `Custom Targets <https://docs.platformio.org/en/latest/projectconf/advanced_scripting.html#custom-targets>`__

* Pre/Post processing based on dependent sources (another target, source file, etc.)
* Command launcher with own arguments
* Launch command with custom options declared in `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__
* Launch command with custom options declared in `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__
* Python callback as a target (use the power of Python interpreter and PlatformIO Build API)
* List available project targets (including dev-platform specific and custom targets) with a new `pio run --list-targets <https://docs.platformio.org/page/core/userguide/cmd_run.html#cmdoption-platformio-run-list-targets>`__ command (`issue #3544 <https://github.com/platformio/platformio-core/issues/3544>`_)
* List available project targets (including dev-platform specific and custom targets) with a new `pio run --list-targets <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-platformio-run-list-targets>`__ command (`issue #3544 <https://github.com/platformio/platformio-core/issues/3544>`_)

- Enable "cyclic reference" for GCC linker only for the embedded dev-platforms (`issue #3570 <https://github.com/platformio/platformio-core/issues/3570>`_)
- Automatically enable LDF dependency `chain+ mode (evaluates C/C++ Preprocessor conditional syntax) <https://docs.platformio.org/page/librarymanager/ldf.html#dependency-finder-mode>`__ for Arduino library when "library.property" has "depends" field (`issue #3607 <https://github.com/platformio/platformio-core/issues/3607>`_)
- Automatically enable LDF dependency `chain+ mode (evaluates C/C++ Preprocessor conditional syntax) <https://docs.platformio.org/en/latest/librarymanager/ldf.html#dependency-finder-mode>`__ for Arduino library when "library.property" has "depends" field (`issue #3607 <https://github.com/platformio/platformio-core/issues/3607>`_)
- Fixed an issue with improper processing of source files added via multiple Build Middlewares (`issue #3531 <https://github.com/platformio/platformio-core/issues/3531>`_)
- Fixed an issue with the ``clean`` target on Windows when project and build directories are located on different logical drives (`issue #3542 <https://github.com/platformio/platformio-core/issues/3542>`_)
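Custom Targets, listed in the Build System block above, are registered from an extra script and can read options declared in "platformio.ini". A sketch of the configuration side only; the script path and the custom option are illustrative assumptions (the Python script that registers the target is not shown here):

```ini
[env:custom_target_example]
platform = native                        ; assumed platform
extra_scripts = post:custom_targets.py   ; assumed script that registers the custom target
custom_greeting = Hello from platformio.ini   ; assumed custom_ option read by that script
```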
* **Project Management**

- Added support for "globstar/`**`" (recursive) pattern for the different commands and configuration options (`pio ci <https://docs.platformio.org/page/core/userguide/cmd_ci.html>`__, `src_filter <https://docs.platformio.org/page/projectconf/section_env_build.html#src-filter>`__, `check_patterns <https://docs.platformio.org/page/projectconf/section_env_check.html#check-patterns>`__, `library.json > srcFilter <https://docs.platformio.org/page/librarymanager/config.html#srcfilter>`__). Python 3.5+ is required
- Added a new ``-e, --environment`` option to `pio project init <https://docs.platformio.org/page/core/userguide/project/cmd_init.html#cmdoption-platformio-project-init-e>`__ command that helps to update a PlatformIO project using the existing environment
- Dump build system data intended for IDE extensions/plugins using a new `pio project data <https://docs.platformio.org/page/core/userguide/project/cmd_data.html>`__ command
- Added support for "globstar/`**`" (recursive) pattern for the different commands and configuration options (`pio ci <https://docs.platformio.org/en/latest/core/userguide/cmd_ci.html>`__, `src_filter <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#src-filter>`__, `check_patterns <https://docs.platformio.org/en/latest/projectconf/section_env_check.html#check-patterns>`__, `library.json > srcFilter <https://docs.platformio.org/en/latest/librarymanager/config.html#srcfilter>`__). Python 3.5+ is required
- Added a new ``-e, --environment`` option to `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html#cmdoption-platformio-project-init-e>`__ command that helps to update a PlatformIO project using the existing environment
- Dump build system data intended for IDE extensions/plugins using a new `pio project data <https://docs.platformio.org/en/latest/core/userguide/project/cmd_data.html>`__ command
- Do not generate ".travis.yml" for a new project, let the user have a choice
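The globstar (``**``) support mentioned in the Project Management block above applies, among other places, to the ``src_filter`` option. A hedged sketch; the directory and file names are assumptions:

```ini
[env:globstar_example]
platform = native           ; assumed platform
; ** matches nested directories recursively; excluded paths are assumed examples
src_filter = +<*> -<third_party/**> -<**/test_*.cpp>
```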
* **Unit Testing**

@@ -180,43 +283,43 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/

- Updated analysis tools:

* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.1 with a new "soundy" analysis option and improved code parser
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.09 with a new file list analysis mode and an extended list of analysis diagnostics
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.1 with a new "soundy" analysis option and improved code parser
* `PVS-Studio <https://docs.platformio.org/en/latest/plus/check-tools/pvs-studio.html>`__ v7.09 with a new file list analysis mode and an extended list of analysis diagnostics

- Added Cppcheck package for ARM-based single-board computers (`issue #3559 <https://github.com/platformio/platformio-core/issues/3559>`_)
- Fixed an issue with PIO Check when a defect with a multiline error message is not reported in verbose mode (`issue #3631 <https://github.com/platformio/platformio-core/issues/3631>`_)

* **Miscellaneous**

- Display system-wide information using a new `pio system info <https://docs.platformio.org/page/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
- Remove unused data using a new `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
- Display system-wide information using a new `pio system info <https://docs.platformio.org/en/latest/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
- Remove unused data using a new `pio system prune <https://docs.platformio.org/en/latest/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
- Show ignored project environments only in the verbose mode (`issue #3641 <https://github.com/platformio/platformio-core/issues/3641>`_)
- Do not escape compiler arguments in VSCode template on Windows
- Drop support for Python 2 and 3.5.
- Drop support for Python 2 and 3.5

.. _release_notes_4:

PlatformIO Core 4
-----------------

See `PlatformIO Core 4.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-4>`__.
See `PlatformIO Core 4.0 history <https://github.com/platformio/platformio-core/blob/v4.3.4/HISTORY.rst>`__.

PlatformIO Core 3
-----------------

See `PlatformIO Core 3.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-3>`__.
See `PlatformIO Core 3.0 history <https://github.com/platformio/platformio-core/blob/v3.6.7/HISTORY.rst>`__.

PlatformIO Core 2
-----------------

See `PlatformIO Core 2.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-2>`__.
See `PlatformIO Core 2.0 history <https://github.com/platformio/platformio-core/blob/v2.11.2/HISTORY.rst>`__.

PlatformIO Core 1
-----------------

See `PlatformIO Core 1.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-1>`__.
See `PlatformIO Core 1.0 history <https://github.com/platformio/platformio-core/blob/v1.5.0/HISTORY.rst>`__.

PlatformIO Core Preview
-----------------------

See `PlatformIO Core Preview history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-preview>`__.
See `PlatformIO Core Preview history <https://github.com/platformio/platformio-core/blob/v0.10.2/HISTORY.rst>`__.
Makefile (6 lines changed)

@@ -1,14 +1,14 @@
lint:
pylint -j 6 --rcfile=./.pylintrc ./platformio
pylint -j 6 --rcfile=./.pylintrc ./tests
pylint -j 6 --rcfile=./.pylintrc ./platformio

isort:
isort ./platformio
isort ./tests

format:
black --target-version py27 ./platformio
black --target-version py27 ./tests
black ./platformio
black ./tests

test:
py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
README.rst (74 lines changed)

@@ -1,5 +1,5 @@
PlatformIO
==========
PlatformIO Core
===============

.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
:target: https://docs.platformio.org/page/core/index.html

@@ -17,11 +17,12 @@ PlatformIO
:target: https://pypi.python.org/pypi/platformio/
:alt: License
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
:alt: Community Labs
:alt: PlatformIO Labs
:target: https://piolabs.com/?utm_source=github&utm_medium=core

**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
**Quick Links:** `Homepage <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Registry <https://registry.platformio.org?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |

@@ -43,7 +44,7 @@ PlatformIO
* Cross-platform IDE and Unified Debugger
* Static Code Analyzer and Remote Unit Testing
* Multi-platform and Multi-architecture Build System
* Firmware File Explorer and Memory Inspection.
* Firmware File Explorer and Memory Inspection

Get Started
-----------

@@ -70,66 +71,9 @@ Solutions
Registry
--------

* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_

Development Platforms
---------------------

* `Aceinna IMU <https://platformio.org/platforms/aceinna_imu?utm_source=github&utm_medium=core>`_
* `ASR Microelectronics ASR605x <https://platformio.org/platforms/asrmicro650x?utm_source=github&utm_medium=core>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
* `Kendryte K210 <https://platformio.org/platforms/kendryte210?utm_source=github&utm_medium=core>`_
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `Nuclei <https://platformio.org/platforms/nuclei?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `RISC-V GAP <https://platformio.org/platforms/riscv_gap?utm_source=github&utm_medium=core>`_
* `Shakti <https://platformio.org/platforms/shakti?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `ST STM8 <https://platformio.org/platforms/ststm8?utm_source=github&utm_medium=core>`_
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_

Frameworks
----------

* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `Freedom E SDK <https://platformio.org/frameworks/freedom-e-sdk?utm_source=github&utm_medium=core>`_
* `GigaDevice GD32V SDK <https://platformio.org/frameworks/gd32vf103-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte Standalone SDK <https://platformio.org/frameworks/kendryte-standalone-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte FreeRTOS SDK <https://platformio.org/frameworks/kendryte-freertos-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `Mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `Nuclei SDK <https://platformio.org/frameworks/nuclei-sdk?utm_source=github&utm_medium=core>`_
* `PULP OS <https://platformio.org/frameworks/pulp-os?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Shakti SDK <https://platformio.org/frameworks/shakti-sdk?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
* `Zephyr <https://platformio.org/frameworks/zephyr?utm_source=github&utm_medium=core>`_
* `Libraries <https://registry.platformio.org/search?t=library&utm_source=github&utm_medium=core>`_
* `Development Platforms <https://registry.platformio.org/search?t=platform&utm_source=github&utm_medium=core>`_
* `Development Tools <https://registry.platformio.org/search?t=tool&utm_source=github&utm_medium=core>`_

Contributing
------------
2
docs
2
docs
Submodule docs updated: 3293903cac...bbf4d27508
2
examples
2
examples
Submodule examples updated: a0631a8b07...dcafbd192e
@@ -14,7 +14,7 @@

 import sys

-VERSION = (5, 1, 1)
+VERSION = (5, 2, 5)
 __version__ = ".".join([str(s) for s in VERSION])

 __title__ = "platformio"

@@ -47,13 +47,13 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

 __default_requests_timeout__ = (10, None)  # (connect, read)

 __core_packages__ = {
-    "contrib-piohome": "~3.3.4",
+    "contrib-piohome": "~3.4.1",
     "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
     "tool-unity": "~1.20500.0",
-    "tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40100.2",
-    "tool-cppcheck": "~1.230.0",
-    "tool-clangtidy": "~1.100000.0",
-    "tool-pvs-studio": "~7.11.0",
+    "tool-scons": "~4.40300.0",
+    "tool-cppcheck": "~1.260.0",
+    "tool-clangtidy": "~1.120001.0",
+    "tool-pvs-studio": "~7.14.0",
 }

 __check_internet_hosts__ = [
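The `__core_packages__` pins above use tilde requirements. As a hedged illustration only (assuming the `semantic_version` package, which may differ from the resolver PlatformIO actually uses), a `~` spec accepts patch-level updates and rejects anything larger:

    from semantic_version import SimpleSpec, Version

    spec = SimpleSpec("~4.40300.0")          # roughly ">=4.40300.0,<4.40301.0"
    print(spec.match(Version("4.40300.2")))  # True  - patch bump stays inside the range
    print(spec.match(Version("4.40400.0")))  # False - minor bump falls outside "~"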
@@ -12,22 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

 # pylint: disable=import-outside-toplevel

 import os
 import sys
 from traceback import format_exc

 import click

-from platformio import __version__, exception, maintenance, util
+from platformio import __version__, exception
 from platformio.commands import PlatformioCLI
-from platformio.compat import CYGWIN
-
-try:
-    import click_completion  # pylint: disable=import-error
-
-    click_completion.init()
-except:  # pylint: disable=bare-except
-    pass
+from platformio.compat import IS_CYGWIN, ensure_python3


 @click.command(

@@ -60,18 +55,34 @@ def cli(ctx, force, caller, no_ansi):
     except:  # pylint: disable=bare-except
         pass

+    from platformio import maintenance
+
     maintenance.on_platformio_start(ctx, force, caller)


-@cli.resultcallback()
-@click.pass_context
-def process_result(ctx, result, *_, **__):
-    maintenance.on_platformio_end(ctx, result)
+try:
+
+    @cli.result_callback()
+    @click.pass_context
+    def process_result(ctx, result, *_, **__):
+        _process_result(ctx, result)
+
+except (AttributeError, TypeError):  # legacy support for CLick > 8.0.1
+
+    @cli.resultcallback()
+    @click.pass_context
+    def process_result(ctx, result, *_, **__):
+        _process_result(ctx, result)
+
+
+def _process_result(ctx, result):
+    from platformio import maintenance
+
+    maintenance.on_platformio_end(ctx, result)


-@util.memoized()
 def configure():
-    if CYGWIN:
+    if IS_CYGWIN:
         raise exception.CygwinEnvDetected()

     # https://urllib3.readthedocs.org

@@ -105,6 +116,7 @@ def main(argv=None):
         assert isinstance(argv, list)
         sys.argv = argv
     try:
+        ensure_python3(raise_exception=True)
         configure()
         cli()  # pylint: disable=no-value-for-parameter
     except SystemExit as e:

@@ -112,7 +124,10 @@ def main(argv=None):
         exit_code = int(e.code)
     except Exception as e:  # pylint: disable=broad-except
         if not isinstance(e, exception.ReturnErrorCode):
-            maintenance.on_platformio_exception(e)
+            if sys.version_info.major != 2:
+                from platformio import maintenance
+
+                maintenance.on_platformio_exception(e)
         error_str = "Error: "
         if isinstance(e, exception.PlatformioException):
             error_str += str(e)
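The `__main__` hunk above registers the result callback twice because Click 8 renamed `Group.resultcallback()` to `Group.result_callback()`; on older Click the new name is either missing or not callable, hence the `(AttributeError, TypeError)` fallback. A minimal, self-contained sketch of the same compatibility pattern (toy command names, not PlatformIO's real CLI):

    import click


    @click.group()
    def cli():
        pass


    def _echo_result(result):
        click.echo("command returned: %r" % (result,))


    try:
        # Click >= 8 spells the registration method result_callback()
        @cli.result_callback()
        def process_result(result, **_):
            _echo_result(result)

    except (AttributeError, TypeError):
        # Older Click exposes only the legacy resultcallback() name
        @cli.resultcallback()
        def process_result(result, **_):
            _echo_result(result)


    @cli.command()
    def hello():
        click.echo("hello")
        return 42


    if __name__ == "__main__":
        cli()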
@@ -21,17 +21,17 @@ import os
|
||||
import platform
|
||||
import socket
|
||||
import uuid
|
||||
from os.path import dirname, isdir, isfile, join, realpath
|
||||
|
||||
from platformio import __version__, exception, fs, proc
|
||||
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.project.helpers import get_default_projects_dir, get_project_core_dir
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_default_projects_dir
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
assert isdir(projects_dir)
|
||||
return realpath(projects_dir)
|
||||
assert os.path.isdir(projects_dir)
|
||||
return os.path.abspath(projects_dir)
|
||||
|
||||
|
||||
DEFAULT_SETTINGS = {
|
||||
@@ -64,7 +64,7 @@ DEFAULT_SETTINGS = {
|
||||
"value": True,
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"description": ("Telemetry service <https://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"value": True,
|
||||
},
|
||||
"force_verbose": {
|
||||
@@ -91,7 +91,10 @@ class State(object):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
self.path = join(get_project_core_dir(), "appstate.json")
|
||||
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
|
||||
if not os.path.isdir(core_dir):
|
||||
os.makedirs(core_dir)
|
||||
self.path = os.path.join(core_dir, "appstate.json")
|
||||
self._storage = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
@@ -99,7 +102,7 @@ class State(object):
|
||||
def __enter__(self):
|
||||
try:
|
||||
self._lock_state_file()
|
||||
if isfile(self.path):
|
||||
if os.path.isfile(self.path):
|
||||
self._storage = fs.load_json(self.path)
|
||||
assert isinstance(self._storage, dict)
|
||||
except (
|
||||
@@ -114,10 +117,10 @@ class State(object):
|
||||
def __exit__(self, type_, value, traceback):
|
||||
if self.modified:
|
||||
try:
|
||||
with open(self.path, "w") as fp:
|
||||
fp.write(dump_json_to_unicode(self._storage))
|
||||
with open(self.path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(json.dumps(self._storage))
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(get_project_core_dir())
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
|
||||
self._unlock_state_file()
|
||||
|
||||
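The `State` changes above read and write `appstate.json` as UTF-8 JSON and only persist when something was actually modified. A stripped-down sketch of that pattern (file name and error handling simplified; this is not the full `State` API):

    import json
    import os


    class JsonState:
        def __init__(self, path="appstate.json"):
            self.path = path
            self._storage = {}
            self.modified = False

        def __enter__(self):
            if os.path.isfile(self.path):
                with open(self.path, encoding="utf8") as fp:
                    self._storage = json.load(fp)
            return self

        def __exit__(self, exc_type, exc, tb):
            if self.modified:
                with open(self.path, mode="w", encoding="utf8") as fp:
                    fp.write(json.dumps(self._storage))

        def set(self, key, value):
            self._storage[key] = value
            self.modified = True


    with JsonState() as state:
        state.set("last_check", 1234567890)  # written back on exit only because modified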
def _lock_state_file(self):
|
||||
@@ -127,7 +130,7 @@ class State(object):
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(dirname(self.path))
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
@@ -250,34 +253,19 @@ def is_disabled_progressbar():
|
||||
|
||||
|
||||
def get_cid():
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.clients.http import fetch_remote_content
|
||||
|
||||
cid = get_state_item("cid")
|
||||
if cid:
|
||||
return cid
|
||||
uid = None
|
||||
if os.getenv("C9_UID"):
|
||||
uid = os.getenv("C9_UID")
|
||||
if os.getenv("GITHUB_USER"):
|
||||
uid = os.getenv("GITHUB_USER")
|
||||
elif os.getenv("GITPOD_GIT_USER_NAME"):
|
||||
uid = os.getenv("GITPOD_GIT_USER_NAME")
|
||||
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
|
||||
try:
|
||||
uid = json.loads(
|
||||
fetch_remote_content(
|
||||
"{api}/user?token={token}".format(
|
||||
api=os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")),
|
||||
token=os.getenv("USER_TOKEN"),
|
||||
)
|
||||
)
|
||||
).get("id")
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
if not uid:
|
||||
uid = uuid.getnode()
|
||||
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
|
||||
cid = str(cid)
|
||||
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
set_state_item("cid", cid)
|
||||
return cid
|
||||
|
||||
|
||||
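`get_cid()` above derives an anonymous client id: prefer a user name exposed by the environment, fall back to `uuid.getnode()`, then hash the value into a UUID. A hedged standalone sketch (only the environment variables shown in the diff are assumed; the encoding helper is simplified):

    import hashlib
    import os
    import uuid


    def stable_client_id():
        uid = os.getenv("GITHUB_USER") or os.getenv("GITPOD_GIT_USER_NAME")
        if not uid:
            uid = uuid.getnode()  # 48-bit hardware-derived node id
        digest = hashlib.md5(str(uid).encode()).digest()  # 16 bytes -> valid UUID input
        return str(uuid.UUID(bytes=digest))


    print(stable_client_id())  # same machine/user -> same id across runs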
@@ -12,9 +12,9 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from os import environ, makedirs
|
||||
from os.path import isdir, join
|
||||
from time import time
|
||||
|
||||
import click
|
||||
@@ -29,7 +29,6 @@ from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import compat, fs
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
@@ -65,18 +64,18 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
"pioide",
|
||||
"piosize",
|
||||
],
|
||||
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
|
||||
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
# Propagating External Environment
|
||||
ENV=environ,
|
||||
ENV=os.environ,
|
||||
UNIX_TIME=int(time()),
|
||||
BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
|
||||
BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
|
||||
COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
|
||||
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
|
||||
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
|
||||
COMPILATIONDB_PATH=os.path.join("$BUILD_DIR", "compile_commands.json"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
IDE_EXTRA_DATA={},
|
||||
)
|
||||
@@ -110,65 +109,70 @@ env.Replace(
|
||||
config = env.GetProjectConfig()
|
||||
env.Replace(
|
||||
PROJECT_DIR=get_project_dir(),
|
||||
PROJECT_CORE_DIR=config.get_optional_dir("core"),
|
||||
PROJECT_PACKAGES_DIR=config.get_optional_dir("packages"),
|
||||
PROJECT_WORKSPACE_DIR=config.get_optional_dir("workspace"),
|
||||
PROJECT_LIBDEPS_DIR=config.get_optional_dir("libdeps"),
|
||||
PROJECT_INCLUDE_DIR=config.get_optional_dir("include"),
|
||||
PROJECT_SRC_DIR=config.get_optional_dir("src"),
|
||||
PROJECTSRC_DIR=config.get_optional_dir("src"), # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get_optional_dir("test"),
|
||||
PROJECT_DATA_DIR=config.get_optional_dir("data"),
|
||||
PROJECTDATA_DIR=config.get_optional_dir("data"), # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get_optional_dir("build"),
|
||||
BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
|
||||
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
|
||||
PROJECT_PACKAGES_DIR=config.get("platformio", "packages_dir"),
|
||||
PROJECT_WORKSPACE_DIR=config.get("platformio", "workspace_dir"),
|
||||
PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
|
||||
PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
|
||||
PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
|
||||
PROJECTSRC_DIR="$PROJECT_SRC_DIR", # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
|
||||
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
|
||||
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
|
||||
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
|
||||
LIBSOURCE_DIRS=[
|
||||
config.get_optional_dir("lib"),
|
||||
join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get_optional_dir("globallib"),
|
||||
config.get("platformio", "lib_dir"),
|
||||
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get("platformio", "globallib_dir"),
|
||||
],
|
||||
)
|
||||
|
||||
if (
|
||||
compat.WINDOWS
|
||||
and sys.version_info >= (3, 8)
|
||||
and env["PROJECT_DIR"].startswith("\\\\")
|
||||
):
|
||||
click.secho(
|
||||
"There is a known issue with Python 3.8+ and mapped network drives on "
|
||||
"Windows.\nPlease downgrade Python to the latest 3.7. More details at:\n"
|
||||
"https://github.com/platformio/platformio-core/issues/3417",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if env.GetOption("clean"):
|
||||
env.PioClean(env.subst("$BUILD_DIR"))
|
||||
if compat.IS_WINDOWS and sys.version_info >= (3, 8) and os.getcwd().startswith("\\\\"):
|
||||
click.secho("!!! WARNING !!!\t\t" * 3, fg="red")
|
||||
click.secho(
|
||||
"Your project is located on a mapped network drive but the "
|
||||
"current command-line shell does not support the UNC paths.",
|
||||
fg="yellow",
|
||||
)
|
||||
click.secho(
|
||||
"Please move your project to a physical drive or check this workaround: "
|
||||
"https://bit.ly/3kuU5mP\n",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
|
||||
if env.GetOption("clean") or is_clean_all:
|
||||
env.PioClean(is_clean_all)
|
||||
env.Exit(0)
|
||||
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
if not isdir(env.subst("$BUILD_DIR")):
|
||||
makedirs(env.subst("$BUILD_DIR"))
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(
|
||||
join("$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]))
|
||||
os.path.join(
|
||||
"$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
|
||||
)
|
||||
)
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
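The hunk above stops using `get_optional_dir()` and reads directory options straight from the `platformio` section of the project configuration. A small sketch of that lookup, assuming it runs inside a PlatformIO project so `ProjectConfig.get_instance()` can resolve `platformio.ini` or fall back to defaults:

    from platformio.project.config import ProjectConfig

    config = ProjectConfig.get_instance()
    for key in ("core_dir", "packages_dir", "build_dir", "libdeps_dir", "src_dir"):
        print(key, "->", config.get("platformio", key))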
@@ -209,7 +213,7 @@ env.AddPreAction(
|
||||
),
|
||||
)
|
||||
|
||||
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
##############################################################################
|
||||
@@ -218,17 +222,20 @@ if "envdump" in COMMAND_LINE_TARGETS:
|
||||
click.echo(env.Dump())
|
||||
env.Exit(0)
|
||||
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
|
||||
try:
|
||||
Import("projenv")
|
||||
except: # pylint: disable=bare-except
|
||||
projenv = env
|
||||
click.echo(
|
||||
"\n%s\n"
|
||||
% dump_json_to_unicode(
|
||||
projenv.DumpIDEData(env) # pylint: disable=undefined-variable
|
||||
)
|
||||
)
|
||||
data = projenv.DumpIDEData(env)
|
||||
# dump to file for the further reading by project.helpers.load_project_ide_data
|
||||
with open(
|
||||
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
|
||||
mode="w",
|
||||
encoding="utf8",
|
||||
) as fp:
|
||||
json.dump(data, fp)
|
||||
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
|
||||
env.Exit(0)
|
||||
|
||||
if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
|
||||
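With the change above, the `idedata` target also dumps `idedata.json` into the build directory for later reading by `project.helpers.load_project_ide_data`. A hedged sketch of consuming that file (the environment name and path layout are illustrative):

    import json
    import os

    build_dir = os.path.join(".pio", "build", "myenv")  # "myenv" is illustrative
    with open(os.path.join(build_dir, "idedata.json"), encoding="utf8") as fp:
        idedata = json.load(fp)

    print(sorted(idedata))  # available keys depend on what DumpIDEData() emitted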
@@ -58,7 +58,7 @@ class __CompilationDbNode(SCons.Node.Python.Value):
|
||||
|
||||
|
||||
def changed_since_last_build_node(*args, **kwargs):
|
||||
""" Dummy decider to force always building"""
|
||||
"""Dummy decider to force always building"""
|
||||
return True
|
||||
|
||||
|
||||
@@ -152,7 +152,7 @@ def WriteCompilationDb(target, source, env):
|
||||
item["file"] = os.path.abspath(item["file"])
|
||||
entries.append(item)
|
||||
|
||||
with open(str(target[0]), "w") as target_file:
|
||||
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
|
||||
json.dump(
|
||||
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
|
||||
)
|
||||
|
||||
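`WriteCompilationDb` above writes `compile_commands.json` as UTF-8. A short consumer sketch, for example listing every file compiled with a given flag (the flag and path are illustrative):

    import json

    with open("compile_commands.json", encoding="utf8") as fp:
        entries = json.load(fp)

    for entry in entries:
        if "-Os" in entry["command"]:
            print(entry["file"])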
@@ -14,13 +14,12 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import os
|
||||
from glob import glob
|
||||
|
||||
import SCons.Defaults # pylint: disable=import-error
|
||||
import SCons.Subst # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import exec_command, where_is_program
|
||||
|
||||
@@ -33,14 +32,14 @@ def _dump_includes(env):
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
includes["build"].extend(
|
||||
[os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
||||
# installed libs
|
||||
includes["compatlib"] = []
|
||||
for lb in env.GetLibBuilders():
|
||||
includes["compatlib"].extend(
|
||||
[os.path.realpath(inc) for inc in lb.get_include_dirs()]
|
||||
[os.path.abspath(inc) for inc in lb.get_include_dirs()]
|
||||
)
|
||||
|
||||
# includes from toolchains
|
||||
@@ -49,7 +48,7 @@ def _dump_includes(env):
|
||||
for pkg in p.get_installed_packages():
|
||||
if p.get_package_type(pkg.metadata.name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = glob_escape(pkg.path)
|
||||
toolchain_dir = glob.escape(pkg.path)
|
||||
toolchain_incglobs = [
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
@@ -57,12 +56,12 @@ def _dump_includes(env):
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
|
||||
includes["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])
|
||||
|
||||
# include Unity framework if there are tests in project
|
||||
includes["unity"] = []
|
||||
auto_install_unity = False
|
||||
test_dir = env.GetProjectConfig().get_optional_dir("test")
|
||||
test_dir = env.GetProjectConfig().get("platformio", "test_dir")
|
||||
if os.path.isdir(test_dir) and os.listdir(test_dir) != ["README"]:
|
||||
auto_install_unity = True
|
||||
unity_dir = get_core_package_dir(
|
||||
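The toolchain-include hunk above switches from the compat `glob_escape` helper to the stdlib `glob.escape()`. The escaping matters because package paths may contain glob metacharacters; a tiny illustration (the directory name is made up):

    import glob
    import os

    toolchain_dir = "/opt/toolchain [v2]"  # "[v2]" would otherwise act as a character class
    pattern = os.path.join(glob.escape(toolchain_dir), "*", "include*")
    print(pattern)             # /opt/toolchain [[]v2]/*/include*
    print(glob.glob(pattern))  # [] here, unless such a directory really exists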
@@ -131,7 +130,7 @@ def _dump_defines(env):
|
||||
def _get_svd_path(env):
|
||||
svd_path = env.GetProjectOption("debug_svd_path")
|
||||
if svd_path:
|
||||
return os.path.realpath(svd_path)
|
||||
return os.path.abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
@@ -146,7 +145,7 @@ def _get_svd_path(env):
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return os.path.realpath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
@@ -156,7 +155,7 @@ def _subst_cmd(env, cmd):
|
||||
|
||||
|
||||
def DumpIDEData(env, globalenv):
|
||||
""" env here is `projenv`"""
|
||||
"""env here is `projenv`"""
|
||||
|
||||
data = {
|
||||
"env_name": env["PIOENV"],
|
||||
|
||||
@@ -32,8 +32,8 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio.clients.http import InternetIsOffline
|
||||
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.clients.http import HTTPClientError, InternetIsOffline
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manifest.parser import (
|
||||
@@ -59,6 +59,16 @@ class LibBuilderFactory(object):
|
||||
clsname = "%sLibBuilder" % used_frameworks[0].title()
|
||||
|
||||
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
|
||||
|
||||
# Handle PlatformIOLibBuilder.manifest.build.builder
|
||||
# pylint: disable=protected-access
|
||||
if isinstance(obj, PlatformIOLibBuilder) and obj._manifest.get("build", {}).get(
|
||||
"builder"
|
||||
):
|
||||
obj = getattr(
|
||||
sys.modules[__name__], obj._manifest.get("build", {}).get("builder")
|
||||
)(env, path, verbose=verbose)
|
||||
|
||||
assert isinstance(obj, LibBuilderBase)
|
||||
return obj
|
||||
|
||||
@@ -86,7 +96,9 @@ class LibBuilderFactory(object):
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
|
||||
):
|
||||
continue
|
||||
with io.open(os.path.join(root, fname), errors="ignore") as fp:
|
||||
with io.open(
|
||||
os.path.join(root, fname), encoding="utf8", errors="ignore"
|
||||
) as fp:
|
||||
content = fp.read()
|
||||
if not content:
|
||||
continue
|
||||
@@ -113,7 +125,7 @@ class LibBuilderBase(object):
|
||||
def __init__(self, env, path, manifest=None, verbose=False):
|
||||
self.env = env.Clone()
|
||||
self.envorigin = env.Clone()
|
||||
self.path = os.path.realpath(env.subst(path))
|
||||
self.path = os.path.abspath(env.subst(path))
|
||||
self.verbose = verbose
|
||||
|
||||
try:
|
||||
@@ -126,9 +138,9 @@ class LibBuilderBase(object):
|
||||
|
||||
self._is_dependent = False
|
||||
self._is_built = False
|
||||
self._depbuilders = list()
|
||||
self._circular_deps = list()
|
||||
self._processed_files = list()
|
||||
self._depbuilders = []
|
||||
self._circular_deps = []
|
||||
self._processed_files = []
|
||||
|
||||
# reset source filter, could be overridden with extra script
|
||||
self.env["SRC_FILTER"] = ""
|
||||
@@ -142,7 +154,7 @@ class LibBuilderBase(object):
|
||||
def __contains__(self, path):
|
||||
p1 = self.path
|
||||
p2 = path
|
||||
if WINDOWS:
|
||||
if IS_WINDOWS:
|
||||
p1 = p1.lower()
|
||||
p2 = p2.lower()
|
||||
if p1 == p2:
|
||||
@@ -172,19 +184,19 @@ class LibBuilderBase(object):
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if not all(
|
||||
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
|
||||
):
|
||||
return None
|
||||
return os.path.join(self.path, "include")
|
||||
for name in ("include", "Include"):
|
||||
d = os.path.join(self.path, name)
|
||||
if os.path.isdir(d):
|
||||
return d
|
||||
return None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
return (
|
||||
os.path.join(self.path, "src")
|
||||
if os.path.isdir(os.path.join(self.path, "src"))
|
||||
else self.path
|
||||
)
|
||||
for name in ("src", "Src"):
|
||||
d = os.path.join(self.path, name)
|
||||
if os.path.isdir(d):
|
||||
return d
|
||||
return self.path
|
||||
|
||||
def get_include_dirs(self):
|
||||
items = []
|
||||
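The rewritten `include_dir`/`src_dir` properties above now accept both lower-case and capitalized library layouts. The same lookup as a small reusable helper (the library path is illustrative):

    import os


    def find_dir(base, candidates, default=None):
        for name in candidates:
            d = os.path.join(base, name)
            if os.path.isdir(d):
                return d
        return default


    lib_root = "/path/to/SomeLibrary"  # illustrative
    include_dir = find_dir(lib_root, ("include", "Include"))
    src_dir = find_dir(lib_root, ("src", "Src"), default=lib_root)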
@@ -278,7 +290,7 @@ class LibBuilderBase(object):
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
self.env.SConscript(
|
||||
os.path.realpath(self.extra_script),
|
||||
os.path.abspath(self.extra_script),
|
||||
exports={"env": self.env, "pio_lib_builder": self},
|
||||
)
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
@@ -459,12 +471,22 @@ class LibBuilderBase(object):
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
|
||||
do_not_archive = not self.lib_archive
|
||||
if not do_not_archive:
|
||||
nodes = self.env.CollectBuildFiles(
|
||||
self.build_dir, self.src_dir, self.src_filter
|
||||
)
|
||||
else:
|
||||
if nodes:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(
|
||||
self.build_dir, self.src_dir, self.src_filter, nodes
|
||||
)
|
||||
)
|
||||
else:
|
||||
do_not_archive = True
|
||||
if do_not_archive:
|
||||
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
|
||||
|
||||
return libs
|
||||
|
||||
|
||||
@@ -479,6 +501,14 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
return {}
|
||||
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if not all(
|
||||
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
|
||||
):
|
||||
return None
|
||||
return os.path.join(self.path, "include")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
if os.path.isdir(os.path.join(self.path, "src")):
|
||||
@@ -545,6 +575,24 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
ldflags = [
|
||||
LibBuilderBase.build_flags.fget(self), # pylint: disable=no-member
|
||||
self._manifest.get("ldflags"),
|
||||
]
|
||||
if self._manifest.get("precompiled") in ("true", "full"):
|
||||
# add to LDPATH {build.mcu} folder
|
||||
board_config = self.env.BoardConfig()
|
||||
for key in ("build.mcu", "build.cpu"):
|
||||
libpath = os.path.join(self.src_dir, board_config.get(key, ""))
|
||||
if not os.path.isdir(libpath):
|
||||
continue
|
||||
self.env.PrependUnique(LIBPATH=libpath)
|
||||
break
|
||||
ldflags = [flag for flag in ldflags if flag] # remove empty
|
||||
return " ".join(ldflags) if ldflags else None
|
||||
|
||||
|
||||
class MbedLibBuilder(LibBuilderBase):
|
||||
def load_manifest(self):
|
||||
@@ -553,12 +601,6 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return {}
|
||||
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if os.path.isdir(os.path.join(self.path, "include")):
|
||||
return os.path.join(self.path, "include")
|
||||
return None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if os.path.isdir(os.path.join(self.path, "source")):
|
||||
@@ -671,7 +713,7 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
|
||||
def _mbed_conf_append_macros(self, mbed_config_path, macros):
|
||||
lines = []
|
||||
with open(mbed_config_path) as fp:
|
||||
with open(mbed_config_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if line == "#endif":
|
||||
@@ -690,7 +732,7 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
if len(tokens) < 2 or tokens[1] not in macros:
|
||||
lines.append(line)
|
||||
lines.append("")
|
||||
with open(mbed_config_path, "w") as fp:
|
||||
with open(mbed_config_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(lines))
|
||||
|
||||
|
||||
@@ -708,14 +750,14 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def include_dir(self):
|
||||
if "includeDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return os.path.realpath(self._manifest.get("build").get("includeDir"))
|
||||
return os.path.abspath(self._manifest.get("build").get("includeDir"))
|
||||
return LibBuilderBase.include_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if "srcDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return os.path.realpath(self._manifest.get("build").get("srcDir"))
|
||||
return os.path.abspath(self._manifest.get("build").get("srcDir"))
|
||||
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
@property
|
||||
@@ -897,7 +939,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
try:
|
||||
lm.install(spec)
|
||||
did_install = True
|
||||
except (UnknownPackageError, InternetIsOffline) as e:
|
||||
except (HTTPClientError, UnknownPackageError, InternetIsOffline) as e:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
|
||||
# reset cache
|
||||
@@ -982,7 +1024,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
found_incompat = False
|
||||
|
||||
for storage_dir in env.GetLibSourceDirs():
|
||||
storage_dir = os.path.realpath(storage_dir)
|
||||
storage_dir = os.path.abspath(storage_dir)
|
||||
if not os.path.isdir(storage_dir):
|
||||
continue
|
||||
for item in sorted(os.listdir(storage_dir)):
|
||||
@@ -1053,7 +1095,7 @@ def ConfigureProjectLibBuilder(env):
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project) # pylint: disable=no-member
|
||||
|
||||
click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
|
||||
click.echo("LDF: Library Dependency Finder -> https://bit.ly/configure-pio-ldf")
|
||||
click.echo(
|
||||
"LDF Modes: Finder ~ %s, Compatibility ~ %s"
|
||||
% (ldf_mode, project.lib_compat_mode)
|
||||
|
||||
@@ -21,20 +21,20 @@ import re
|
||||
from SCons.Platform import TempFileMunge # pylint: disable=import-error
|
||||
from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import WINDOWS, hashlib_encode_data
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
|
||||
# There are the next limits depending on a platform:
|
||||
# - Windows = 8192
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8192 if WINDOWS else 131072) - 512
|
||||
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
|
||||
|
||||
|
||||
def tempfile_arg_esc_func(arg):
|
||||
arg = quote_spaces(arg)
|
||||
if not WINDOWS:
|
||||
if not IS_WINDOWS:
|
||||
return arg
|
||||
# GCC requires double Windows slashes, let's use UNIX separator
|
||||
return WINPATHSEP_RE.sub(r"/\1", arg)
|
||||
@@ -65,7 +65,7 @@ def _file_long_data(env, data):
|
||||
)
|
||||
if os.path.isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, "w") as fp:
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
return tmp_file
|
||||
|
||||
|
||||
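`_file_long_data` above exists because command lines are capped (about 8192 characters on Windows, 131072 on Unix), so long argument lists are spilled into a temporary file and passed as `@<path>`, which GCC-style tools expand as a response file. A standalone sketch of the trick (compiler and file names are illustrative):

    import os
    import tempfile

    objects = ["obj/file%05d.o" % i for i in range(5000)]
    long_args = " ".join(objects)

    fd, rsp_path = tempfile.mkstemp(suffix=".rsp")
    with os.fdopen(fd, "w", encoding="utf8") as fp:
        fp.write(long_args)

    command = "arm-none-eabi-gcc -o firmware.elf @%s" % rsp_path
    print(len(command), "characters on the command line instead of", len(long_args))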
@@ -15,16 +15,17 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from tempfile import mkstemp
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import exec_command
|
||||
|
||||
@@ -116,7 +117,7 @@ class InoToCPPConverter(object):
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = mkstemp()[1]
|
||||
tmp_path = tempfile.mkstemp()[1]
|
||||
self.write_safe_contents(tmp_path, contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
@@ -229,7 +230,7 @@ class InoToCPPConverter(object):
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||
os.path.join(src_dir, "*.pde")
|
||||
)
|
||||
@@ -333,7 +334,13 @@ def ConfigureDebugFlags(env):
|
||||
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
||||
_cleanup_debug_flags(scope)
|
||||
|
||||
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
|
||||
debug_flags = env.ParseFlags(
|
||||
env.get("PIODEBUGFLAGS")
|
||||
if env.get("PIODEBUGFLAGS")
|
||||
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
|
||||
else env.GetProjectOption("debug_build_flags")
|
||||
)
|
||||
|
||||
env.MergeFlags(debug_flags)
|
||||
optimization_flags = [
|
||||
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
||||
@@ -369,7 +376,7 @@ def GetExtraScripts(env, scope):
|
||||
if not items:
|
||||
return items
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
return [os.path.realpath(item) for item in items]
|
||||
return [os.path.abspath(item) for item in items]
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
@@ -21,7 +21,7 @@ from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownBoard
|
||||
@@ -71,7 +71,6 @@ def LoadPioPlatform(env):
|
||||
env["PIOPLATFORM"] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for pkg in p.get_installed_packages():
|
||||
type_ = p.get_package_type(pkg.metadata.name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
@@ -83,12 +82,12 @@ def LoadPioPlatform(env):
|
||||
else pkg.path,
|
||||
)
|
||||
if (
|
||||
not WINDOWS
|
||||
not IS_WINDOWS
|
||||
and os.path.isdir(os.path.join(pkg.path, "lib"))
|
||||
and type_ != "toolchain"
|
||||
):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
|
||||
os.path.join(pkg.path, "lib"),
|
||||
)
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import environ, makedirs, remove
|
||||
from os.path import isdir, join, splitdrive
|
||||
@@ -23,9 +24,8 @@ from os.path import isdir, join, splitdrive
|
||||
from elftools.elf.descriptions import describe_sh_flags
|
||||
from elftools.elf.elffile import ELFFile
|
||||
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
from platformio.util import get_systype
|
||||
|
||||
|
||||
def _run_tool(cmd, env, tool_args):
|
||||
@@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "size-data-longcmd.txt")
|
||||
|
||||
with open(tmp_file, "w") as fp:
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(tool_args))
|
||||
|
||||
cmd.append("@" + tmp_file)
|
||||
@@ -164,7 +164,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
location = symbol_locations.get(hex(symbol["addr"]))
|
||||
if not location or "?" in location:
|
||||
continue
|
||||
if "windows" in get_systype():
|
||||
if IS_WINDOWS:
|
||||
drive, tail = splitdrive(location)
|
||||
location = join(drive.upper(), tail)
|
||||
symbol["file"] = location
|
||||
@@ -220,7 +220,7 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
"sections": sections,
|
||||
}
|
||||
|
||||
files = dict()
|
||||
files = {}
|
||||
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
file_path = symbol.get("file") or "unknown"
|
||||
if not files.get(file_path, {}):
|
||||
@@ -235,14 +235,16 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
|
||||
files[file_path]["symbols"].append(symbol)
|
||||
|
||||
data["memory"]["files"] = list()
|
||||
data["memory"]["files"] = []
|
||||
for k, v in files.items():
|
||||
file_data = {"path": k}
|
||||
file_data.update(v)
|
||||
data["memory"]["files"].append(file_data)
|
||||
|
||||
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
|
||||
fp.write(dump_json_to_unicode(data))
|
||||
with open(
|
||||
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
|
||||
) as fp:
|
||||
fp.write(json.dumps(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
@@ -29,9 +29,9 @@ def VerboseAction(_, act, actstr):
|
||||
return Action(act, actstr)
|
||||
|
||||
|
||||
def PioClean(env, clean_dir):
|
||||
def PioClean(env, clean_all=False):
|
||||
def _relpath(path):
|
||||
if compat.WINDOWS:
|
||||
if compat.IS_WINDOWS:
|
||||
prefix = os.getcwd()[:2].lower()
|
||||
if (
|
||||
":" not in prefix
|
||||
@@ -41,21 +41,30 @@ def PioClean(env, clean_dir):
|
||||
return path
|
||||
return os.path.relpath(path)
|
||||
|
||||
if not os.path.isdir(clean_dir):
|
||||
def _clean_dir(path):
|
||||
clean_rel_path = _relpath(path)
|
||||
for root, _, files in os.walk(path):
|
||||
for f in files:
|
||||
dst = os.path.join(root, f)
|
||||
os.remove(dst)
|
||||
print(
|
||||
"Removed %s"
|
||||
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
|
||||
)
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
libdeps_dir = env.subst("$PROJECT_LIBDEPS_DIR")
|
||||
if os.path.isdir(build_dir):
|
||||
_clean_dir(build_dir)
|
||||
fs.rmtree(build_dir)
|
||||
else:
|
||||
print("Build environment is clean")
|
||||
env.Exit(0)
|
||||
clean_rel_path = _relpath(clean_dir)
|
||||
for root, _, files in os.walk(clean_dir):
|
||||
for f in files:
|
||||
dst = os.path.join(root, f)
|
||||
os.remove(dst)
|
||||
print(
|
||||
"Removed %s"
|
||||
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
|
||||
)
|
||||
|
||||
if clean_all and os.path.isdir(libdeps_dir):
|
||||
_clean_dir(libdeps_dir)
|
||||
fs.rmtree(libdeps_dir)
|
||||
|
||||
print("Done cleaning")
|
||||
fs.rmtree(clean_dir)
|
||||
env.Exit(0)
def AddTarget( # pylint: disable=too-many-arguments
|
||||
@@ -65,7 +74,7 @@ def AddTarget( # pylint: disable=too-many-arguments
|
||||
actions,
|
||||
title=None,
|
||||
description=None,
|
||||
group="Generic",
|
||||
group="General",
|
||||
always_build=True,
|
||||
):
|
||||
if "__PIO_TARGETS" not in env:
|
||||
@@ -101,7 +110,13 @@ def DumpTargets(env):
|
||||
description="Generate compilation database `compile_commands.json`",
|
||||
group="Advanced",
|
||||
)
|
||||
targets["clean"] = dict(name="clean", title="Clean", group="Generic")
|
||||
targets["clean"] = dict(name="clean", title="Clean", group="General")
|
||||
targets["cleanall"] = dict(
|
||||
name="cleanall",
|
||||
title="Clean All",
|
||||
group="General",
|
||||
description="Clean a build environment and installed library dependencies",
|
||||
)
|
||||
return list(targets.values())
|
||||
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@@ -134,7 +134,7 @@ def AutodetectUploadPort(*args, **kwargs):
|
||||
continue
|
||||
port = item["port"]
|
||||
if upload_protocol.startswith("blackmagic"):
|
||||
if WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
@@ -236,9 +236,9 @@ def CheckUploadSize(_, target, source, env):
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = int(round(blocks_per_progress * percent_raw))
|
||||
if used_blocks > blocks_per_progress:
|
||||
used_blocks = blocks_per_progress
|
||||
used_blocks = min(
|
||||
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
|
||||
)
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
|
||||
)
|
||||
|
||||
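The `CheckUploadSize` hunk above clamps the progress-bar fill with `min(...)` so oversized firmware cannot draw more than ten blocks. Re-declared standalone (the upstream helper keeps its original spelling), it behaves like this:

    def format_available_bytes(value, total, blocks_per_progress=10):
        percent_raw = float(value) / float(total)
        used_blocks = min(int(round(blocks_per_progress * percent_raw)), blocks_per_progress)
        return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
            "=" * used_blocks, blocks_per_progress, percent_raw, value, total
        )


    print(format_available_bytes(1024, 8192))
    # [=         ]  12.5% (used 1024 bytes from 8192 bytes)
    print(format_available_bytes(9000, 8192))  # overflow still caps at ten "=" blocks
    # [==========]  109.9% (used 9000 bytes from 8192 bytes)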
@@ -27,7 +27,7 @@ from SCons.Script import Export # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import MACOS, string_types
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
@@ -50,7 +50,7 @@ def GetBuildType(env):
|
||||
return (
|
||||
"debug"
|
||||
if (
|
||||
set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
set(["__debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
or env.GetProjectOption("build_type") == "debug"
|
||||
)
|
||||
else "release"
|
||||
@@ -69,13 +69,14 @@ def BuildProgram(env):
|
||||
if (
|
||||
env.get("LIBS")
|
||||
and env.GetCompilerType() == "gcc"
|
||||
and (env.PioPlatform().is_embedded() or not MACOS)
|
||||
and (env.PioPlatform().is_embedded() or not IS_MACOS)
|
||||
):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(
|
||||
os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
|
||||
os.path.join("$BUILD_DIR", env.subst("$PROGNAME$PROGSUFFIX")),
|
||||
env["PIOBUILDFILES"],
|
||||
)
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
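`BuildProgram` above wraps the project libraries in GCC link groups; `--start-group`/`--end-group` make the linker rescan the archives so mutually dependent static libraries resolve regardless of order. A sketch of the resulting command line (library and file names are illustrative):

    libs = ["-lFrameworkArduino", "-lfoo", "-lbar"]  # illustrative archives
    libflags = "-Wl,--start-group %s -Wl,--end-group" % " ".join(libs)
    print("arm-none-eabi-gcc -o firmware.elf main.o " + libflags)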
@@ -206,12 +207,12 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.realpath(p)
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.File(os.path.realpath(f[1].get_path())))
|
||||
result["CCFLAGS"][i] = (f[0], env.File(os.path.abspath(f[1].get_path())))
|
||||
|
||||
return result
|
||||
|
||||
@@ -345,11 +346,10 @@ def BuildFrameworks(env, frameworks):
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
)
|
||||
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
return env.StaticLibrary(env.subst(variant_dir), nodes)
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
|
||||
@@ -78,9 +78,9 @@ class ContentCache(object):
|
||||
if not os.path.isdir(os.path.dirname(cache_path)):
|
||||
os.makedirs(os.path.dirname(cache_path))
|
||||
try:
|
||||
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
|
||||
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
with open(self._db_path, "a") as fp:
|
||||
with open(self._db_path, mode="a", encoding="utf8") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
|
||||
except UnicodeError:
|
||||
if os.path.isfile(cache_path):
|
||||
@@ -92,7 +92,7 @@ class ContentCache(object):
|
||||
return self._unlock_dbindex()
|
||||
|
||||
def delete(self, keys=None):
|
||||
""" Keys=None, delete expired items """
|
||||
"""Keys=None, delete expired items"""
|
||||
if not os.path.isfile(self._db_path):
|
||||
return None
|
||||
if not keys:
|
||||
@@ -102,7 +102,7 @@ class ContentCache(object):
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
with open(self._db_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if "=" not in line:
|
||||
@@ -129,7 +129,7 @@ class ContentCache(object):
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
with open(self._db_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ import os
|
||||
import time
|
||||
|
||||
from platformio import __accounts_api__, app
|
||||
from platformio.clients.http import HTTPClient
|
||||
from platformio.clients.http import HTTPClient, HTTPClientError
|
||||
from platformio.exception import PlatformioException
|
||||
|
||||
|
||||
@@ -61,13 +61,33 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
del account[key]
|
||||
app.set_state_item("account", account)
|
||||
|
||||
def send_auth_request(self, *args, **kwargs):
|
||||
headers = kwargs.get("headers", {})
|
||||
if "Authorization" not in headers:
|
||||
token = self.fetch_authentication_token()
|
||||
headers["Authorization"] = "Bearer %s" % token
|
||||
kwargs["headers"] = headers
|
||||
return self.fetch_json_data(*args, **kwargs)
|
||||
def fetch_json_data(self, *args, **kwargs):
|
||||
try:
|
||||
return super(AccountClient, self).fetch_json_data(*args, **kwargs)
|
||||
except HTTPClientError as exc:
|
||||
raise AccountError(exc) from exc
|
||||
|
||||
def fetch_authentication_token(self):
|
||||
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
|
||||
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
|
||||
auth = app.get_state_item("account", {}).get("auth", {})
|
||||
if auth.get("access_token") and auth.get("access_token_expire"):
|
||||
if auth.get("access_token_expire") > time.time():
|
||||
return auth.get("access_token")
|
||||
if auth.get("refresh_token"):
|
||||
try:
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
headers={
|
||||
"Authorization": "Bearer %s" % auth.get("refresh_token")
|
||||
},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data.get("auth").get("access_token")
|
||||
except AccountError:
|
||||
self.delete_local_session()
|
||||
raise AccountNotAuthorized()
|
||||
|
||||
def login(self, username, password):
|
||||
try:
|
||||
@@ -119,10 +139,11 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
return True
|
||||
|
||||
def change_password(self, old_password, new_password):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/password",
|
||||
data={"old_password": old_password, "new_password": new_password},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def registration(
|
||||
@@ -150,10 +171,11 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
)
|
||||
|
||||
def auth_token(self, password, regenerate):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/token",
|
||||
data={"password": password, "regenerate": 1 if regenerate else 0},
|
||||
x_with_authorization=True,
|
||||
).get("auth_token")
|
||||
|
||||
def forgot_password(self, username):
|
||||
@@ -164,18 +186,20 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
)
|
||||
|
||||
def get_profile(self):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/profile",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_profile(self, profile, current_password):
|
||||
profile["current_password"] = current_password
|
||||
self.delete_local_state("summary")
|
||||
response = self.send_auth_request(
|
||||
response = self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/profile",
|
||||
data=profile,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
return response
|
||||
|
||||
@@ -193,9 +217,10 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
"username": account.get("username"),
|
||||
}
|
||||
}
|
||||
result = self.send_auth_request(
|
||||
result = self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/summary",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
account["summary"] = dict(
|
||||
profile=result.get("profile"),
|
||||
@@ -207,120 +232,125 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
app.set_state_item("account", account)
|
||||
return result
|
||||
|
||||
def get_logged_username(self):
|
||||
return self.get_account_info(offline=True).get("profile").get("username")
|
||||
|
||||
def destroy_account(self):
|
||||
return self.send_auth_request("delete", "/v1/account")
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/account",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_org(self, orgname, email, displayname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs",
|
||||
data={"orgname": orgname, "email": email, "displayname": displayname},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_org(self, orgname):
|
||||
return self.send_auth_request("get", "/v1/orgs/%s" % orgname)
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_orgs(self):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_org(self, orgname, data):
|
||||
return self.send_auth_request(
|
||||
"put", "/v1/orgs/%s" % orgname, data={k: v for k, v in data.items() if v}
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_org(self, orgname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_org_owner(self, orgname, username):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_org_owners(self, orgname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_org_owner(self, orgname, username):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_team(self, orgname, teamname, description):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
data={"name": teamname, "description": description},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_team(self, orgname, teamname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_team(self, orgname, teamname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_teams(self, orgname):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_team(self, orgname, teamname, data):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_team_member(self, orgname, teamname, username):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_team_member(self, orgname, teamname, username):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def fetch_authentication_token(self):
|
||||
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
|
||||
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
|
||||
auth = app.get_state_item("account", {}).get("auth", {})
|
||||
if auth.get("access_token") and auth.get("access_token_expire"):
|
||||
if auth.get("access_token_expire") > time.time():
|
||||
return auth.get("access_token")
|
||||
if auth.get("refresh_token"):
|
||||
try:
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
headers={
|
||||
"Authorization": "Bearer %s" % auth.get("refresh_token")
|
||||
},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data.get("auth").get("access_token")
|
||||
except AccountError:
|
||||
self.delete_local_session()
|
||||
raise AccountNotAuthorized()
@@ -21,7 +21,7 @@ import requests.adapters
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error

from platformio import __check_internet_hosts__, __default_requests_timeout__, app, util
from platformio.cache import ContentCache
from platformio.cache import ContentCache, cleanup_content_cache
from platformio.exception import PlatformioException, UserSideException

try:

@@ -80,7 +80,7 @@ class EndpointSessionIterator(object):
        return self

    def next(self):
        """ For Python 2 compatibility """
        """For Python 2 compatibility"""
        return self.__next__()

    def __next__(self):

@@ -101,7 +101,10 @@ class HTTPClient(object):
    def __del__(self):
        if not self._session:
            return
        self._session.close()
        try:
            self._session.close()
        except: # pylint: disable=bare-except
            pass
        self._session = None

    def _next_session(self):

@@ -114,6 +117,21 @@ class HTTPClient(object):
        # check Internet before and resolve issue with 60 seconds timeout
        ensure_internet_on(raise_exception=True)

        headers = kwargs.get("headers", {})
        with_authorization = (
            kwargs.pop("x_with_authorization")
            if "x_with_authorization" in kwargs
            else False
        )
        if with_authorization and "Authorization" not in headers:
            # pylint: disable=import-outside-toplevel
            from platformio.clients.account import AccountClient

            headers["Authorization"] = (
                "Bearer %s" % AccountClient().fetch_authentication_token()
            )
        kwargs["headers"] = headers

        # set default timeout
        if "timeout" not in kwargs:
            kwargs["timeout"] = __default_requests_timeout__

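Together with the fetch_json_data change below, callers can now opt in to authorization and response caching per request. A sketch of the intended call shape, modeled on the RegistryClient calls later in this diff (the import path and the endpoint shown are assumptions, not quoted from the changeset):

from platformio.clients.registry import RegistryClient  # import path assumed

client = RegistryClient()
resources = client.fetch_json_data(
    "get",
    "/v3/resources",
    x_with_authorization=True,  # header becomes "Authorization: Bearer <token>"
    x_cache_valid="1h",         # cached JSON response is reused for one hour
)
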
@@ -131,7 +149,9 @@ class HTTPClient(object):
            raise HTTPClientError(str(e))

    def fetch_json_data(self, method, path, **kwargs):
        cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
        if method != "get":
            cleanup_content_cache("http")
        cache_valid = kwargs.pop("x_cache_valid") if "x_cache_valid" in kwargs else None
        if not cache_valid:
            return self._parse_json_response(self.send_request(method, path, **kwargs))
        cache_key = ContentCache.key_from_args(

@@ -176,8 +196,9 @@ def _internet_on():
                    continue
                requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
                return True
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((host, 80))
            # try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
            # to all possible addresses (IPv4 and IPv6) in turn until a connection succeeds:
            s = socket.create_connection((host, 80))
            s.close()
            return True
        except: # pylint: disable=bare-except

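The comment above is the heart of the change: socket.create_connection() resolves the host for every address family and tries each resolved address until one connect() succeeds, whereas the old AF_INET-only socket could fail on IPv6-only networks. A minimal standalone probe in the same spirit (host, port and timeout values are just examples):

import socket

def host_reachable(host, port=80, timeout=2.0):
    # create_connection() walks every address returned by getaddrinfo(),
    # so IPv4-only, IPv6-only and dual-stack hosts are all handled.
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

print(host_reachable("example.com"))
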
@@ -13,9 +13,8 @@
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import __registry_api__, fs
|
||||
from platformio.clients.account import AccountClient
|
||||
from platformio.clients.account import AccountClient, AccountError
|
||||
from platformio.clients.http import HTTPClient, HTTPClientError
|
||||
from platformio.package.meta import PackageType
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
@@ -24,26 +23,31 @@ class RegistryClient(HTTPClient):
|
||||
def __init__(self):
|
||||
super(RegistryClient, self).__init__(__registry_api__)
|
||||
|
||||
def send_auth_request(self, *args, **kwargs):
|
||||
headers = kwargs.get("headers", {})
|
||||
if "Authorization" not in headers:
|
||||
token = AccountClient().fetch_authentication_token()
|
||||
headers["Authorization"] = "Bearer %s" % token
|
||||
kwargs["headers"] = headers
|
||||
return self.fetch_json_data(*args, **kwargs)
|
||||
    @staticmethod
    def allowed_private_packages():
        private_permissions = set(
            [
                "service.registry.publish-private-tool",
                "service.registry.publish-private-platform",
                "service.registry.publish-private-library",
            ]
        )
        try:
            info = AccountClient().get_account_info() or {}
            for item in info.get("packages", []):
                if set(item.keys()) & private_permissions:
                    return True
        except AccountError:
            pass
        return False

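A worked example of the membership test above, with made-up account data: package capabilities arrive as dict keys, and any overlap with the publish-private-* permission set enables private-registry features.

private_permissions = {
    "service.registry.publish-private-tool",
    "service.registry.publish-private-platform",
    "service.registry.publish-private-library",
}
# shape of info.get("packages", []) is illustrative
account_packages = [
    {"service.registry.publish-private-library": {}, "name": "acme-lib"},
]
allowed = any(set(item.keys()) & private_permissions for item in account_packages)
print(allowed)  # True
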
def publish_package(
|
||||
self, archive_path, owner=None, released_at=None, private=False, notify=True
|
||||
def publish_package( # pylint: disable=redefined-builtin
|
||||
self, owner, type, archive_path, released_at=None, private=False, notify=True
|
||||
):
|
||||
account = AccountClient()
|
||||
if not owner:
|
||||
owner = (
|
||||
account.get_account_info(offline=True).get("profile").get("username")
|
||||
)
|
||||
with open(archive_path, "rb") as fp:
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
|
||||
"/v3/packages/%s/%s" % (owner, type),
|
||||
params={
|
||||
"private": 1 if private else 0,
|
||||
"notify": 1 if notify else 0,
|
||||
@@ -56,49 +60,49 @@ class RegistryClient(HTTPClient):
|
||||
),
|
||||
},
|
||||
data=fp,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
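A hypothetical call against the new publish_package signature, in which owner and package type are passed explicitly instead of being derived from the archive; the owner, type and archive path below are made up, and a logged-in account is assumed.

RegistryClient().publish_package(
    owner="acme",
    type="library",                        # "redefined-builtin" is silenced above
    archive_path="./acme-foo-1.0.0.tar.gz",
    private=False,
    notify=True,
)
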
def unpublish_package( # pylint: disable=redefined-builtin
|
||||
self, type, name, owner=None, version=None, undo=False
|
||||
self, owner, type, name, version=None, undo=False
|
||||
):
|
||||
account = AccountClient()
|
||||
if not owner:
|
||||
owner = (
|
||||
account.get_account_info(offline=True).get("profile").get("username")
|
||||
)
|
||||
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
||||
if version:
|
||||
path += "/" + version
|
||||
return self.send_auth_request(
|
||||
"delete",
|
||||
path,
|
||||
params={"undo": 1 if undo else 0},
|
||||
return self.fetch_json_data(
|
||||
"delete", path, params={"undo": 1 if undo else 0}, x_with_authorization=True
|
||||
)
|
||||
|
||||
def update_resource(self, urn, private):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v3/resources/%s" % urn,
|
||||
data={"private": int(private)},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def grant_access_for_resource(self, urn, client, level):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v3/resources/%s/access" % urn,
|
||||
data={"client": client, "level": level},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def revoke_access_from_resource(self, urn, client):
|
||||
return self.send_auth_request(
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v3/resources/%s/access" % urn,
|
||||
data={"client": client},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_resources(self, owner):
|
||||
return self.send_auth_request(
|
||||
"get", "/v3/resources", params={"owner": owner} if owner else None
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v3/resources",
|
||||
params={"owner": owner} if owner else None,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_packages(self, query=None, filters=None, page=None):
|
||||
@@ -128,7 +132,11 @@ class RegistryClient(HTTPClient):
|
||||
if page:
|
||||
params["page"] = int(page)
|
||||
return self.fetch_json_data(
|
||||
"get", "/v3/packages", params=params, cache_valid="1h"
|
||||
"get",
|
||||
"/v3/search",
|
||||
params=params,
|
||||
x_cache_valid="1h",
|
||||
x_with_authorization=self.allowed_private_packages(),
|
||||
)
|
||||
|
||||
def get_package(self, type_, owner, name, version=None):
|
||||
@@ -139,7 +147,8 @@ class RegistryClient(HTTPClient):
|
||||
type=type_, owner=owner.lower(), name=name.lower()
|
||||
),
|
||||
params=dict(version=version) if version else None,
|
||||
cache_valid="1h",
|
||||
x_cache_valid="1h",
|
||||
x_with_authorization=self.allowed_private_packages(),
|
||||
)
|
||||
except HTTPClientError as e:
|
||||
if e.response is not None and e.response.status_code == 404:
|
||||
|
||||
@@ -134,6 +134,14 @@ def access_list(owner, urn_type, json_output):
|
||||
table_data = []
|
||||
table_data.append(("URN:", resource.get("urn")))
|
||||
table_data.append(("Owner:", resource.get("owner")))
|
||||
table_data.append(
|
||||
(
|
||||
"Access:",
|
||||
click.style("Private", fg="red")
|
||||
if resource.get("private", False)
|
||||
else "Public",
|
||||
)
|
||||
)
|
||||
table_data.append(
|
||||
(
|
||||
"Access level(s):",
|
||||
|
||||
@@ -184,7 +184,7 @@ def account_destroy():
|
||||
click.confirm(
|
||||
"Are you sure you want to delete the %s user account?\n"
|
||||
"Warning! All linked data will be permanently removed and can not be restored."
|
||||
% client.get_account_info().get("profile").get("username"),
|
||||
% client.get_logged_username(),
|
||||
abort=True,
|
||||
)
|
||||
client.destroy_account()
|
||||
|
||||
@@ -13,12 +13,12 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import shutil
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
grpboards[board["platform"]] = []
|
||||
grpboards[board["platform"]].append(board)
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
@@ -83,4 +83,4 @@ def _print_boards_json(query, installed=False):
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
result.append(board)
|
||||
click.echo(dump_json_to_unicode(result))
|
||||
click.echo(json.dumps(result))
|
||||
|
||||
@@ -15,7 +15,9 @@
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
# pylint: disable=redefined-builtin,too-many-statements
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from os.path import dirname, isfile
|
||||
from time import time
|
||||
@@ -26,7 +28,6 @@ from tabulate import tabulate
|
||||
from platformio import app, exception, fs, util
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.commands.check.tools import CheckToolFactory
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
|
||||
@@ -105,8 +106,8 @@ def cli(
|
||||
)
|
||||
|
||||
default_patterns = [
|
||||
config.get_optional_dir("src"),
|
||||
config.get_optional_dir("include"),
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
]
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
@@ -163,7 +164,7 @@ def cli(
|
||||
print_processing_footer(result)
|
||||
|
||||
if json_output:
|
||||
click.echo(dump_json_to_unicode(results_to_json(results)))
|
||||
click.echo(json.dumps(results_to_json(results)))
|
||||
elif not silent:
|
||||
print_check_summary(results)
|
||||
|
||||
@@ -193,7 +194,7 @@ def print_processing_header(tool, envname, envdump):
|
||||
"Checking %s > %s (%s)"
|
||||
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
|
||||
)
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
@@ -214,7 +215,7 @@ def print_processing_footer(result):
|
||||
|
||||
|
||||
def collect_component_stats(result):
|
||||
components = dict()
|
||||
components = {}
|
||||
|
||||
def _append_defect(component, defect):
|
||||
if not components.get(component):
|
||||
@@ -249,7 +250,7 @@ def print_defects_stats(results):
|
||||
|
||||
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
|
||||
severity_labels.reverse()
|
||||
tabular_data = list()
|
||||
tabular_data = []
|
||||
for k, v in component_stats.items():
|
||||
tool_defect = [v.get(s, 0) for s in severity_labels]
|
||||
tabular_data.append([k] + tool_defect)
|
||||
|
||||
@@ -86,7 +86,7 @@ class DefectItem(object):
|
||||
"severity": self.SEVERITY_LABELS[self.severity],
|
||||
"category": self.category,
|
||||
"message": self.message,
|
||||
"file": os.path.realpath(self.file),
|
||||
"file": os.path.abspath(self.file),
|
||||
"line": self.line,
|
||||
"column": self.column,
|
||||
"callstack": self.callstack,
|
||||
|
||||
@@ -12,12 +12,13 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import compat, fs, proc
|
||||
from platformio import fs, proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.project.helpers import load_project_ide_data
|
||||
|
||||
@@ -104,7 +105,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with NamedTemporaryFile("w", delete=False) as fp:
|
||||
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
@@ -200,14 +201,14 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
def _add_file(path):
|
||||
if path.endswith(header_extensions):
|
||||
result["headers"].append(os.path.realpath(path))
|
||||
result["headers"].append(os.path.abspath(path))
|
||||
elif path.endswith(c_extension):
|
||||
result["c"].append(os.path.realpath(path))
|
||||
result["c"].append(os.path.abspath(path))
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.realpath(path))
|
||||
result["c++"].append(os.path.abspath(path))
|
||||
|
||||
for pattern in patterns:
|
||||
for item in compat.glob_recursive(pattern):
|
||||
for item in glob.glob(pattern, recursive=True):
|
||||
if not os.path.isdir(item):
|
||||
_add_file(item)
|
||||
for root, _, files in os.walk(item, followlinks=True):
|
||||
|
||||
@@ -77,7 +77,7 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
includes = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get_optional_dir("packages").lower()
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
includes.append(inc)
|
||||
|
||||
@@ -64,7 +64,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if any(f not in self._buffer for f in self.defect_fields):
|
||||
return None
|
||||
|
||||
args = dict()
|
||||
args = {}
|
||||
for field in self._buffer.split(self._field_delimiter):
|
||||
field = field.strip().replace('"', "")
|
||||
name, value = field.split("=", 1)
|
||||
@@ -84,7 +84,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if (
|
||||
args.get("file", "")
|
||||
.lower()
|
||||
.startswith(self.config.get_optional_dir("packages").lower())
|
||||
.startswith(self.config.get("platformio", "packages_dir").lower())
|
||||
):
|
||||
if args["id"] in breaking_defect_ids:
|
||||
if self.options.get("verbose"):
|
||||
@@ -141,10 +141,11 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
# Standards with GNU extensions are not allowed
|
||||
cmd.append("-" + flag.replace("gnu", "c"))
|
||||
if not self.is_flag_set("--std", flags):
|
||||
# Try to guess the standard version from the build flags
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
cmd.append("-" + self.convert_language_standard(flag))
|
||||
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
|
||||
@@ -200,7 +201,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
result = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get_optional_dir("packages").lower()
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
result.append(inc)
|
||||
@@ -224,6 +225,21 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
# Cppcheck is configured to return '3' if a defect is found
|
||||
return cmd_result["returncode"] in (0, 3)
|
||||
|
||||
    @staticmethod
    def convert_language_standard(flag):
        cpp_standards_map = {
            "0x": "11",
            "1y": "14",
            "1z": "17",
            "2a": "20",
        }

        standard = flag[-2:]
        # Note: GNU extensions are not supported and converted to regular standards
        return flag.replace("gnu", "c").replace(
            standard, cpp_standards_map.get(standard, standard)
        )

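A worked example of the conversion above, using a standalone copy of the helper for illustration: GNU dialect names are mapped to their ISO counterparts, and the pre-standard aliases (0x/1y/1z/2a) become 11/14/17/20, which is the form Cppcheck accepts.

def convert_language_standard(flag):
    cpp_standards_map = {"0x": "11", "1y": "14", "1z": "17", "2a": "20"}
    standard = flag[-2:]
    return flag.replace("gnu", "c").replace(
        standard, cpp_standards_map.get(standard, standard)
    )

print(convert_language_standard("-std=gnu++1z"))  # -std=c++17
print(convert_language_standard("-std=gnu11"))    # -std=c11
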
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
|
||||
|
||||
@@ -19,9 +19,10 @@ from xml.etree.ElementTree import fromstring
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc, util
|
||||
from platformio import proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.commands.check.tools.base import CheckToolBase
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
|
||||
|
||||
@@ -34,18 +35,18 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
|
||||
self.tool_path = os.path.join(
|
||||
get_core_package_dir("tool-pvs-studio"),
|
||||
"x64" if "windows" in util.get_systype() else "bin",
|
||||
"x64" if IS_WINDOWS else "bin",
|
||||
"pvs-studio",
|
||||
)
|
||||
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
|
||||
|
||||
with open(self._tmp_cfg_file, "w") as fp:
|
||||
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"exclude-path = "
|
||||
+ self.config.get_optional_dir("packages").replace("\\", "/")
|
||||
+ self.config.get("platformio", "packages_dir").replace("\\", "/")
|
||||
)
|
||||
|
||||
with open(self._tmp_cmd_file, "w") as fp:
|
||||
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
" ".join(
|
||||
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
|
||||
@@ -70,9 +71,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
def _demangle_report(self, output_file):
|
||||
converter_tool = os.path.join(
|
||||
get_core_package_dir("tool-pvs-studio"),
|
||||
"HtmlGenerator"
|
||||
if "windows" in util.get_systype()
|
||||
else os.path.join("bin", "plog-converter"),
|
||||
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
|
||||
)
|
||||
|
||||
cmd = (
|
||||
|
||||
@@ -12,14 +12,14 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import getenv, makedirs, remove
|
||||
from os.path import basename, isdir, isfile, join, realpath
|
||||
from shutil import copyfile, copytree
|
||||
from tempfile import mkdtemp
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, compat, fs
|
||||
from platformio import app, fs
|
||||
from platformio.commands.project import project_init as cmd_project_init
|
||||
from platformio.commands.project import validate_boards
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
@@ -33,8 +33,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
for i, p in enumerate(value):
|
||||
if p.startswith("~"):
|
||||
value[i] = fs.expanduser(p)
|
||||
value[i] = realpath(value[i])
|
||||
if not compat.glob_recursive(value[i]):
|
||||
value[i] = os.path.abspath(value[i])
|
||||
if not glob.glob(value[i], recursive=True):
|
||||
invalid_path = p
|
||||
break
|
||||
try:
|
||||
@@ -51,7 +51,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=mkdtemp,
|
||||
default=tempfile.mkdtemp,
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
|
||||
)
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@@ -78,28 +78,28 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
verbose,
|
||||
):
|
||||
|
||||
if not src and getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
if not src and os.getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
if not src:
|
||||
raise click.BadParameter("Missing argument 'src'")
|
||||
|
||||
try:
|
||||
app.set_session_var("force_option", True)
|
||||
|
||||
if not keep_build_dir and isdir(build_dir):
|
||||
if not keep_build_dir and os.path.isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).items():
|
||||
if not patterns:
|
||||
continue
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += compat.glob_recursive(p)
|
||||
_copy_contents(join(build_dir, dir_name), contents)
|
||||
contents += glob.glob(p, recursive=True)
|
||||
_copy_contents(os.path.join(build_dir, dir_name), contents)
|
||||
|
||||
if project_conf and isfile(project_conf):
|
||||
if project_conf and os.path.isfile(project_conf):
|
||||
_copy_project_conf(build_dir, project_conf)
|
||||
elif not board:
|
||||
raise CIBuildEnvsEmpty()
|
||||
@@ -122,52 +122,55 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents):
|
||||
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
|
||||
items = {"dirs": set(), "files": set()}
|
||||
|
||||
for path in contents:
|
||||
if isdir(path):
|
||||
if os.path.isdir(path):
|
||||
items["dirs"].add(path)
|
||||
elif isfile(path):
|
||||
elif os.path.isfile(path):
|
||||
items["files"].add(path)
|
||||
|
||||
dst_dir_name = basename(dst_dir)
|
||||
dst_dir_name = os.path.basename(dst_dir)
|
||||
|
||||
if dst_dir_name == "src" and len(items["dirs"]) == 1:
|
||||
copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
|
||||
if not os.path.isdir(dst_dir):
|
||||
shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
|
||||
else:
|
||||
if not isdir(dst_dir):
|
||||
makedirs(dst_dir)
|
||||
if not os.path.isdir(dst_dir):
|
||||
os.makedirs(dst_dir)
|
||||
for d in items["dirs"]:
|
||||
copytree(d, join(dst_dir, basename(d)), symlinks=True)
|
||||
src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
|
||||
if not os.path.isdir(src_dst_dir):
|
||||
shutil.copytree(d, src_dst_dir, symlinks=True)
|
||||
|
||||
if not items["files"]:
|
||||
return
|
||||
|
||||
if dst_dir_name == "lib":
|
||||
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
|
||||
dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))
|
||||
|
||||
for f in items["files"]:
|
||||
dst_file = join(dst_dir, basename(f))
|
||||
dst_file = os.path.join(dst_dir, os.path.basename(f))
|
||||
if f == dst_file:
|
||||
continue
|
||||
copyfile(f, dst_file)
|
||||
shutil.copyfile(f, dst_file)
|
||||
|
||||
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
|
||||
contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
|
||||
for path in contents:
|
||||
path = realpath(path)
|
||||
if isdir(path):
|
||||
path = os.path.abspath(path)
|
||||
if os.path.isdir(path):
|
||||
fs.rmtree(path)
|
||||
elif isfile(path):
|
||||
remove(path)
|
||||
elif os.path.isfile(path):
|
||||
os.remove(path)
|
||||
|
||||
|
||||
def _copy_project_conf(build_dir, project_conf):
|
||||
config = ProjectConfig(project_conf, parse_extra=False)
|
||||
if config.has_section("platformio"):
|
||||
config.remove_section("platformio")
|
||||
config.save(join(build_dir, "platformio.ini"))
|
||||
config.save(os.path.join(build_dir, "platformio.ini"))
|
||||
|
||||
180
platformio/commands/debug.py
Normal file
@@ -0,0 +1,180 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-locals
|
||||
# pylint: disable=too-many-branches, too-many-statements
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, exception, fs, proc
|
||||
from platformio.commands.platform import init_platform
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.debug import helpers
|
||||
from platformio.debug.config.factory import DebugConfigFactory
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.process.gdb import GDBClientProcess
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="Unified debugger",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
environment,
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# use env variables from Eclipse or CLion
|
||||
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
break
|
||||
if os.getenv(name):
|
||||
project_dir = os.getenv(name)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
project_config = ProjectConfig.get_instance(project_conf)
|
||||
project_config.validate(envs=[environment] if environment else None)
|
||||
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
env_options = project_config.items(env=env_name, as_dict=True)
|
||||
if "platform" not in env_options:
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
|
||||
with fs.cd(project_dir):
|
||||
debug_config = DebugConfigFactory.new(
|
||||
init_platform(env_options["platform"]), project_config, env_name
|
||||
)
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
return subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
)
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(e) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_config.load_cmds == ["preload"]
|
||||
load_mode = load_mode or debug_config.load_mode
|
||||
if load_mode == "always":
|
||||
rebuild_prog = preload or not helpers.has_debug_symbols(
|
||||
debug_config.program_path
|
||||
)
|
||||
elif load_mode == "modified":
|
||||
rebuild_prog = helpers.is_prog_obsolete(
|
||||
debug_config.program_path
|
||||
) or not helpers.has_debug_symbols(debug_config.program_path)
|
||||
|
||||
if not (debug_config.program_path and os.path.isfile(debug_config.program_path)):
|
||||
rebuild_prog = True
|
||||
|
||||
if preload or (not rebuild_prog and load_mode != "always"):
|
||||
# don't load firmware through debug server
|
||||
debug_config.load_cmds = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_gdbmi_mode():
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream(
|
||||
"~", "Preparing firmware for debugging...\n"
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
stream.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
)
|
||||
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(debug_config.program_path)
|
||||
|
||||
if not os.path.isfile(debug_config.program_path):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
client = GDBClientProcess(project_dir, debug_config)
|
||||
coro = client.run(__unprocessed)
|
||||
try:
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
loop.run_until_complete(coro)
|
||||
if IS_WINDOWS:
|
||||
# an issue with `asyncio` executor and STDIN,
# it cannot be closed gracefully
|
||||
proc.force_exit()
|
||||
finally:
|
||||
del client
|
||||
loop.close()
|
||||
|
||||
return True
|
||||
@@ -1,175 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-statements
|
||||
# pylint: disable=too-many-locals, too-many-branches
|
||||
|
||||
import os
|
||||
import signal
|
||||
from os.path import isfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, exception, fs, proc
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.package.manager.core import inject_contrib_pysite
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project, load_project_ide_data
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="Unified debugger",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# use env variables from Eclipse or CLion
|
||||
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
break
|
||||
if os.getenv(sysenv):
|
||||
project_dir = os.getenv(sysenv)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(project_conf)
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
|
||||
env_name = environment or helpers.get_default_debug_env(config)
|
||||
env_options = config.items(env=env_name, as_dict=True)
|
||||
if not set(env_options.keys()) >= set(["platform", "board"]):
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
|
||||
try:
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
except UnknownPlatform:
|
||||
ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[env_options["platform"]],
|
||||
skip_default_package=True,
|
||||
)
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
|
||||
debug_options = helpers.configure_initial_debug_options(platform, env_options)
|
||||
assert debug_options
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
|
||||
|
||||
ide_data = load_project_ide_data(project_dir, env_name)
|
||||
if not ide_data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
result = proc.exec_command([ide_data["gdb_path"], "--version"])
|
||||
if result["returncode"] == 0:
|
||||
return click.echo(result["out"])
|
||||
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(e) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
|
||||
try:
|
||||
debug_options = platform.configure_debug_options(debug_options, ide_data)
|
||||
except NotImplementedError:
|
||||
# legacy for ESP32 dev-platform <=2.0.0
|
||||
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
|
||||
debug_options, ide_data
|
||||
)
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_options["load_cmds"] == ["preload"]
|
||||
load_mode = debug_options["load_mode"]
|
||||
if load_mode == "always":
|
||||
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
elif load_mode == "modified":
|
||||
rebuild_prog = helpers.is_prog_obsolete(
|
||||
ide_data["prog_path"]
|
||||
) or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
else:
|
||||
rebuild_prog = not isfile(ide_data["prog_path"])
|
||||
|
||||
if preload or (not rebuild_prog and load_mode != "always"):
|
||||
# don't load firmware through debug server
|
||||
debug_options["load_cmds"] = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_gdbmi_mode():
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream(
|
||||
"~", "Preparing firmware for debugging...\n"
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
||||
stream.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
||||
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(ide_data["prog_path"])
|
||||
|
||||
if not isfile(ide_data["prog_path"]):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
# run debugging client
|
||||
inject_contrib_pysite()
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.debug.process.client import GDBClient, reactor
|
||||
|
||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
||||
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
|
||||
|
||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
||||
reactor.run()
|
||||
|
||||
return True
|
||||
@@ -1,303 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from fnmatch import fnmatch
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
from os.path import isfile
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.compat import is_bytes
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
self.STDOUT.write(escape_gdbmi_stream("~", text))
|
||||
self.STDOUT.flush()
|
||||
|
||||
|
||||
def is_gdbmi_mode():
|
||||
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
|
||||
|
||||
|
||||
def escape_gdbmi_stream(prefix, stream):
|
||||
bytes_stream = False
|
||||
if is_bytes(stream):
|
||||
bytes_stream = True
|
||||
stream = stream.decode()
|
||||
|
||||
if not stream:
|
||||
return b"" if bytes_stream else ""
|
||||
|
||||
ends_nl = stream.endswith("\n")
|
||||
stream = re.sub(r"\\+", "\\\\\\\\", stream)
|
||||
stream = stream.replace('"', '\\"')
|
||||
stream = stream.replace("\n", "\\n")
|
||||
stream = '%s"%s"' % (prefix, stream)
|
||||
if ends_nl:
|
||||
stream += "\n"
|
||||
|
||||
return stream.encode() if bytes_stream else stream
|
||||
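A simplified standalone version of the escaping above, for illustration only: console text is wrapped in a '~"..."' stream record with backslashes, quotes and newlines escaped so a GDB/MI front end can render it (the original helper additionally normalizes backslash runs and handles bytes input).

def to_gdbmi_console_record(text):
    body = (
        text.replace("\\", "\\\\")
        .replace('"', '\\"')
        .replace("\n", "\\n")
    )
    record = '~"%s"' % body
    # a trailing real newline is re-appended, mirroring escape_gdbmi_stream()
    return record + ("\n" if text.endswith("\n") else "")

print(to_gdbmi_console_record('Reading symbols from "firmware.elf"\n'))
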
|
||||
|
||||
def get_default_debug_env(config):
|
||||
default_envs = config.default_envs()
|
||||
all_envs = config.envs()
|
||||
for env in default_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
for env in all_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
return default_envs[0] if default_envs else all_envs[0]
|
||||
|
||||
|
||||
def predebug_project(ctx, project_dir, env_name, preload, verbose):
|
||||
ctx.invoke(
|
||||
cmd_run,
|
||||
project_dir=project_dir,
|
||||
environment=[env_name],
|
||||
target=["debug"] + (["upload"] if preload else []),
|
||||
verbose=verbose,
|
||||
)
|
||||
if preload:
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def configure_initial_debug_options(platform, env_options):
|
||||
def _cleanup_cmds(items):
|
||||
items = ProjectConfig.parse_multi_values(items)
|
||||
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
|
||||
|
||||
board_config = platform.board_config(env_options["board"])
|
||||
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
|
||||
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
|
||||
server_options = None
|
||||
|
||||
# specific server per a system
|
||||
if isinstance(tool_settings.get("server", {}), list):
|
||||
for item in tool_settings["server"][:]:
|
||||
tool_settings["server"] = item
|
||||
if util.get_systype() in item.get("system", []):
|
||||
break
|
||||
|
||||
# user overwrites debug server
|
||||
if env_options.get("debug_server"):
|
||||
server_options = {
|
||||
"cwd": None,
|
||||
"executable": None,
|
||||
"arguments": env_options.get("debug_server"),
|
||||
}
|
||||
server_options["executable"] = server_options["arguments"][0]
|
||||
server_options["arguments"] = server_options["arguments"][1:]
|
||||
elif "server" in tool_settings:
|
||||
server_options = tool_settings["server"]
|
||||
server_package = server_options.get("package")
|
||||
server_package_dir = (
|
||||
platform.get_package_dir(server_package) if server_package else None
|
||||
)
|
||||
if server_package and not server_package_dir:
|
||||
platform.install_packages(
|
||||
with_packages=[server_package], skip_default_package=True, silent=True
|
||||
)
|
||||
server_package_dir = platform.get_package_dir(server_package)
|
||||
server_options.update(
|
||||
dict(
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=server_options.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
for a in server_options.get("arguments", [])
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
|
||||
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
|
||||
result = dict(
|
||||
tool=tool_name,
|
||||
upload_protocol=env_options.get(
|
||||
"upload_protocol", board_config.get("upload", {}).get("protocol")
|
||||
),
|
||||
load_cmds=_cleanup_cmds(
|
||||
env_options.get(
|
||||
"debug_load_cmds",
|
||||
tool_settings.get(
|
||||
"load_cmds",
|
||||
tool_settings.get(
|
||||
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
|
||||
),
|
||||
),
|
||||
)
|
||||
),
|
||||
load_mode=env_options.get(
|
||||
"debug_load_mode",
|
||||
tool_settings.get(
|
||||
"load_mode", ProjectOptions["env.debug_load_mode"].default
|
||||
),
|
||||
),
|
||||
init_break=env_options.get(
|
||||
"debug_init_break",
|
||||
tool_settings.get(
|
||||
"init_break", ProjectOptions["env.debug_init_break"].default
|
||||
),
|
||||
),
|
||||
init_cmds=_cleanup_cmds(
|
||||
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
|
||||
),
|
||||
extra_cmds=extra_cmds,
|
||||
require_debug_port=tool_settings.get("require_debug_port", False),
|
||||
port=reveal_debug_port(
|
||||
env_options.get("debug_port", tool_settings.get("port")),
|
||||
tool_name,
|
||||
tool_settings,
|
||||
),
|
||||
speed=env_options.get("debug_speed", tool_settings.get("speed")),
|
||||
server=server_options,
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
def configure_esp32_load_cmds(debug_options, configuration):
|
||||
"""
|
||||
DEPRECATED: Moved to ESP32 dev-platform
|
||||
See platform.py::configure_debug_options
|
||||
"""
|
||||
flash_images = configuration.get("extra", {}).get("flash_images")
|
||||
ignore_conds = [
|
||||
debug_options["load_cmds"] != ["load"],
|
||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
||||
not flash_images,
|
||||
not all(isfile(item["path"]) for item in flash_images),
|
||||
]
|
||||
if any(ignore_conds):
|
||||
return debug_options["load_cmds"]
|
||||
|
||||
mon_cmds = [
|
||||
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
|
||||
path=fs.to_unix_path(item["path"]), offset=item["offset"]
|
||||
)
|
||||
for item in flash_images
|
||||
]
|
||||
mon_cmds.append(
|
||||
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
|
||||
% fs.to_unix_path(configuration["prog_path"][:-4])
|
||||
)
|
||||
return mon_cmds
|
||||
|
||||
|
||||
def has_debug_symbols(prog_path):
|
||||
if not isfile(prog_path):
|
||||
return False
|
||||
matched = {
|
||||
b".debug_info": False,
|
||||
b".debug_abbrev": False,
|
||||
b" -Og": False,
|
||||
b" -g": False,
|
||||
b"__PLATFORMIO_BUILD_DEBUG__": False,
|
||||
}
|
||||
with open(prog_path, "rb") as fp:
|
||||
last_data = b""
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
for pattern, found in matched.items():
|
||||
if found:
|
||||
continue
|
||||
if pattern in last_data + data:
|
||||
matched[pattern] = True
|
||||
last_data = data
|
||||
return all(matched.values())
|
||||
|
||||
|
||||
def is_prog_obsolete(prog_path):
|
||||
prog_hash_path = prog_path + ".sha1"
|
||||
if not isfile(prog_path):
|
||||
return True
|
||||
shasum = sha1()
|
||||
with open(prog_path, "rb") as fp:
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
shasum.update(data)
|
||||
new_digest = shasum.hexdigest()
|
||||
old_digest = None
|
||||
if isfile(prog_hash_path):
|
||||
with open(prog_hash_path) as fp:
|
||||
old_digest = fp.read()
|
||||
if new_digest == old_digest:
|
||||
return False
|
||||
with open(prog_hash_path, "w") as fp:
|
||||
fp.write(new_digest)
|
||||
return True
|
||||
|
||||
|
||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
||||
def _get_pattern():
|
||||
if not env_debug_port:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
||||
return env_debug_port
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_serial_port(hwids):
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
if not _is_match_pattern(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if tool_name.startswith("blackmagic"):
|
||||
if (
|
||||
"windows" in util.get_systype()
|
||||
and port.startswith("COM")
|
||||
and len(port) > 4
|
||||
):
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
for hwid in hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
if env_debug_port and not _get_pattern():
|
||||
return env_debug_port
|
||||
if not tool_settings.get("require_debug_port"):
|
||||
return None
|
||||
|
||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
||||
if not debug_port:
|
||||
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
|
||||
return debug_port
|
||||
@@ -1,161 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
GDB_DEFAULT_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor init
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_STUTIL_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
monitor halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_JLINK_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
monitor halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor clrbp
|
||||
monitor reset
|
||||
monitor go
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor clrbp
|
||||
monitor speed auto
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_BLACKMAGIC_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor swdp_scan
|
||||
attach 1
|
||||
set mem inaccessible-by-default off
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
"""
|
||||
|
||||
GDB_MSPDEBUG_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor erase
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_QEMU_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_RENODE_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
$LOAD_CMDS
|
||||
monitor start
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
monitor start
|
||||
"""
|
||||
|
||||
|
||||
TOOL_TO_CONFIG = {
|
||||
"jlink": GDB_JLINK_INIT_CONFIG,
|
||||
"mspdebug": GDB_MSPDEBUG_INIT_CONFIG,
|
||||
"qemu": GDB_QEMU_INIT_CONFIG,
|
||||
"blackmagic": GDB_BLACKMAGIC_INIT_CONFIG,
|
||||
"renode": GDB_RENODE_INIT_CONFIG,
|
||||
}
|
||||
|
||||
|
||||
def get_gdb_init_config(debug_options):
|
||||
tool = debug_options.get("tool")
|
||||
if tool and tool in TOOL_TO_CONFIG:
|
||||
return TOOL_TO_CONFIG[tool]
|
||||
server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
|
||||
if "st-util" in server_exe:
|
||||
return GDB_STUTIL_INIT_CONFIG
|
||||
return GDB_DEFAULT_INIT_CONFIG
|
||||
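For illustration, how the helper being removed here resolved its init script for a few made-up option dicts (this assumes the module's names above are in scope): an explicit tool match wins, an "st-util" server executable selects the ST-Link script, and everything else falls back to the default script.

assert get_gdb_init_config({"tool": "jlink"}) is GDB_JLINK_INIT_CONFIG
assert (
    get_gdb_init_config({"tool": None, "server": {"executable": "st-util"}})
    is GDB_STUTIL_INIT_CONFIG
)
assert (
    get_gdb_init_config({"tool": "custom", "server": None})
    is GDB_DEFAULT_INIT_CONFIG
)
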
@@ -1,93 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import signal
|
||||
import time
|
||||
|
||||
import click
|
||||
from twisted.internet import protocol # pylint: disable=import-error
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import string_types
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_core_dir
|
||||
|
||||
|
||||
class BaseProcess(protocol.ProcessProtocol, object):
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
LOG_FILE = None
|
||||
|
||||
COMMON_PATTERNS = {
|
||||
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
|
||||
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
|
||||
"PYTHONEXE": get_pythonexe_path(),
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self._last_activity = 0
|
||||
|
||||
def apply_patterns(self, source, patterns=None):
|
||||
_patterns = self.COMMON_PATTERNS.copy()
|
||||
_patterns.update(patterns or {})
|
||||
|
||||
for key, value in _patterns.items():
|
||||
if key.endswith(("_DIR", "_PATH")):
|
||||
_patterns[key] = fs.to_unix_path(value)
|
||||
|
||||
def _replace(text):
|
||||
for key, value in _patterns.items():
|
||||
pattern = "$%s" % key
|
||||
text = text.replace(pattern, value or "")
|
||||
return text
|
||||
|
||||
if isinstance(source, string_types):
|
||||
source = _replace(source)
|
||||
elif isinstance(source, (list, dict)):
|
||||
items = enumerate(source) if isinstance(source, list) else source.items()
|
||||
for key, value in items:
|
||||
if isinstance(value, string_types):
|
||||
source[key] = _replace(value)
|
||||
elif isinstance(value, (list, dict)):
|
||||
source[key] = self.apply_patterns(value, patterns)
|
||||
|
||||
return source
|
||||
|
||||
def onStdInData(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
|
||||
def outReceived(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
while data:
|
||||
chunk = data[: self.STDOUT_CHUNK_SIZE]
|
||||
click.echo(chunk, nl=False)
|
||||
data = data[self.STDOUT_CHUNK_SIZE :]
|
||||
|
||||
def errReceived(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
click.echo(data, nl=False, err=True)
|
||||
|
||||
def processEnded(self, _):
|
||||
self._last_activity = time.time()
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
@@ -1,280 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
import signal
import time
from hashlib import sha1
from os.path import basename, dirname, isdir, join, realpath, splitext
from tempfile import mkdtemp

from twisted.internet import defer  # pylint: disable=import-error
from twisted.internet import protocol  # pylint: disable=import-error
from twisted.internet import reactor  # pylint: disable=import-error
from twisted.internet import stdio  # pylint: disable=import-error
from twisted.internet import task  # pylint: disable=import-error

from platformio import fs, proc, telemetry, util
from platformio.cache import ContentCache
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.initcfgs import get_gdb_init_config
from platformio.commands.debug.process.base import BaseProcess
from platformio.commands.debug.process.server import DebugServer
from platformio.compat import hashlib_encode_data, is_bytes
from platformio.project.helpers import get_project_cache_dir


class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes

    PIO_SRC_NAME = ".pioinit"
    INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"

    def __init__(self, project_dir, args, debug_options, env_options):
        super(GDBClient, self).__init__()
        self.project_dir = project_dir
        self.args = list(args)
        self.debug_options = debug_options
        self.env_options = env_options

        self._debug_server = DebugServer(debug_options, env_options)
        self._session_id = None

        if not isdir(get_project_cache_dir()):
            os.makedirs(get_project_cache_dir())
        self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")

        self._target_is_run = False
        self._auto_continue_timer = None
        self._errors_buffer = b""

    @defer.inlineCallbacks
    def spawn(self, gdb_path, prog_path):
        session_hash = gdb_path + prog_path
        self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
        self._kill_previous_session()

        patterns = {
            "PROJECT_DIR": self.project_dir,
            "PROG_PATH": prog_path,
            "PROG_DIR": dirname(prog_path),
            "PROG_NAME": basename(splitext(prog_path)[0]),
            "DEBUG_PORT": self.debug_options["port"],
            "UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
            "INIT_BREAK": self.debug_options["init_break"] or "",
            "LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
        }

        yield self._debug_server.spawn(patterns)
        if not patterns["DEBUG_PORT"]:
            patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()

        self.generate_pioinit(self._gdbsrc_dir, patterns)

        # start GDB client
        args = [
            "piogdb",
            "-q",
            "--directory",
            self._gdbsrc_dir,
            "--directory",
            self.project_dir,
            "-l",
            "10",
        ]
        args.extend(self.args)
        if not gdb_path:
            raise DebugInvalidOptionsError("GDB client is not configured")
        gdb_data_dir = self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory", gdb_data_dir])
        args.append(patterns["PROG_PATH"])

        transport = reactor.spawnProcess(
            self, gdb_path, args, path=self.project_dir, env=os.environ
        )
        defer.returnValue(transport)

    @staticmethod
    def _get_data_dir(gdb_path):
        if "msp430" in gdb_path:
            return None
        gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
        return gdb_data_dir if isdir(gdb_data_dir) else None

    def generate_pioinit(self, dst_dir, patterns):
        # default GDB init commands depending on debug tool
        commands = get_gdb_init_config(self.debug_options).split("\n")

        if self.debug_options["init_cmds"]:
            commands = self.debug_options["init_cmds"]
        commands.extend(self.debug_options["extra_cmds"])

        if not any("define pio_reset_run_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_run_target",
                " echo Warning! Undefined pio_reset_run_target command\\n",
                " monitor reset",
                "end",
            ] + commands
        if not any("define pio_reset_halt_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_halt_target",
                " echo Warning! Undefined pio_reset_halt_target command\\n",
                " monitor reset halt",
                "end",
            ] + commands
        if not any("define pio_restart_target" in cmd for cmd in commands):
            commands += [
                "define pio_restart_target",
                " pio_reset_halt_target",
                " $INIT_BREAK",
                " %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
                "end",
            ]

        banner = [
            "echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
            "echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
            "echo PlatformIO: Initializing remote target...\\n",
        ]
        footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
        commands = banner + commands + footer

        with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
            fp.write("\n".join(self.apply_patterns(commands, patterns)))

    def connectionMade(self):
        self._lock_session(self.transport.pid)

        p = protocol.Protocol()
        p.dataReceived = self.onStdInData
        stdio.StandardIO(p)

    def onStdInData(self, data):
        super(GDBClient, self).onStdInData(data)
        if b"-exec-run" in data:
            if self._target_is_run:
                token, _ = data.split(b"-", 1)
                self.outReceived(token + b"^running\n")
                return
            data = data.replace(b"-exec-run", b"-exec-continue")

        if b"-exec-continue" in data:
            self._target_is_run = True
        if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
            # Allow terminating via SIGINT/CTRL+C
            signal.signal(signal.SIGINT, signal.default_int_handler)
            self.transport.write(b"pio_reset_run_target\n")
        self.transport.write(data)

    def processEnded(self, reason):  # pylint: disable=unused-argument
        self._unlock_session()
        if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
            fs.rmtree(self._gdbsrc_dir)
        if self._debug_server:
            self._debug_server.terminate()

        reactor.stop()

    def outReceived(self, data):
        super(GDBClient, self).outReceived(data)
        self._handle_error(data)
        # go to init break automatically
        if self.INIT_COMPLETED_BANNER.encode() in data:
            telemetry.send_event(
                "Debug", "Started", telemetry.dump_run_environment(self.env_options)
            )
            self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
            self._auto_continue_timer.start(0.1)

    def errReceived(self, data):
        super(GDBClient, self).errReceived(data)
        self._handle_error(data)

    def console_log(self, msg):
        if helpers.is_gdbmi_mode():
            msg = helpers.escape_gdbmi_stream("~", msg)
        self.outReceived(msg if is_bytes(msg) else msg.encode())

    def _auto_exec_continue(self):
        auto_exec_delay = 0.5  # in seconds
        if self._last_activity > (time.time() - auto_exec_delay):
            return
        if self._auto_continue_timer:
            self._auto_continue_timer.stop()
        self._auto_continue_timer = None

        if not self.debug_options["init_break"] or self._target_is_run:
            return
        self.console_log(
            "PlatformIO: Resume the execution to `debug_init_break = %s`\n"
            % self.debug_options["init_break"]
        )
        self.console_log(
            "PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
        )
        self.transport.write(
            b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
        )
        self._target_is_run = True

    def _handle_error(self, data):
        self._errors_buffer = (self._errors_buffer + data)[-8192:]  # keep last 8 KBytes
        if not (
            self.PIO_SRC_NAME.encode() in self._errors_buffer
            and b"Error in sourced" in self._errors_buffer
        ):
            return

        last_erros = self._errors_buffer.decode()
        last_erros = " ".join(reversed(last_erros.split("\n")))
        last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)

        err = "%s -> %s" % (
            telemetry.dump_run_environment(self.env_options),
            last_erros,
        )
        telemetry.send_exception("DebugInitError: %s" % err)
        self.transport.loseConnection()

    def _kill_previous_session(self):
        assert self._session_id
        pid = None
        with ContentCache() as cc:
            pid = cc.get(self._session_id)
            cc.delete(self._session_id)
        if not pid:
            return
        if "windows" in util.get_systype():
            kill = ["Taskkill", "/PID", pid, "/F"]
        else:
            kill = ["kill", pid]
        try:
            proc.exec_command(kill)
        except:  # pylint: disable=bare-except
            pass

    def _lock_session(self, pid):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.set(self._session_id, str(pid), "1h")

    def _unlock_session(self):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.delete(self._session_id)
@@ -1,175 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import time
from os.path import isdir, isfile, join

from twisted.internet import defer  # pylint: disable=import-error
from twisted.internet import reactor  # pylint: disable=import-error

from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process.base import BaseProcess
from platformio.proc import where_is_program


class DebugServer(BaseProcess):
    def __init__(self, debug_options, env_options):
        super(DebugServer, self).__init__()
        self.debug_options = debug_options
        self.env_options = env_options

        self._debug_port = ":3333"
        self._transport = None
        self._process_ended = False
        self._ready = False

    @defer.inlineCallbacks
    def spawn(self, patterns):  # pylint: disable=too-many-branches
        systype = util.get_systype()
        server = self.debug_options.get("server")
        if not server:
            defer.returnValue(None)
        server = self.apply_patterns(server, patterns)
        server_executable = server["executable"]
        if not server_executable:
            defer.returnValue(None)
        if server["cwd"]:
            server_executable = join(server["cwd"], server_executable)
        if (
            "windows" in systype
            and not server_executable.endswith(".exe")
            and isfile(server_executable + ".exe")
        ):
            server_executable = server_executable + ".exe"

        if not isfile(server_executable):
            server_executable = where_is_program(server_executable)
        if not isfile(server_executable):
            raise DebugInvalidOptionsError(
                "\nCould not launch Debug Server '%s'. Please check that it "
                "is installed and is included in a system PATH\n\n"
                "See documentation or contact contact@platformio.org:\n"
                "https://docs.platformio.org/page/plus/debugging.html\n"
                % server_executable
            )

        openocd_pipe_allowed = all(
            [not self.debug_options["port"], "openocd" in server_executable]
        )
        if openocd_pipe_allowed:
            args = []
            if server["cwd"]:
                args.extend(["-s", server["cwd"]])
            args.extend(
                ["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
            )
            args.extend(server["arguments"])
            str_args = " ".join(
                [arg if arg.startswith("-") else '"%s"' % arg for arg in args]
            )
            self._debug_port = '| "%s" %s' % (server_executable, str_args)
            self._debug_port = fs.to_unix_path(self._debug_port)
            defer.returnValue(self._debug_port)

        env = os.environ.copy()
        # prepend server "lib" folder to LD path
        if (
            "windows" not in systype
            and server["cwd"]
            and isdir(join(server["cwd"], "lib"))
        ):
            ld_key = "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
            env[ld_key] = join(server["cwd"], "lib")
            if os.environ.get(ld_key):
                env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
        # prepend BIN to PATH
        if server["cwd"] and isdir(join(server["cwd"], "bin")):
            env["PATH"] = "%s%s%s" % (
                join(server["cwd"], "bin"),
                os.pathsep,
                os.environ.get("PATH", os.environ.get("Path", "")),
            )

        self._transport = reactor.spawnProcess(
            self,
            server_executable,
            [server_executable] + server["arguments"],
            path=server["cwd"],
            env=env,
        )
        if "mspdebug" in server_executable.lower():
            self._debug_port = ":2000"
        elif "jlink" in server_executable.lower():
            self._debug_port = ":2331"
        elif "qemu" in server_executable.lower():
            self._debug_port = ":1234"

        yield self._wait_until_ready()

        defer.returnValue(self._debug_port)

    @defer.inlineCallbacks
    def _wait_until_ready(self):
        ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
        timeout = 60 if ready_pattern else 10
        elapsed = 0
        delay = 0.5
        auto_ready_delay = 0.5
        while not self._ready and not self._process_ended and elapsed < timeout:
            yield self.async_sleep(delay)
            if not ready_pattern:
                self._ready = self._last_activity < (time.time() - auto_ready_delay)
            elapsed += delay

    def _check_ready_by_pattern(self, data):
        if self._ready:
            return self._ready
        ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
        if ready_pattern:
            self._ready = ready_pattern.encode() in data
        return self._ready

    @staticmethod
    def async_sleep(secs):
        d = defer.Deferred()
        reactor.callLater(secs, d.callback, None)
        return d

    def get_debug_port(self):
        return self._debug_port

    def outReceived(self, data):
        super(DebugServer, self).outReceived(
            escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
        )
        self._check_ready_by_pattern(data)

    def errReceived(self, data):
        super(DebugServer, self).errReceived(data)
        self._check_ready_by_pattern(data)

    def processEnded(self, reason):
        self._process_ended = True
        super(DebugServer, self).processEnded(reason)

    def terminate(self):
        if self._process_ended or not self._transport:
            return
        try:
            self._transport.signalProcess("KILL")
        except:  # pylint: disable=bare-except
            pass
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
import sys
from fnmatch import fnmatch
@@ -21,7 +22,6 @@ from serial.tools import miniterm

from platformio import exception, fs, util
from platformio.commands.device import helpers as device_helpers
from platformio.compat import dump_json_to_unicode
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import NotPlatformIOProjectError

@@ -52,9 +52,7 @@ def device_list(  # pylint: disable=too-many-branches
single_key = list(data)[0] if len(list(data)) == 1 else None

if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data)
)
return click.echo(json.dumps(data[single_key] if single_key else data))

titles = {
"serial": "Serial Ports",
@@ -174,28 +172,21 @@ def device_list(  # pylint: disable=too-many-branches
help="Load configuration from `platformio.ini` and specified environment",
)
def device_monitor(**kwargs):  # pylint: disable=too-many-branches
# load default monitor filters
filters_dir = os.path.join(fs.get_source_dir(), "commands", "device", "filters")
for name in os.listdir(filters_dir):
if not name.endswith(".py"):
continue
device_helpers.load_monitor_filter(
os.path.join(filters_dir, name), options=kwargs
)

project_options = {}
platform = None
try:
with fs.cd(kwargs["project_dir"]):
project_options = device_helpers.get_project_options(kwargs["environment"])
kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options)
kwargs = device_helpers.apply_project_monitor_options(
kwargs, project_options
)
if "platform" in project_options:
platform = PlatformFactory.new(project_options["platform"])
except NotPlatformIOProjectError:
pass

platform = None
if "platform" in project_options:
with fs.cd(kwargs["project_dir"]):
platform = PlatformFactory.new(project_options["platform"])
device_helpers.register_platform_filters(platform, options=kwargs)
with fs.cd(kwargs["project_dir"]):
device_helpers.register_filters(platform=platform, options=kwargs)

if not kwargs["port"]:
ports = util.get_serial_ports(filter_hwid=True)
@@ -233,7 +224,7 @@ def device_monitor(**kwargs):  # pylint: disable=too-many-branches
"--- Available filters and text transformations: %s"
% ", ".join(sorted(miniterm.TRANSFORMATIONS.keys()))
)
click.echo("--- More details at http://bit.ly/pio-monitor-filters")
click.echo("--- More details at https://bit.ly/pio-monitor-filters")
try:
miniterm.main(
default_port=kwargs["port"],

@@ -19,7 +19,7 @@ from platformio.project.config import ProjectConfig

class DeviceMonitorFilter(miniterm.Transform):
def __init__(self, options=None):
""" Called by PlatformIO to pass context """
"""Called by PlatformIO to pass context"""
miniterm.Transform.__init__(self)

self.options = options or {}
@@ -35,7 +35,7 @@ class DeviceMonitorFilter(miniterm.Transform):
self.environment = self.config.envs()[0]

def __call__(self):
""" Called by the miniterm library when the filter is actually used """
"""Called by the miniterm library when the filter is actually used"""
return self

@property

@@ -31,6 +31,7 @@ class LogToFile(DeviceMonitorFilter):
"%y%m%d-%H%M%S"
)
print("--- Logging an output to %s" % os.path.abspath(log_file_name))
# pylint: disable=consider-using-with
self._log_fp = io.open(log_file_name, "w", encoding="utf-8")
return self

@@ -20,6 +20,7 @@ from serial.tools import miniterm
from platformio import fs
from platformio.commands.device import DeviceMonitorFilter
from platformio.compat import get_object_members, load_python_module
from platformio.package.manager.tool import ToolPackageManager
from platformio.project.config import ProjectConfig


@@ -92,15 +93,40 @@ def load_monitor_filter(path, options=None):
return True


def register_platform_filters(platform, options=None):
monitor_dir = os.path.join(platform.get_dir(), "monitor")
def load_monitor_filters(monitor_dir, prefix=None, options=None):
if not os.path.isdir(monitor_dir):
return

for name in os.listdir(monitor_dir):
if not name.startswith("filter_") or not name.endswith(".py"):
if (prefix and not name.startswith(prefix)) or not name.endswith(".py"):
continue
path = os.path.join(monitor_dir, name)
if not os.path.isfile(path):
continue
load_monitor_filter(path, options)


def register_filters(platform=None, options=None):
# project filters
load_monitor_filters(
ProjectConfig.get_instance().get("platformio", "monitor_dir"),
prefix="filter_",
options=options,
)
# platform filters
if platform:
load_monitor_filters(
os.path.join(platform.get_dir(), "monitor"),
prefix="filter_",
options=options,
)
# load package filters
pm = ToolPackageManager()
for pkg in pm.get_installed():
load_monitor_filters(
os.path.join(pkg.path, "monitor"), prefix="filter_", options=options
)
# default filters
load_monitor_filters(
os.path.join(fs.get_source_dir(), "commands", "device", "filters"),
options=options,
)

@@ -17,7 +17,7 @@ import mimetypes
|
||||
import click
|
||||
|
||||
from platformio.commands.home.helpers import is_port_used
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.commands.home.run import run_server
|
||||
|
||||
|
||||
@click.command("home", short_help="GUI to manage PlatformIO")
|
||||
@@ -48,8 +48,6 @@ from platformio.compat import ensure_python3
|
||||
),
|
||||
)
|
||||
def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
ensure_python3()
|
||||
|
||||
# Ensure PIO Home mimetypes are known
|
||||
mimetypes.add_type("text/html", ".html")
|
||||
mimetypes.add_type("text/css", ".css")
|
||||
@@ -87,9 +85,6 @@ def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
click.launch(home_url)
|
||||
return
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.home.run import run_server
|
||||
|
||||
run_server(
|
||||
host=host,
|
||||
port=port,
|
||||
|
||||
@@ -18,7 +18,7 @@ import requests
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
@@ -37,15 +37,13 @@ def requests_session():
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def get_core_fullpath():
|
||||
return where_is_program(
|
||||
"platformio" + (".exe" if "windows" in util.get_systype() else "")
|
||||
)
|
||||
return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
|
||||
|
||||
|
||||
def is_port_used(host, port):
|
||||
socket.setdefaulttimeout(1)
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if WINDOWS:
|
||||
if IS_WINDOWS:
|
||||
try:
|
||||
s.bind((host, port))
|
||||
s.close()
|
||||
|
||||
@@ -14,16 +14,16 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from os.path import join
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from platformio import __version__, app, fs, util
|
||||
from platformio.project.helpers import get_project_core_dir, is_platformio_project
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import is_platformio_project
|
||||
|
||||
|
||||
class AppRPC:
|
||||
|
||||
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
|
||||
|
||||
IGNORE_STORAGE_KEYS = [
|
||||
"cid",
|
||||
"coreVersion",
|
||||
@@ -34,9 +34,16 @@ class AppRPC:
|
||||
"projectsDir",
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_state_path():
|
||||
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
|
||||
if not os.path.isdir(core_dir):
|
||||
os.makedirs(core_dir)
|
||||
return os.path.join(core_dir, "homestate.json")
|
||||
|
||||
@staticmethod
|
||||
def load_state():
|
||||
with app.State(AppRPC.APPSTATE_PATH, lock=True) as state:
|
||||
with app.State(AppRPC.get_state_path(), lock=True) as state:
|
||||
storage = state.get("storage", {})
|
||||
|
||||
# base data
|
||||
@@ -58,9 +65,13 @@ class AppRPC:
|
||||
storage["projectsDir"] = storage["coreSettings"]["projects_dir"]["value"]
|
||||
|
||||
# skip non-existing recent projects
|
||||
storage["recentProjects"] = [
|
||||
p for p in storage.get("recentProjects", []) if is_platformio_project(p)
|
||||
]
|
||||
storage["recentProjects"] = list(
|
||||
set(
|
||||
str(Path(p).resolve())
|
||||
for p in storage.get("recentProjects", [])
|
||||
if is_platformio_project(p)
|
||||
)
|
||||
)
|
||||
|
||||
state["storage"] = storage
|
||||
state.modified = False # skip saving extra fields
|
||||
@@ -72,7 +83,7 @@ class AppRPC:
|
||||
|
||||
@staticmethod
|
||||
def save_state(state):
|
||||
with app.State(AppRPC.APPSTATE_PATH, lock=True) as s:
|
||||
with app.State(AppRPC.get_state_path(), lock=True) as s:
|
||||
s.clear()
|
||||
s.update(state)
|
||||
storage = s.get("storage", {})
|
||||
|
||||
@@ -13,36 +13,73 @@
|
||||
# limitations under the License.
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
from ajsonrpc.core import JSONRPC20DispatchException
|
||||
|
||||
from platformio.compat import get_running_loop
|
||||
from platformio.compat import aio_get_running_loop
|
||||
|
||||
|
||||
class IDERPC:
|
||||
|
||||
COMMAND_TIMEOUT = 1.5 # in seconds
|
||||
|
||||
def __init__(self):
|
||||
self._queue = {}
|
||||
self._ide_queue = []
|
||||
self._cmd_queue = {}
|
||||
|
||||
def send_command(self, sid, command, params):
|
||||
if not self._queue.get(sid):
|
||||
raise JSONRPC20DispatchException(
|
||||
code=4005, message="PIO Home IDE agent is not started"
|
||||
)
|
||||
while self._queue[sid]:
|
||||
self._queue[sid].pop().set_result(
|
||||
{"id": time.time(), "method": command, "params": params}
|
||||
)
|
||||
async def listen_commands(self):
|
||||
f = aio_get_running_loop().create_future()
|
||||
self._ide_queue.append(f)
|
||||
self._process_commands()
|
||||
return await f
|
||||
|
||||
async def listen_commands(self, sid=0):
|
||||
if sid not in self._queue:
|
||||
self._queue[sid] = []
|
||||
self._queue[sid].append(get_running_loop().create_future())
|
||||
return await self._queue[sid][-1]
|
||||
|
||||
def open_project(self, sid, project_dir):
|
||||
return self.send_command(sid, "open_project", project_dir)
|
||||
|
||||
def open_text_document(self, sid, path, line=None, column=None):
|
||||
return self.send_command(
|
||||
sid, "open_text_document", dict(path=path, line=line, column=column)
|
||||
async def send_command(self, command, params=None):
|
||||
cmd_id = f"ide-{command}-{time.time()}"
|
||||
self._cmd_queue[cmd_id] = {
|
||||
"method": command,
|
||||
"params": params,
|
||||
"time": time.time(),
|
||||
"future": aio_get_running_loop().create_future(),
|
||||
}
|
||||
self._process_commands()
|
||||
# in case if IDE agent has not been started
|
||||
aio_get_running_loop().call_later(
|
||||
self.COMMAND_TIMEOUT + 0.1, self._process_commands
|
||||
)
|
||||
return await self._cmd_queue[cmd_id]["future"]
|
||||
|
||||
def on_command_result(self, cmd_id, value):
|
||||
if cmd_id not in self._cmd_queue:
|
||||
return
|
||||
if self._cmd_queue[cmd_id]["method"] == "get_pio_project_dirs":
|
||||
value = [str(Path(p).resolve()) for p in value]
|
||||
self._cmd_queue[cmd_id]["future"].set_result(value)
|
||||
del self._cmd_queue[cmd_id]
|
||||
|
||||
def _process_commands(self):
|
||||
for cmd_id in list(self._cmd_queue):
|
||||
cmd_data = self._cmd_queue[cmd_id]
|
||||
if cmd_data["future"].done():
|
||||
del self._cmd_queue[cmd_id]
|
||||
continue
|
||||
|
||||
if (
|
||||
not self._ide_queue
|
||||
and (time.time() - cmd_data["time"]) > self.COMMAND_TIMEOUT
|
||||
):
|
||||
cmd_data["future"].set_exception(
|
||||
JSONRPC20DispatchException(
|
||||
code=4005, message="PIO Home IDE agent is not started"
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
while self._ide_queue:
|
||||
self._ide_queue.pop().set_result(
|
||||
{
|
||||
"id": cmd_id,
|
||||
"method": cmd_data["method"],
|
||||
"params": cmd_data["params"],
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@ import time
|
||||
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.compat import create_task
|
||||
from platformio.compat import aio_create_task
|
||||
|
||||
|
||||
class MiscRPC:
|
||||
@@ -30,7 +30,7 @@ class MiscRPC:
|
||||
cache_data = json.loads(cache_data)
|
||||
# automatically update cache in background every 12 hours
|
||||
if cache_data["time"] < (time.time() - (3600 * 12)):
|
||||
create_task(
|
||||
aio_create_task(
|
||||
self._preload_latest_tweets(data_url, cache_key, cache_valid)
|
||||
)
|
||||
return cache_data["result"]
|
||||
|
||||
@@ -23,10 +23,10 @@ from ajsonrpc.core import JSONRPC20DispatchException
|
||||
from platformio import exception, fs
|
||||
from platformio.commands.home.rpc.handlers.app import AppRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectError
|
||||
from platformio.project.generator import ProjectGenerator
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
from platformio.project.options import get_config_options_schema
|
||||
|
||||
@@ -81,7 +81,7 @@ class ProjectRPC:
|
||||
data["description"] = config.get("platformio", "description")
|
||||
data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
|
||||
|
||||
libdeps_dir = config.get_optional_dir("libdeps")
|
||||
libdeps_dir = config.get("platformio", "libdeps_dir")
|
||||
for section in config.sections():
|
||||
if not section.startswith("env:"):
|
||||
continue
|
||||
@@ -93,7 +93,7 @@ class ProjectRPC:
|
||||
# skip non existing folders and resolve full path
|
||||
for key in ("envLibdepsDirs", "libExtraDirs"):
|
||||
data[key] = [
|
||||
fs.expanduser(d) if d.startswith("~") else os.path.realpath(d)
|
||||
fs.expanduser(d) if d.startswith("~") else os.path.abspath(d)
|
||||
for d in data[key]
|
||||
if os.path.isdir(d)
|
||||
]
|
||||
@@ -200,10 +200,10 @@ class ProjectRPC:
|
||||
await PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
return self._generate_project_main(project_dir, framework)
|
||||
return self._generate_project_main(project_dir, board, framework)
|
||||
|
||||
@staticmethod
|
||||
def _generate_project_main(project_dir, framework):
|
||||
def _generate_project_main(project_dir, board, framework):
|
||||
main_content = None
|
||||
if framework == "arduino":
|
||||
main_content = "\n".join(
|
||||
@@ -238,19 +238,35 @@ class ProjectRPC:
|
||||
)
|
||||
if not main_content:
|
||||
return project_dir
|
||||
|
||||
is_cpp_project = True
|
||||
pm = PlatformPackageManager()
|
||||
try:
|
||||
board = pm.board_config(board)
|
||||
platforms = board.get("platforms", board.get("platform"))
|
||||
if not isinstance(platforms, list):
|
||||
platforms = [platforms]
|
||||
c_based_platforms = ["intel_mcs51", "ststm8"]
|
||||
is_cpp_project = not (set(platforms) & set(c_based_platforms))
|
||||
except exception.PlatformioException:
|
||||
pass
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig()
|
||||
src_dir = config.get_optional_dir("src")
|
||||
main_path = os.path.join(src_dir, "main.cpp")
|
||||
src_dir = config.get("platformio", "src_dir")
|
||||
main_path = os.path.join(
|
||||
src_dir, "main.%s" % ("cpp" if is_cpp_project else "c")
|
||||
)
|
||||
if os.path.isfile(main_path):
|
||||
return project_dir
|
||||
if not os.path.isdir(src_dir):
|
||||
os.makedirs(src_dir)
|
||||
with open(main_path, "w") as fp:
|
||||
with open(main_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(main_content.strip())
|
||||
return project_dir
|
||||
|
||||
async def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
|
||||
@staticmethod
|
||||
async def import_arduino(board, use_arduino_libs, arduino_project_dir):
|
||||
board = str(board)
|
||||
# don't import PIO Project
|
||||
if is_platformio_project(arduino_project_dir):
|
||||
@@ -292,7 +308,7 @@ class ProjectRPC:
|
||||
)
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig()
|
||||
src_dir = config.get_optional_dir("src")
|
||||
src_dir = config.get("platformio", "src_dir")
|
||||
if os.path.isdir(src_dir):
|
||||
fs.rmtree(src_dir)
|
||||
shutil.copytree(arduino_project_dir, src_dir, symlinks=True)
|
||||
|
||||
@@ -17,7 +17,7 @@ from ajsonrpc.dispatcher import Dispatcher
|
||||
from ajsonrpc.manager import AsyncJSONRPCResponseManager
|
||||
from starlette.endpoints import WebSocketEndpoint
|
||||
|
||||
from platformio.compat import create_task, get_running_loop
|
||||
from platformio.compat import aio_create_task, aio_get_running_loop
|
||||
from platformio.proc import force_exit
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ class JSONRPCServerFactoryBase:
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
def addObjectHandler(self, handler, namespace):
|
||||
def add_object_handler(self, handler, namespace):
|
||||
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
|
||||
|
||||
def on_client_connect(self):
|
||||
@@ -63,7 +63,7 @@ class JSONRPCServerFactoryBase:
|
||||
click.echo("Automatically shutdown server on timeout")
|
||||
force_exit()
|
||||
|
||||
self.shutdown_timer = get_running_loop().call_later(
|
||||
self.shutdown_timer = aio_get_running_loop().call_later(
|
||||
self.shutdown_timeout, _auto_shutdown_server
|
||||
)
|
||||
|
||||
@@ -84,7 +84,7 @@ class WebSocketJSONRPCServer(WebSocketEndpoint):
|
||||
self.factory.on_client_connect() # pylint: disable=no-member
|
||||
|
||||
async def on_receive(self, websocket, data):
|
||||
create_task(self._handle_rpc(websocket, data))
|
||||
aio_create_task(self._handle_rpc(websocket, data))
|
||||
|
||||
async def on_disconnect(self, websocket, close_code):
|
||||
self.factory.on_client_disconnect() # pylint: disable=no-member
|
||||
@@ -92,6 +92,6 @@ class WebSocketJSONRPCServer(WebSocketEndpoint):
|
||||
async def _handle_rpc(self, websocket, data):
|
||||
# pylint: disable=no-member
|
||||
response = await self.factory.manager.get_response_for_payload(data)
|
||||
if response.error:
|
||||
if response.error and response.error.data:
|
||||
click.secho("Error: %s" % response.error.data, fg="red", err=True)
|
||||
await websocket.send_text(self.factory.manager.serialize(response.body))
|
||||
|
||||
@@ -32,7 +32,7 @@ from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.commands.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
|
||||
from platformio.compat import get_running_loop
|
||||
from platformio.compat import aio_get_running_loop
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import force_exit
|
||||
@@ -49,7 +49,7 @@ class ShutdownMiddleware:
|
||||
|
||||
|
||||
async def shutdown_server(_=None):
|
||||
get_running_loop().call_later(0.5, force_exit)
|
||||
aio_get_running_loop().call_later(0.5, force_exit)
|
||||
return PlainTextResponse("Server has been shutdown!")
|
||||
|
||||
|
||||
@@ -65,13 +65,13 @@ def run_server(host, port, no_open, shutdown_timeout, home_url):
|
||||
raise PlatformioException("Invalid path to PIO Home Contrib")
|
||||
|
||||
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
|
||||
ws_rpc_factory.addObjectHandler(AccountRPC(), namespace="account")
|
||||
ws_rpc_factory.addObjectHandler(AppRPC(), namespace="app")
|
||||
ws_rpc_factory.addObjectHandler(IDERPC(), namespace="ide")
|
||||
ws_rpc_factory.addObjectHandler(MiscRPC(), namespace="misc")
|
||||
ws_rpc_factory.addObjectHandler(OSRPC(), namespace="os")
|
||||
ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace="core")
|
||||
ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace="project")
|
||||
ws_rpc_factory.add_object_handler(AccountRPC(), namespace="account")
|
||||
ws_rpc_factory.add_object_handler(AppRPC(), namespace="app")
|
||||
ws_rpc_factory.add_object_handler(IDERPC(), namespace="ide")
|
||||
ws_rpc_factory.add_object_handler(MiscRPC(), namespace="misc")
|
||||
ws_rpc_factory.add_object_handler(OSRPC(), namespace="os")
|
||||
ws_rpc_factory.add_object_handler(PIOCoreRPC(), namespace="core")
|
||||
ws_rpc_factory.add_object_handler(ProjectRPC(), namespace="project")
|
||||
|
||||
path = urlparse(home_url).path
|
||||
routes = [
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
|
||||
@@ -23,7 +24,6 @@ from tabulate import tabulate
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
@@ -43,7 +43,7 @@ CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
|
||||
|
||||
|
||||
def get_project_global_lib_dir():
|
||||
return ProjectConfig.get_instance().get_optional_dir("globallib")
|
||||
return ProjectConfig.get_instance().get("platformio", "globallib_dir")
|
||||
|
||||
|
||||
@click.group(short_help="Library manager")
|
||||
@@ -111,7 +111,7 @@ def cli(ctx, **options):
|
||||
os.path.join(storage_dir, "platformio.ini")
|
||||
)
|
||||
config.validate(options["environment"], silent=in_silence)
|
||||
libdeps_dir = config.get_optional_dir("libdeps")
|
||||
libdeps_dir = config.get("platformio", "libdeps_dir")
|
||||
for env in config.envs():
|
||||
if options["environment"] and env not in options["environment"]:
|
||||
continue
|
||||
@@ -286,7 +286,7 @@ def lib_update( # pylint: disable=too-many-arguments
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
dump_json_to_unicode(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
@@ -315,7 +315,7 @@ def lib_list(ctx, json_output):
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
dump_json_to_unicode(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
@@ -355,11 +355,11 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
"get",
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=page),
|
||||
cache_valid="1d",
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
|
||||
if json_output:
|
||||
click.echo(dump_json_to_unicode(result))
|
||||
click.echo(json.dumps(result))
|
||||
return
|
||||
|
||||
if result["total"] == 0:
|
||||
@@ -408,7 +408,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
"get",
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=int(result["page"]) + 1),
|
||||
cache_valid="1d",
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
|
||||
|
||||
@@ -418,7 +418,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
def lib_builtin(storage, json_output):
|
||||
items = get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(dump_json_to_unicode(items))
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
for storage_ in items:
|
||||
if not storage_["items"]:
|
||||
@@ -440,9 +440,11 @@ def lib_show(library, json_output):
|
||||
lm = LibraryPackageManager()
|
||||
lib_id = lm.reveal_registry_package_id(library, silent=json_output)
|
||||
regclient = lm.get_registry_client_instance()
|
||||
lib = regclient.fetch_json_data("get", "/v2/lib/info/%d" % lib_id, cache_valid="1h")
|
||||
lib = regclient.fetch_json_data(
|
||||
"get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h"
|
||||
)
|
||||
if json_output:
|
||||
return click.echo(dump_json_to_unicode(lib))
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
title = "{ownername}/{name}".format(**lib)
|
||||
click.secho(title, fg="cyan")
|
||||
@@ -535,10 +537,10 @@ def lib_register(config_url): # pylint: disable=unused-argument
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data("get", "/v2/lib/stats", cache_valid="1h")
|
||||
result = regclient.fetch_json_data("get", "/v2/lib/stats", x_cache_valid="1h")
|
||||
|
||||
if json_output:
|
||||
return click.echo(dump_json_to_unicode(result))
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
tabular_data = [
|
||||
|
||||
@@ -17,10 +17,14 @@ import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import fs
|
||||
from platformio.clients.account import AccountClient
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.exception import UserSideException
|
||||
from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.package.manifest.schema import ManifestSchema, ManifestValidationError
|
||||
from platformio.package.meta import PackageSpec, PackageType
|
||||
from platformio.package.pack import PackagePacker
|
||||
from platformio.package.unpack import FileUnpacker, TARArchiver
|
||||
@@ -36,6 +40,54 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
return value
|
||||
|
||||
|
||||
def load_manifest_from_archive(path):
|
||||
return ManifestSchema().load_manifest(
|
||||
ManifestParserFactory.new_from_archive(path).as_dict()
|
||||
)
|
||||
|
||||
|
||||
def check_package_duplicates(
|
||||
owner, type, name, version, system
|
||||
): # pylint: disable=redefined-builtin
|
||||
found = False
|
||||
items = (
|
||||
RegistryClient()
|
||||
.list_packages(filters=dict(types=[type], names=[name]))
|
||||
.get("items")
|
||||
)
|
||||
if not items:
|
||||
return True
|
||||
# duplicated version by owner / system
|
||||
found = False
|
||||
for item in items:
|
||||
if item["owner"]["username"] != owner or item["version"]["name"] != version:
|
||||
continue
|
||||
if not system:
|
||||
found = True
|
||||
break
|
||||
published_systems = []
|
||||
for f in item["version"]["files"]:
|
||||
published_systems.extend(f.get("system", []))
|
||||
found = set(system).issubset(set(published_systems))
|
||||
if found:
|
||||
raise UserSideException(
|
||||
"The package `%s/%s@%s` is already published in the registry"
|
||||
% (owner, name, version)
|
||||
)
|
||||
other_owners = [
|
||||
item["owner"]["username"]
|
||||
for item in items
|
||||
if item["owner"]["username"] != owner
|
||||
]
|
||||
if other_owners:
|
||||
click.secho(
|
||||
"\nWarning! A package with the name `%s` is already published by the next "
|
||||
"owners: %s\n" % (name, ", ".join(other_owners)),
|
||||
fg="yellow",
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
@click.group("package", short_help="Package manager")
|
||||
def cli():
|
||||
pass
|
||||
@@ -54,6 +106,12 @@ def cli():
|
||||
def package_pack(package, output):
|
||||
p = PackagePacker(package)
|
||||
archive_path = p.pack(output)
|
||||
# validate manifest
|
||||
try:
|
||||
load_manifest_from_archive(archive_path)
|
||||
except ManifestValidationError as e:
|
||||
os.remove(archive_path)
|
||||
raise e
|
||||
click.secho('Wrote a tarball to "%s"' % archive_path, fg="green")
|
||||
|
||||
|
||||
@@ -80,28 +138,71 @@ def package_pack(package, output):
|
||||
default=True,
|
||||
help="Notify by email when package is processed",
|
||||
)
|
||||
def package_publish(package, owner, released_at, private, notify):
|
||||
assert ensure_python3()
|
||||
|
||||
# publish .tar.gz instantly without repacking
|
||||
if not os.path.isdir(package) and isinstance(
|
||||
@click.option(
|
||||
"--non-interactive",
|
||||
is_flag=True,
|
||||
help="Do not show interactive prompt",
|
||||
)
|
||||
def package_publish( # pylint: disable=too-many-arguments, too-many-locals
|
||||
package, owner, released_at, private, notify, non_interactive
|
||||
):
|
||||
click.secho("Preparing a package...", fg="cyan")
|
||||
owner = owner or AccountClient().get_logged_username()
|
||||
do_not_pack = not os.path.isdir(package) and isinstance(
|
||||
FileUnpacker.new_archiver(package), TARArchiver
|
||||
):
|
||||
response = RegistryClient().publish_package(
|
||||
package, owner, released_at, private, notify
|
||||
)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
return
|
||||
|
||||
)
|
||||
archive_path = None
|
||||
with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member
|
||||
with fs.cd(tmp_dir):
|
||||
p = PackagePacker(package)
|
||||
archive_path = p.pack()
|
||||
response = RegistryClient().publish_package(
|
||||
archive_path, owner, released_at, private, notify
|
||||
# publish .tar.gz instantly without repacking
|
||||
if do_not_pack:
|
||||
archive_path = package
|
||||
else:
|
||||
with fs.cd(tmp_dir):
|
||||
p = PackagePacker(package)
|
||||
archive_path = p.pack()
|
||||
|
||||
type_ = PackageType.from_archive(archive_path)
|
||||
manifest = load_manifest_from_archive(archive_path)
|
||||
name = manifest.get("name")
|
||||
version = manifest.get("version")
|
||||
data = [
|
||||
("Type:", type_),
|
||||
("Owner:", owner),
|
||||
("Name:", name),
|
||||
("Version:", version),
|
||||
]
|
||||
if manifest.get("system"):
|
||||
data.insert(len(data) - 1, ("System:", ", ".join(manifest.get("system"))))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
# look for duplicates
|
||||
check_package_duplicates(owner, type_, name, version, manifest.get("system"))
|
||||
|
||||
if not non_interactive:
|
||||
click.confirm(
|
||||
"Are you sure you want to publish the %s %s to the registry?\n"
|
||||
% (
|
||||
type_,
|
||||
click.style(
|
||||
"%s/%s@%s" % (owner, name, version),
|
||||
fg="cyan",
|
||||
),
|
||||
),
|
||||
abort=True,
|
||||
)
|
||||
|
||||
click.secho(
|
||||
"The package publishing may take some time depending "
|
||||
"on your Internet connection and the package size.",
|
||||
fg="yellow",
|
||||
)
|
||||
click.echo("Publishing...")
|
||||
response = RegistryClient().publish_package(
|
||||
owner, type_, archive_path, released_at, private, notify
|
||||
)
|
||||
if not do_not_pack:
|
||||
os.remove(archive_path)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
click.secho(response.get("message"), fg="green")
|
||||
|
||||
|
||||
@cli.command("unpublish", short_help="Remove a pushed package from the registry")
|
||||
@@ -122,9 +223,9 @@ def package_publish(package, owner, released_at, private, notify):
|
||||
def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin
|
||||
spec = PackageSpec(package)
|
||||
response = RegistryClient().unpublish_package(
|
||||
owner=spec.owner or AccountClient().get_logged_username(),
|
||||
type=type,
|
||||
name=spec.name,
|
||||
owner=spec.owner,
|
||||
version=str(spec.requirements),
|
||||
undo=undo,
|
||||
)
|
||||
|
||||
@@ -12,13 +12,13 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio.cache import cleanup_content_cache
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.package.version import get_original_version
|
||||
@@ -31,6 +31,301 @@ def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_search(query, json_output):
|
||||
platforms = []
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(platform)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform["name"], with_boards=False, expose_packages=False
|
||||
)
|
||||
)
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
frameworks = []
|
||||
for framework in regclient.fetch_json_data(
|
||||
"get", "/v2/frameworks", x_cache_valid="1d"
|
||||
):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
||||
framework["platforms"] = [
|
||||
platform["name"]
|
||||
for platform in _get_registry_platforms()
|
||||
if framework["name"] in platform["frameworks"]
|
||||
]
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_list(json_output):
|
||||
platforms = []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
platforms.append(
|
||||
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
|
||||
)
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about development platform")
|
||||
@click.argument("platform")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
data = _get_platform_data(platform)
|
||||
if not data:
|
||||
raise UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data))
|
||||
|
||||
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
|
||||
click.echo(
|
||||
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
|
||||
)
|
||||
click.echo("=" * (3 + len(dep + data["title"])))
|
||||
click.echo(data["description"])
|
||||
click.echo()
|
||||
if "version" in data:
|
||||
click.echo("Version: %s" % data["version"])
|
||||
if data["homepage"]:
|
||||
click.echo("Home: %s" % data["homepage"])
|
||||
if data["repository"]:
|
||||
click.echo("Repository: %s" % data["repository"])
|
||||
if data["url"]:
|
||||
click.echo("Vendor: %s" % data["url"])
|
||||
if data["license"]:
|
||||
click.echo("License: %s" % data["license"])
|
||||
if data["frameworks"]:
|
||||
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
|
||||
|
||||
if not data["packages"]:
|
||||
return None
|
||||
|
||||
if not isinstance(data["packages"][0], dict):
|
||||
click.echo("Packages: %s" % ", ".join(data["packages"]))
|
||||
else:
|
||||
click.echo()
|
||||
click.secho("Packages", bold=True)
|
||||
click.echo("--------")
|
||||
for item in data["packages"]:
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
|
||||
click.echo("-" * (8 + len(item["name"])))
|
||||
if item["type"]:
|
||||
click.echo("Type: %s" % item["type"])
|
||||
click.echo("Requirements: %s" % item["requirements"])
|
||||
click.echo(
|
||||
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
|
||||
)
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item["version"])
|
||||
if "originalVersion" in item:
|
||||
click.echo("Original version: %s" % item["originalVersion"])
|
||||
if "description" in item:
|
||||
click.echo("Description: %s" % item["description"])
|
||||
|
||||
if data["boards"]:
|
||||
click.echo()
|
||||
click.secho("Boards", bold=True)
|
||||
click.echo("------")
|
||||
print_boards(data["boards"])
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install new development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
@click.option("--with-package", multiple=True)
|
||||
@click.option("--without-package", multiple=True)
|
||||
@click.option("--skip-default-package", is_flag=True)
|
||||
@click.option("--with-all-packages", is_flag=True)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
platforms,
|
||||
with_package,
|
||||
without_package,
|
||||
skip_default_package,
|
||||
with_all_packages,
|
||||
silent,
|
||||
force,
|
||||
):
|
||||
return _platform_install(
|
||||
platforms,
|
||||
with_package,
|
||||
without_package,
|
||||
skip_default_package,
|
||||
with_all_packages,
|
||||
silent,
|
||||
force,
|
||||
)
|
||||
|
||||
|
||||
def _platform_install( # pylint: disable=too-many-arguments
|
||||
platforms,
|
||||
with_package=None,
|
||||
without_package=None,
|
||||
skip_default_package=False,
|
||||
with_all_packages=False,
|
||||
silent=False,
|
||||
force=False,
|
||||
):
|
||||
pm = PlatformPackageManager()
|
||||
for platform in platforms:
|
||||
pkg = pm.install(
|
||||
spec=platform,
|
||||
with_packages=with_package or [],
|
||||
without_packages=without_package or [],
|
||||
skip_default_package=skip_default_package,
|
||||
with_all_packages=with_all_packages,
|
||||
silent=silent,
|
||||
force=force,
|
||||
)
|
||||
if pkg and not silent:
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of the packages will be installed later "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
def platform_uninstall(platforms):
|
||||
pm = PlatformPackageManager()
|
||||
for platform in platforms:
|
||||
if pm.uninstall(platform):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully removed!" % platform,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed development platforms")
|
||||
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
|
||||
@click.option(
|
||||
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead",
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
platforms, only_packages, only_check, dry_run, silent, json_output
|
||||
):
|
||||
pm = PlatformPackageManager()
|
||||
platforms = platforms or pm.get_installed()
|
||||
only_check = dry_run or only_check
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for platform in platforms:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(platform, PackageItem):
|
||||
pkg = platform
|
||||
else:
|
||||
spec = PackageSpec(platform)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = pm.outdated(pkg, spec)
|
||||
if (
|
||||
not outdated.is_outdated(allow_incompatible=True)
|
||||
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||
):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg, with_boards=False, expose_packages=False
|
||||
)
|
||||
if outdated.is_outdated(allow_incompatible=True):
|
||||
data["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(data)
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
# cleanup cached board and platform lists
|
||||
cleanup_content_cache("http")
|
||||
|
||||
for platform in platforms:
|
||||
click.echo(
|
||||
"Platform %s"
|
||||
% click.style(
|
||||
platform.metadata.name
|
||||
if isinstance(platform, PackageItem)
|
||||
else platform,
|
||||
fg="cyan",
|
||||
)
|
||||
)
|
||||
click.echo("--------")
|
||||
pm.update(
|
||||
platform, only_packages=only_packages, only_check=only_check, silent=silent
|
||||
)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
#
# Helpers
#


def init_platform(name, skip_default_package=True, auto_install=True):
    try:
        return PlatformFactory.new(name)
    except UnknownPlatform:
        if auto_install:
            _platform_install([name], skip_default_package=skip_default_package)
            return PlatformFactory.new(name)


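# Added illustration (not from the change set): `init_platform` wraps the
# "install on demand" pattern, so callers no longer need to catch
# UnknownPlatform and invoke the install command themselves. A rough sketch:
from platformio.commands.platform import init_platform

# Returns a platform instance, installing "atmelavr" first if it is missing.
p = init_platform("atmelavr")
print(p.is_embedded())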
def _print_platforms(platforms):
    for platform in platforms:
        click.echo(
@@ -59,7 +354,7 @@ def _print_platforms(platforms):

def _get_registry_platforms():
    regclient = PlatformPackageManager().get_registry_client_instance()
    return regclient.fetch_json_data("get", "/v2/platforms", cache_valid="1d")
    return regclient.fetch_json_data("get", "/v2/platforms", x_cache_valid="1d")


def _get_platform_data(*args, **kwargs):
@@ -162,264 +457,3 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
    ]

    return data


@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_search(query, json_output):
|
||||
platforms = []
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = dump_json_to_unicode(platform)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform["name"], with_boards=False, expose_packages=False
|
||||
)
|
||||
)
|
||||
|
||||
if json_output:
|
||||
click.echo(dump_json_to_unicode(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
frameworks = []
|
||||
for framework in regclient.fetch_json_data(
|
||||
"get", "/v2/frameworks", cache_valid="1d"
|
||||
):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = dump_json_to_unicode(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
||||
framework["platforms"] = [
|
||||
platform["name"]
|
||||
for platform in _get_registry_platforms()
|
||||
if framework["name"] in platform["frameworks"]
|
||||
]
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(dump_json_to_unicode(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_list(json_output):
|
||||
platforms = []
|
||||
pm = PlatformPackageManager()
|
||||
for pkg in pm.get_installed():
|
||||
platforms.append(
|
||||
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
|
||||
)
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
|
||||
if json_output:
|
||||
click.echo(dump_json_to_unicode(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about development platform")
|
||||
@click.argument("platform")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
data = _get_platform_data(platform)
|
||||
if not data:
|
||||
raise UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(dump_json_to_unicode(data))
|
||||
|
||||
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
|
||||
click.echo(
|
||||
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
|
||||
)
|
||||
click.echo("=" * (3 + len(dep + data["title"])))
|
||||
click.echo(data["description"])
|
||||
click.echo()
|
||||
if "version" in data:
|
||||
click.echo("Version: %s" % data["version"])
|
||||
if data["homepage"]:
|
||||
click.echo("Home: %s" % data["homepage"])
|
||||
if data["repository"]:
|
||||
click.echo("Repository: %s" % data["repository"])
|
||||
if data["url"]:
|
||||
click.echo("Vendor: %s" % data["url"])
|
||||
if data["license"]:
|
||||
click.echo("License: %s" % data["license"])
|
||||
if data["frameworks"]:
|
||||
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
|
||||
|
||||
if not data["packages"]:
|
||||
return None
|
||||
|
||||
if not isinstance(data["packages"][0], dict):
|
||||
click.echo("Packages: %s" % ", ".join(data["packages"]))
|
||||
else:
|
||||
click.echo()
|
||||
click.secho("Packages", bold=True)
|
||||
click.echo("--------")
|
||||
for item in data["packages"]:
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
|
||||
click.echo("-" * (8 + len(item["name"])))
|
||||
if item["type"]:
|
||||
click.echo("Type: %s" % item["type"])
|
||||
click.echo("Requirements: %s" % item["requirements"])
|
||||
click.echo(
|
||||
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
|
||||
)
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item["version"])
|
||||
if "originalVersion" in item:
|
||||
click.echo("Original version: %s" % item["originalVersion"])
|
||||
if "description" in item:
|
||||
click.echo("Description: %s" % item["description"])
|
||||
|
||||
if data["boards"]:
|
||||
click.echo()
|
||||
click.secho("Boards", bold=True)
|
||||
click.echo("------")
|
||||
print_boards(data["boards"])
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install new development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
@click.option("--with-package", multiple=True)
|
||||
@click.option("--without-package", multiple=True)
|
||||
@click.option("--skip-default-package", is_flag=True)
|
||||
@click.option("--with-all-packages", is_flag=True)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
platforms,
|
||||
with_package,
|
||||
without_package,
|
||||
skip_default_package,
|
||||
with_all_packages,
|
||||
silent,
|
||||
force,
|
||||
):
|
||||
pm = PlatformPackageManager()
|
||||
for platform in platforms:
|
||||
pkg = pm.install(
|
||||
spec=platform,
|
||||
with_packages=with_package,
|
||||
without_packages=without_package,
|
||||
skip_default_package=skip_default_package,
|
||||
with_all_packages=with_all_packages,
|
||||
silent=silent,
|
||||
force=force,
|
||||
)
|
||||
if pkg and not silent:
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of the packages will be installed later "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
def platform_uninstall(platforms):
|
||||
pm = PlatformPackageManager()
|
||||
for platform in platforms:
|
||||
if pm.uninstall(platform):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully removed!" % platform,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed development platforms")
|
||||
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
|
||||
@click.option(
|
||||
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead",
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
platforms, only_packages, only_check, dry_run, silent, json_output
|
||||
):
|
||||
pm = PlatformPackageManager()
|
||||
platforms = platforms or pm.get_installed()
|
||||
only_check = dry_run or only_check
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for platform in platforms:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(platform, PackageItem):
|
||||
pkg = platform
|
||||
else:
|
||||
spec = PackageSpec(platform)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
outdated = pm.outdated(pkg, spec)
|
||||
if (
|
||||
not outdated.is_outdated(allow_incompatible=True)
|
||||
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||
):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg, with_boards=False, expose_packages=False
|
||||
)
|
||||
if outdated.is_outdated(allow_incompatible=True):
|
||||
data["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(data)
|
||||
return click.echo(dump_json_to_unicode(result))
|
||||
|
||||
# cleanup cached board and platform lists
|
||||
cleanup_content_cache("http")
|
||||
|
||||
for platform in platforms:
|
||||
click.echo(
|
||||
"Platform %s"
|
||||
% click.style(
|
||||
platform.metadata.name
|
||||
if isinstance(platform, PackageItem)
|
||||
else platform,
|
||||
fg="cyan",
|
||||
)
|
||||
)
|
||||
click.echo("--------")
|
||||
pm.update(
|
||||
platform, only_packages=only_packages, only_check=only_check, silent=silent
|
||||
)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
@@ -22,11 +22,11 @@ from tabulate import tabulate
|
||||
|
||||
from platformio import fs
|
||||
from platformio.commands.platform import platform_install as cli_platform_install
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.platform.exception import UnknownBoard
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
from platformio.project.generator import ProjectGenerator
|
||||
from platformio.project.helpers import is_platformio_project, load_project_ide_data
|
||||
|
||||
|
||||
@@ -191,10 +191,7 @@ def project_init(
|
||||
os.path.join(project_dir, "platformio.ini")
|
||||
)
|
||||
config.validate()
|
||||
pg = ProjectGenerator(
|
||||
config, environment or get_best_envname(config, board), ide
|
||||
)
|
||||
pg.generate()
|
||||
ProjectGenerator(config, environment, ide, board).generate()
|
||||
|
||||
if is_new_project:
|
||||
init_cvs_ignore(project_dir)
|
||||
@@ -226,10 +223,10 @@ def init_base_project(project_dir):
|
||||
config = ProjectConfig()
|
||||
config.save()
|
||||
dir_to_readme = [
|
||||
(config.get_optional_dir("src"), None),
|
||||
(config.get_optional_dir("include"), init_include_readme),
|
||||
(config.get_optional_dir("lib"), init_lib_readme),
|
||||
(config.get_optional_dir("test"), init_test_readme),
|
||||
(config.get("platformio", "src_dir"), None),
|
||||
(config.get("platformio", "include_dir"), init_include_readme),
|
||||
(config.get("platformio", "lib_dir"), init_lib_readme),
|
||||
(config.get("platformio", "test_dir"), init_test_readme),
|
||||
]
|
||||
for (path, cb) in dir_to_readme:
|
||||
if os.path.isdir(path):
|
||||
@@ -240,7 +237,7 @@ def init_base_project(project_dir):
|
||||
|
||||
|
||||
def init_include_readme(include_dir):
|
||||
with open(os.path.join(include_dir, "README"), "w") as fp:
|
||||
with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"""
|
||||
This directory is intended for project header files.
|
||||
@@ -286,7 +283,7 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
|
||||
|
||||
|
||||
def init_lib_readme(lib_dir):
|
||||
with open(os.path.join(lib_dir, "README"), "w") as fp:
|
||||
with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"""
|
||||
This directory is intended for project specific (private) libraries.
|
||||
@@ -339,7 +336,7 @@ More information about PlatformIO Library Dependency Finder
|
||||
|
||||
|
||||
def init_test_readme(test_dir):
|
||||
with open(os.path.join(test_dir, "README"), "w") as fp:
|
||||
with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"""
|
||||
This directory is intended for PlatformIO Unit Testing and project tests.
|
||||
@@ -360,7 +357,7 @@ def init_cvs_ignore(project_dir):
|
||||
conf_path = os.path.join(project_dir, ".gitignore")
|
||||
if os.path.isfile(conf_path):
|
||||
return
|
||||
with open(conf_path, "w") as fp:
|
||||
with open(conf_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(".pio\n")
|
||||
|
||||
|
||||
@@ -441,23 +438,3 @@ def update_project_env(project_dir, environment, project_option):
|
||||
config.set(section, _name.strip(), _value.strip())
|
||||
|
||||
config.save()
|
||||
|
||||
|
||||
def get_best_envname(config, board_ids=None):
    envname = None
    default_envs = config.default_envs()
    if default_envs:
        envname = default_envs[0]
        if not board_ids:
            return envname

    for env in config.envs():
        if not board_ids:
            return env
        if not envname:
            envname = env
        items = config.items(env=env, as_dict=True)
        if "board" in items and items.get("board") in board_ids:
            return env

    return envname


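# Added illustration (not from the change set): how `get_best_envname` might be
# used to pick an environment for a given board; the board id is only an example.
from platformio.project.config import ProjectConfig

config = ProjectConfig.get_instance("platformio.ini")
envname = get_best_envname(config, board_ids=["uno"])
print(envname or "no matching environment")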
@@ -13,7 +13,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from os.path import getatime, getmtime, isdir, isfile, join
|
||||
|
||||
from twisted.logger import LogLevel # pylint: disable=import-error
|
||||
from twisted.spread import pb # pylint: disable=import-error
|
||||
@@ -25,15 +24,16 @@ from platformio.commands.remote.ac.serial import SerialPortAsyncCmd
|
||||
from platformio.commands.remote.client.base import RemoteClientBase
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
from platformio.project.helpers import get_project_core_dir
|
||||
|
||||
|
||||
class RemoteAgentService(RemoteClientBase):
|
||||
def __init__(self, name, share, working_dir=None):
|
||||
RemoteClientBase.__init__(self)
|
||||
self.log_level = LogLevel.info
|
||||
self.working_dir = working_dir or join(get_project_core_dir(), "remote")
|
||||
if not isdir(self.working_dir):
|
||||
self.working_dir = working_dir or os.path.join(
|
||||
ProjectConfig.get_instance().get("platformio", "core_dir"), "remote"
|
||||
)
|
||||
if not os.path.isdir(self.working_dir):
|
||||
os.makedirs(self.working_dir)
|
||||
if name:
|
||||
self.name = str(name)[:50]
|
||||
@@ -138,14 +138,14 @@ class RemoteAgentService(RemoteClientBase):
|
||||
self, command, options
|
||||
):
|
||||
assert options and "project_id" in options
|
||||
project_dir = join(self.working_dir, "projects", options["project_id"])
|
||||
origin_pio_ini = join(project_dir, "platformio.ini")
|
||||
back_pio_ini = join(project_dir, "platformio.ini.bak")
|
||||
project_dir = os.path.join(self.working_dir, "projects", options["project_id"])
|
||||
origin_pio_ini = os.path.join(project_dir, "platformio.ini")
|
||||
back_pio_ini = os.path.join(project_dir, "platformio.ini.bak")
|
||||
|
||||
# remove insecure project options
|
||||
try:
|
||||
conf = ProjectConfig(origin_pio_ini)
|
||||
if isfile(back_pio_ini):
|
||||
if os.path.isfile(back_pio_ini):
|
||||
os.remove(back_pio_ini)
|
||||
os.rename(origin_pio_ini, back_pio_ini)
|
||||
# cleanup
|
||||
@@ -159,7 +159,10 @@ class RemoteAgentService(RemoteClientBase):
|
||||
conf.save(origin_pio_ini)
|
||||
|
||||
# restore A/M times
|
||||
os.utime(origin_pio_ini, (getatime(back_pio_ini), getmtime(back_pio_ini)))
|
||||
os.utime(
|
||||
origin_pio_ini,
|
||||
(os.path.getatime(back_pio_ini), os.path.getmtime(back_pio_ini)),
|
||||
)
|
||||
except NotPlatformIOProjectError as e:
|
||||
raise pb.Error(str(e))
|
||||
|
||||
@@ -194,8 +197,8 @@ class RemoteAgentService(RemoteClientBase):
|
||||
paused_acs.append(ac)
|
||||
|
||||
def _cb_on_end():
|
||||
if isfile(back_pio_ini):
|
||||
if isfile(origin_pio_ini):
|
||||
if os.path.isfile(back_pio_ini):
|
||||
if os.path.isfile(origin_pio_ini):
|
||||
os.remove(origin_pio_ini)
|
||||
os.rename(back_pio_ini, origin_pio_ini)
|
||||
for ac in paused_acs:
|
||||
|
||||
@@ -84,7 +84,7 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
self._ac_id = None
|
||||
self._d_acread = None
|
||||
self._d_acwrite = None
|
||||
self._acwrite_buffer = ""
|
||||
self._acwrite_buffer = b""
|
||||
|
||||
def agent_pool_ready(self):
|
||||
d = task.deferLater(
|
||||
@@ -173,7 +173,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
address = port.getHost()
|
||||
self.log.debug("Serial Bridge is started on {address!r}", address=address)
|
||||
if "sock" in self.cmd_options:
|
||||
with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
|
||||
with open(
|
||||
os.path.join(self.cmd_options["sock"], "sock"),
|
||||
mode="w",
|
||||
encoding="utf8",
|
||||
) as fp:
|
||||
fp.write("socket://localhost:%d" % address.port)
|
||||
|
||||
def client_terminal_stopped(self):
|
||||
@@ -222,7 +226,7 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
return
|
||||
|
||||
data = self._acwrite_buffer
|
||||
self._acwrite_buffer = ""
|
||||
self._acwrite_buffer = b""
|
||||
try:
|
||||
d = self.agentpool.callRemote("acwrite", self._agent_id, self._ac_id, data)
|
||||
d.addCallback(self.cb_acwrite_result)
|
||||
@@ -233,4 +237,4 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
def cb_acwrite_result(self, result):
|
||||
assert result > 0
|
||||
if self._acwrite_buffer:
|
||||
self.acwrite_data("")
|
||||
self.acwrite_data(b"")
|
||||
|
||||
@@ -69,8 +69,8 @@ class RunOrTestClient(AsyncClientBase):
|
||||
os.path.join(self.options["project_dir"], "platformio.ini")
|
||||
)
|
||||
psync.add_item(cfg.path, "platformio.ini")
|
||||
psync.add_item(cfg.get_optional_dir("shared"), "shared")
|
||||
psync.add_item(cfg.get_optional_dir("boards"), "boards")
|
||||
psync.add_item(cfg.get("platformio", "shared_dir"), "shared")
|
||||
psync.add_item(cfg.get("platformio", "boards_dir"), "boards")
|
||||
|
||||
if self.options["force_remote"]:
|
||||
self._add_project_source_items(cfg, psync)
|
||||
@@ -78,26 +78,26 @@ class RunOrTestClient(AsyncClientBase):
|
||||
self._add_project_binary_items(cfg, psync)
|
||||
|
||||
if self.command == "test":
|
||||
psync.add_item(cfg.get_optional_dir("test"), "test")
|
||||
psync.add_item(cfg.get("platformio", "test_dir"), "test")
|
||||
|
||||
def _add_project_source_items(self, cfg, psync):
|
||||
psync.add_item(cfg.get_optional_dir("lib"), "lib")
|
||||
psync.add_item(cfg.get("platformio", "lib_dir"), "lib")
|
||||
psync.add_item(
|
||||
cfg.get_optional_dir("include"),
|
||||
cfg.get("platformio", "include_dir"),
|
||||
"include",
|
||||
cb_filter=self._cb_tarfile_filter,
|
||||
)
|
||||
psync.add_item(
|
||||
cfg.get_optional_dir("src"), "src", cb_filter=self._cb_tarfile_filter
|
||||
cfg.get("platformio", "src_dir"), "src", cb_filter=self._cb_tarfile_filter
|
||||
)
|
||||
if set(["buildfs", "uploadfs", "uploadfsota"]) & set(
|
||||
self.options.get("target", [])
|
||||
):
|
||||
psync.add_item(cfg.get_optional_dir("data"), "data")
|
||||
psync.add_item(cfg.get("platformio", "data_dir"), "data")
|
||||
|
||||
@staticmethod
|
||||
def _add_project_binary_items(cfg, psync):
|
||||
build_dir = cfg.get_optional_dir("build")
|
||||
build_dir = cfg.get("platformio", "build_dir")
|
||||
for env_name in os.listdir(build_dir):
|
||||
env_dir = os.path.join(build_dir, env_name)
|
||||
if not os.path.isdir(env_dir):
|
||||
|
||||
@@ -28,7 +28,6 @@ from platformio.commands.device import helpers as device_helpers
|
||||
from platformio.commands.device.command import device_monitor as cmd_device_monitor
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.commands.test.command import cli as cmd_test
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.manager.core import inject_contrib_pysite
|
||||
from platformio.project.exception import NotPlatformIOProjectError
|
||||
|
||||
@@ -37,9 +36,8 @@ from platformio.project.exception import NotPlatformIOProjectError
|
||||
@click.option("-a", "--agent", multiple=True)
|
||||
@click.pass_context
|
||||
def cli(ctx, agent):
|
||||
assert ensure_python3()
|
||||
ctx.obj = agent
|
||||
inject_contrib_pysite(verify_openssl=True)
|
||||
inject_contrib_pysite()
|
||||
|
||||
|
||||
@cli.group("agent", short_help="Start a new agent or list active")
|
||||
@@ -338,7 +336,10 @@ def device_monitor(ctx, agents, **kwargs):
|
||||
kwargs["baud"] = kwargs["baud"] or 9600
|
||||
|
||||
def _tx_target(sock_dir):
|
||||
subcmd_argv = ["remote", "device", "monitor"]
|
||||
subcmd_argv = ["remote"]
|
||||
for agent in agents:
|
||||
subcmd_argv.extend(["--agent", agent])
|
||||
subcmd_argv.extend(["device", "monitor"])
|
||||
subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
|
||||
subcmd_argv.extend(["--sock", sock_dir])
|
||||
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
|
||||
@@ -352,7 +353,7 @@ def device_monitor(ctx, agents, **kwargs):
|
||||
sleep(0.1)
|
||||
if not t.is_alive():
|
||||
return
|
||||
with open(sock_file) as fp:
|
||||
with open(sock_file, encoding="utf8") as fp:
|
||||
kwargs["port"] = fp.read()
|
||||
ctx.invoke(cmd_device_monitor, **kwargs)
|
||||
t.join(2)
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
from multiprocessing import cpu_count
|
||||
from time import time
|
||||
|
||||
@@ -100,7 +101,7 @@ def cli(
|
||||
|
||||
# clean obsolete build dir
|
||||
if not disable_auto_clean:
|
||||
build_dir = config.get_optional_dir("build")
|
||||
build_dir = config.get("platformio", "build_dir")
|
||||
try:
|
||||
clean_build_dir(build_dir, config)
|
||||
except: # pylint: disable=bare-except
|
||||
@@ -200,7 +201,7 @@ def print_processing_header(env, config, verbose=False):
|
||||
"Processing %s (%s)"
|
||||
% (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
|
||||
)
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
|
||||
@@ -23,8 +23,8 @@ from platformio.project.helpers import compute_project_checksum, get_project_dir
|
||||
|
||||
def handle_legacy_libdeps(project_dir, config):
|
||||
legacy_libdeps_dir = join(project_dir, ".piolibdeps")
|
||||
if not isdir(legacy_libdeps_dir) or legacy_libdeps_dir == config.get_optional_dir(
|
||||
"libdeps"
|
||||
if not isdir(legacy_libdeps_dir) or legacy_libdeps_dir == config.get(
|
||||
"platformio", "libdeps_dir"
|
||||
):
|
||||
return
|
||||
if not config.has_section("env"):
|
||||
@@ -54,11 +54,11 @@ def clean_build_dir(build_dir, config):
|
||||
if isdir(build_dir):
|
||||
# check project structure
|
||||
if isfile(checksum_file):
|
||||
with open(checksum_file) as fp:
|
||||
with open(checksum_file, encoding="utf8") as fp:
|
||||
if fp.read() == checksum:
|
||||
return
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
makedirs(build_dir)
|
||||
with open(checksum_file, "w") as fp:
|
||||
with open(checksum_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(checksum)
|
||||
|
||||
@@ -12,10 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.commands.platform import init_platform
|
||||
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.exception import UndefinedEnvPlatformError
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
@@ -66,15 +64,7 @@ class EnvironmentProcessor(object):
|
||||
if "monitor" in build_targets:
|
||||
build_targets.remove("monitor")
|
||||
|
||||
try:
|
||||
p = PlatformFactory.new(self.options["platform"])
|
||||
except UnknownPlatform:
|
||||
self.cmd_ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[self.options["platform"]],
|
||||
skip_default_package=True,
|
||||
)
|
||||
p = PlatformFactory.new(self.options["platform"])
|
||||
|
||||
result = p.run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
|
||||
result = init_platform(self.options["platform"]).run(
|
||||
build_vars, build_targets, self.silent, self.verbose, self.jobs
|
||||
)
|
||||
return result["returncode"] == 0
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
|
||||
import json
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import click
|
||||
@@ -22,6 +21,7 @@ from tabulate import tabulate
|
||||
|
||||
from platformio import __version__, compat, fs, proc, util
|
||||
from platformio.commands.system.completion import (
|
||||
ShellType,
|
||||
get_completion_install_path,
|
||||
install_completion_code,
|
||||
uninstall_completion_code,
|
||||
@@ -64,12 +64,12 @@ def system_info(json_output):
|
||||
}
|
||||
data["core_dir"] = {
|
||||
"title": "PlatformIO Core Directory",
|
||||
"value": project_config.get_optional_dir("core"),
|
||||
"value": project_config.get("platformio", "core_dir"),
|
||||
}
|
||||
data["platformio_exe"] = {
|
||||
"title": "PlatformIO Core Executable",
|
||||
"value": proc.where_is_program(
|
||||
"platformio.exe" if proc.WINDOWS else "platformio"
|
||||
"platformio.exe" if compat.IS_WINDOWS else "platformio"
|
||||
),
|
||||
}
|
||||
data["python_exe"] = {
|
||||
@@ -88,7 +88,7 @@ def system_info(json_output):
|
||||
"title": "Tools & Toolchains",
|
||||
"value": len(
|
||||
ToolPackageManager(
|
||||
project_config.get_optional_dir("packages")
|
||||
project_config.get("platformio", "packages_dir")
|
||||
).get_installed()
|
||||
),
|
||||
}
|
||||
@@ -150,23 +150,11 @@ def system_prune(force, dry_run, cache, core_packages, platform_packages):
|
||||
|
||||
@cli.group("completion", short_help="Shell completion support")
|
||||
def completion():
|
||||
# pylint: disable=import-error,import-outside-toplevel
|
||||
try:
|
||||
import click_completion # pylint: disable=unused-import,unused-variable
|
||||
except ImportError:
|
||||
click.echo("Installing dependent packages...")
|
||||
subprocess.check_call(
|
||||
[proc.get_pythonexe_path(), "-m", "pip", "install", "click-completion"],
|
||||
)
|
||||
pass
|
||||
|
||||
|
||||
@completion.command("install", short_help="Install shell completion files/code")
|
||||
@click.option(
|
||||
"--shell",
|
||||
default=None,
|
||||
type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
|
||||
help="The shell type, default=auto",
|
||||
)
|
||||
@click.argument("shell", type=click.Choice([t.value for t in ShellType]))
|
||||
@click.option(
|
||||
"--path",
|
||||
type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),
|
||||
@@ -174,26 +162,18 @@ def completion():
|
||||
"The standard installation path is used by default.",
|
||||
)
|
||||
def completion_install(shell, path):
|
||||
|
||||
import click_completion # pylint: disable=import-outside-toplevel,import-error
|
||||
|
||||
shell = shell or click_completion.get_auto_shell()
|
||||
shell = ShellType(shell)
|
||||
path = path or get_completion_install_path(shell)
|
||||
install_completion_code(shell, path)
|
||||
click.echo(
|
||||
"PlatformIO CLI completion has been installed for %s shell to %s \n"
|
||||
"Please restart a current shell session."
|
||||
% (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
|
||||
% (click.style(shell.name, fg="cyan"), click.style(path, fg="blue"))
|
||||
)
|
||||
|
||||
|
||||
@completion.command("uninstall", short_help="Uninstall shell completion files/code")
|
||||
@click.option(
|
||||
"--shell",
|
||||
default=None,
|
||||
type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
|
||||
help="The shell type, default=auto",
|
||||
)
|
||||
@click.argument("shell", type=click.Choice([t.value for t in ShellType]))
|
||||
@click.option(
|
||||
"--path",
|
||||
type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),
|
||||
@@ -201,14 +181,11 @@ def completion_install(shell, path):
|
||||
"The standard installation path is used by default.",
|
||||
)
|
||||
def completion_uninstall(shell, path):
|
||||
|
||||
import click_completion # pylint: disable=import-outside-toplevel,import-error
|
||||
|
||||
shell = shell or click_completion.get_auto_shell()
|
||||
shell = ShellType(shell)
|
||||
path = path or get_completion_install_path(shell)
|
||||
uninstall_completion_code(shell, path)
|
||||
click.echo(
|
||||
"PlatformIO CLI completion has been uninstalled for %s shell from %s \n"
|
||||
"Please restart a current shell session."
|
||||
% (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
|
||||
% (click.style(shell.name, fg="cyan"), click.style(path, fg="blue"))
|
||||
)
|
||||
|
||||
@@ -13,61 +13,75 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from enum import Enum
|
||||
|
||||
import click
|
||||
|
||||
from platformio.compat import IS_MACOS
|
||||
|
||||
|
||||
class ShellType(Enum):
|
||||
FISH = "fish"
|
||||
ZSH = "zsh"
|
||||
BASH = "bash"
|
||||
|
||||
|
||||
def get_completion_install_path(shell):
|
||||
home_dir = os.path.expanduser("~")
|
||||
prog_name = click.get_current_context().find_root().info_name
|
||||
if shell == "fish":
|
||||
if shell == ShellType.FISH:
|
||||
return os.path.join(
|
||||
home_dir, ".config", "fish", "completions", "%s.fish" % prog_name
|
||||
)
|
||||
if shell == "bash":
|
||||
return os.path.join(home_dir, ".bash_completion")
|
||||
if shell == "zsh":
|
||||
if shell == ShellType.ZSH:
|
||||
return os.path.join(home_dir, ".zshrc")
|
||||
if shell == "powershell":
|
||||
return subprocess.check_output(
|
||||
["powershell", "-NoProfile", "echo $profile"]
|
||||
).strip()
|
||||
if shell == ShellType.BASH:
|
||||
return os.path.join(home_dir, ".bash_completion")
|
||||
raise click.ClickException("%s is not supported." % shell)
|
||||
|
||||
|
||||
def get_completion_code(shell):
|
||||
if shell == ShellType.FISH:
|
||||
return "eval (env _PIO_COMPLETE=fish_source pio)"
|
||||
if shell == ShellType.ZSH:
|
||||
code = "autoload -Uz compinit\ncompinit\n" if IS_MACOS else ""
|
||||
return code + 'eval "$(_PIO_COMPLETE=zsh_source pio)"'
|
||||
if shell == ShellType.BASH:
|
||||
return 'eval "$(_PIO_COMPLETE=bash_source pio)"'
|
||||
raise click.ClickException("%s is not supported." % shell)
|
||||
|
||||
|
||||
def is_completion_code_installed(shell, path):
|
||||
if shell == "fish" or not os.path.exists(path):
|
||||
if shell == ShellType.FISH or not os.path.exists(path):
|
||||
return False
|
||||
|
||||
import click_completion # pylint: disable=import-error,import-outside-toplevel
|
||||
|
||||
with open(path) as fp:
|
||||
return click_completion.get_code(shell=shell) in fp.read()
|
||||
with open(path, encoding="utf8") as fp:
|
||||
return get_completion_code(shell) in fp.read()
|
||||
|
||||
|
||||
def install_completion_code(shell, path):
|
||||
import click_completion # pylint: disable=import-error,import-outside-toplevel
|
||||
|
||||
if is_completion_code_installed(shell, path):
|
||||
return None
|
||||
|
||||
return click_completion.install(shell=shell, path=path, append=shell != "fish")
|
||||
append = shell != ShellType.FISH
|
||||
with open(path, mode="a" if append else "w", encoding="utf8") as fp:
|
||||
if append:
|
||||
fp.write("\n\n# Begin: PlatformIO Core completion support\n")
|
||||
fp.write(get_completion_code(shell))
|
||||
if append:
|
||||
fp.write("\n# End: PlatformIO Core completion support\n\n")
|
||||
return True
|
||||
|
||||
|
||||
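# Added illustration (not from the change set): a sketch of the new
# ShellType-based completion helpers introduced above.
from platformio.commands.system.completion import (
    ShellType,
    get_completion_install_path,
    install_completion_code,
)

shell = ShellType.BASH
path = get_completion_install_path(shell)
# Appends the eval snippet to ~/.bash_completion unless it is already there.
install_completion_code(shell, path)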
def uninstall_completion_code(shell, path):
|
||||
if not os.path.exists(path):
|
||||
return True
|
||||
if shell == "fish":
|
||||
if shell == ShellType.FISH:
|
||||
os.remove(path)
|
||||
return True
|
||||
|
||||
import click_completion # pylint: disable=import-error,import-outside-toplevel
|
||||
|
||||
with open(path, "r+") as fp:
|
||||
with open(path, "r+", encoding="utf8") as fp:
|
||||
contents = fp.read()
|
||||
fp.seek(0)
|
||||
fp.truncate()
|
||||
fp.write(contents.replace(click_completion.get_code(shell=shell), ""))
|
||||
fp.write(contents.replace(get_completion_code(shell), ""))
|
||||
|
||||
return True
|
||||
|
||||
@@ -14,18 +14,19 @@
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
|
||||
|
||||
from fnmatch import fnmatch
|
||||
from os import getcwd, listdir
|
||||
from os.path import isdir, join
|
||||
import fnmatch
|
||||
import os
|
||||
import shutil
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import app, exception, fs, util
|
||||
from platformio.commands.platform import init_platform
|
||||
from platformio.commands.test.embedded import EmbeddedTestProcessor
|
||||
from platformio.commands.test.helpers import get_test_names
|
||||
from platformio.commands.test.native import NativeTestProcessor
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
@@ -50,7 +51,7 @@ from platformio.project.config import ProjectConfig
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
@@ -102,11 +103,7 @@ def cli( # pylint: disable=redefined-builtin
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(project_conf)
|
||||
config.validate(envs=environment)
|
||||
|
||||
test_dir = config.get_optional_dir("test")
|
||||
if not isdir(test_dir):
|
||||
raise exception.TestDirNotExists(test_dir)
|
||||
test_names = get_test_names(test_dir)
|
||||
test_names = get_test_names(config)
|
||||
|
||||
if not verbose:
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
@@ -129,9 +126,11 @@ def cli( # pylint: disable=redefined-builtin
|
||||
not environment and default_envs and envname not in default_envs,
|
||||
testname != "*"
|
||||
and patterns["filter"]
|
||||
and not any(fnmatch(testname, p) for p in patterns["filter"]),
|
||||
and not any(
|
||||
fnmatch.fnmatch(testname, p) for p in patterns["filter"]
|
||||
),
|
||||
testname != "*"
|
||||
and any(fnmatch(testname, p) for p in patterns["ignore"]),
|
||||
and any(fnmatch.fnmatch(testname, p) for p in patterns["ignore"]),
|
||||
]
|
||||
if any(skip_conditions):
|
||||
results.append({"env": envname, "test": testname})
|
||||
@@ -142,7 +141,8 @@ def cli( # pylint: disable=redefined-builtin
|
||||
|
||||
cls = (
|
||||
EmbeddedTestProcessor
|
||||
if is_embedded_platform(config.get(section, "platform"))
|
||||
if config.get(section, "platform")
|
||||
and init_platform(config.get(section, "platform")).is_embedded()
|
||||
else NativeTestProcessor
|
||||
)
|
||||
tp = cls(
|
||||
@@ -185,22 +185,6 @@ def cli( # pylint: disable=redefined-builtin
|
||||
raise exception.ReturnErrorCode(1)
|
||||
|
||||
|
||||
def get_test_names(test_dir):
|
||||
names = []
|
||||
for item in sorted(listdir(test_dir)):
|
||||
if isdir(join(test_dir, item)):
|
||||
names.append(item)
|
||||
if not names:
|
||||
names = ["*"]
|
||||
return names
|
||||
|
||||
|
||||
def is_embedded_platform(name):
|
||||
if not name:
|
||||
return False
|
||||
return PlatformFactory.new(name).is_embedded()
|
||||
|
||||
|
||||
def print_processing_header(test, env):
|
||||
click.echo(
|
||||
"Processing %s in %s environment"
|
||||
@@ -209,7 +193,7 @@ def print_processing_header(test, env):
|
||||
click.style(env, fg="cyan", bold=True),
|
||||
)
|
||||
)
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
|
||||
@@ -117,13 +117,10 @@ class EmbeddedTestProcessor(TestProcessorBase):
|
||||
port = item["port"]
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
if hwid_str in item["hwid"] and self.is_serial_port_ready(port):
|
||||
return port
|
||||
|
||||
# check if port is already configured
|
||||
try:
|
||||
serial.Serial(port, timeout=self.SERIAL_TIMEOUT).close()
|
||||
except serial.SerialException:
|
||||
if port and not self.is_serial_port_ready(port):
|
||||
port = None
|
||||
|
||||
if not port:
|
||||
@@ -136,3 +133,18 @@ class EmbeddedTestProcessor(TestProcessorBase):
|
||||
"global `--test-port` option."
|
||||
)
|
||||
return port
|
||||
|
||||
    @staticmethod
    def is_serial_port_ready(port, timeout=3):
        if not port:
            return False
        elapsed = 0
        while elapsed < timeout:
            try:
                serial.Serial(port, timeout=1).close()
                return True
            except:  # pylint: disable=bare-except
                pass
            sleep(1)
            elapsed += 1
        return False

platformio/commands/test/helpers.py (new file)
@@ -0,0 +1,30 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os

from platformio import exception


def get_test_names(config):
    test_dir = config.get("platformio", "test_dir")
    if not os.path.isdir(test_dir):
        raise exception.TestDirNotExists(test_dir)
    names = []
    for item in sorted(os.listdir(test_dir)):
        if os.path.isdir(os.path.join(test_dir, item)):
            names.append(item)
    if not names:
        names = ["*"]
    return names
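# Added illustration (not from the change set): the helper reads `test_dir`
# from the project configuration and lists test suites, falling back to the
# "*" wildcard when the directory has no subfolders.
from platformio.commands.test.helpers import get_test_names
from platformio.project.config import ProjectConfig

config = ProjectConfig.get_instance("platformio.ini")
print(get_test_names(config))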
@@ -31,7 +31,7 @@ class NativeTestProcessor(TestProcessorBase):
|
||||
return self.run()
|
||||
|
||||
def run(self):
|
||||
build_dir = self.options["project_config"].get_optional_dir("build")
|
||||
build_dir = self.options["project_config"].get("platformio", "build_dir")
|
||||
result = proc.exec_command(
|
||||
[join(build_dir, self.env_name, "program")],
|
||||
stdout=LineBufferedAsyncPipe(self.on_run_out),
|
||||
|
||||
@@ -124,7 +124,7 @@ class TestProcessorBase(object):
|
||||
def build_or_upload(self, target):
|
||||
if not self._output_file_generated:
|
||||
self.generate_output_file(
|
||||
self.options["project_config"].get_optional_dir("test")
|
||||
self.options["project_config"].get("platformio", "test_dir")
|
||||
)
|
||||
self._output_file_generated = True
|
||||
|
||||
@@ -139,9 +139,9 @@ class TestProcessorBase(object):
|
||||
cmd_run,
|
||||
project_dir=self.options["project_dir"],
|
||||
project_conf=self.options["project_config"].path,
|
||||
upload_port=self.options["upload_port"],
|
||||
upload_port=self.options.get("upload_port"),
|
||||
verbose=self.options["verbose"],
|
||||
silent=self.options["silent"],
|
||||
silent=self.options.get("silent"),
|
||||
environment=[self.env_name],
|
||||
disable_auto_clean="nobuild" in target,
|
||||
target=target,
|
||||
@@ -224,7 +224,7 @@ class TestProcessorBase(object):
|
||||
test_dir,
|
||||
"%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
|
||||
)
|
||||
with open(tmp_file, "w") as fp:
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
|
||||
atexit.register(delete_tmptest_files, test_dir)
|
||||
|
||||
@@ -21,7 +21,7 @@ import click
|
||||
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.clients.http import fetch_remote_content
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command, get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
@@ -40,7 +40,7 @@ def cli(dev):
|
||||
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (
|
||||
["pip", "install", "--upgrade", get_pip_package(to_develop)],
|
||||
["pip", "install", "--upgrade", download_dist_package(to_develop)],
|
||||
["platformio", "--version"],
|
||||
)
|
||||
|
||||
@@ -73,7 +73,7 @@ def cli(dev):
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if any(m in r["err"].lower() for m in permission_errors) and not WINDOWS:
|
||||
if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
|
||||
click.secho(
|
||||
"""
|
||||
-----------------
|
||||
@@ -94,7 +94,7 @@ WARNING! Don't use `sudo` for the rest PlatformIO commands.
|
||||
return True
|
||||
|
||||
|
||||
def get_pip_package(to_develop):
|
||||
def download_dist_package(to_develop):
|
||||
if not to_develop:
|
||||
return "platformio"
|
||||
dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
@@ -103,7 +103,7 @@ def get_pip_package(to_develop):
|
||||
os.makedirs(cache_dir)
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "w") as fp:
|
||||
with open(pkg_name, "wb") as fp:
|
||||
r = exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
|
||||
)
|
||||
|
||||
@@ -12,23 +12,57 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-import, no-name-in-module, import-error,
|
||||
# pylint: disable=no-member, undefined-variable, unexpected-keyword-arg
|
||||
# pylint: disable=unused-import,no-name-in-module
|
||||
|
||||
import glob
|
||||
import inspect
|
||||
import json
|
||||
import locale
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from platformio.exception import UserSideException
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
if sys.version_info >= (3, 7):
|
||||
from asyncio import create_task as aio_create_task
|
||||
from asyncio import get_running_loop as aio_get_running_loop
|
||||
else:
|
||||
from asyncio import ensure_future as aio_create_task
|
||||
from asyncio import get_event_loop as aio_get_running_loop
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
CYGWIN = sys.platform.startswith("cygwin")
|
||||
WINDOWS = sys.platform.startswith("win")
|
||||
MACOS = sys.platform.startswith("darwin")
|
||||
IS_CYGWIN = sys.platform.startswith("cygwin")
|
||||
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
|
||||
IS_MACOS = sys.platform.startswith("darwin")
|
||||
string_types = (str,)
|
||||
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (bytes, memoryview, bytearray))
|
||||
|
||||
|
||||
def ci_strings_are_equal(a, b):
|
||||
if a == b:
|
||||
return True
|
||||
if not a or not b:
|
||||
return False
|
||||
return a.strip().lower() == b.strip().lower()
|
||||
|
||||
|
||||
def hashlib_encode_data(data):
|
||||
if is_bytes(data):
|
||||
return data
|
||||
if not isinstance(data, string_types):
|
||||
data = str(data)
|
||||
return data.encode()
|
||||
|
||||
|
||||
def load_python_module(name, pathname):
|
||||
import importlib.util # pylint: disable=import-outside-toplevel
|
||||
|
||||
spec = importlib.util.spec_from_file_location(name, pathname)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
return module
|
||||
|
||||
|
||||
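# Added illustration (not from the change set): the importlib-based loader
# above replaces the Python 2 `imp.load_source` call; usage stays the same.
# The path below is only a placeholder.
module = load_python_module("custom_script", "/path/to/custom_script.py")
print(module.__name__)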
def get_filesystem_encoding():
|
||||
@@ -53,14 +87,6 @@ def get_object_members(obj, ignore_private=True):
|
||||
}
|
||||
|
||||
|
||||
def ci_strings_are_equal(a, b):
|
||||
if a == b:
|
||||
return True
|
||||
if not a or not b:
|
||||
return False
|
||||
return a.strip().lower() == b.strip().lower()
|
||||
|
||||
|
||||
def ensure_python3(raise_exception=True):
|
||||
compatible = sys.version_info >= (3, 6)
|
||||
if not raise_exception or compatible:
|
||||
@@ -73,99 +99,9 @@ def ensure_python3(raise_exception=True):
|
||||
)
|
||||
|
||||
|
||||
if PY2:
|
||||
import imp
|
||||
|
||||
string_types = (str, unicode)
|
||||
|
||||
def create_task(coro, name=None):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_running_loop():
|
||||
raise NotImplementedError
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (buffer, bytearray))
|
||||
|
||||
def path_to_unicode(path):
|
||||
if isinstance(path, unicode):
|
||||
return path
|
||||
return path.decode(get_filesystem_encoding())
|
||||
|
||||
def hashlib_encode_data(data):
|
||||
if is_bytes(data):
|
||||
return data
|
||||
if isinstance(data, unicode):
|
||||
data = data.encode(get_filesystem_encoding())
|
||||
elif not isinstance(data, string_types):
|
||||
data = str(data)
|
||||
return data
|
||||
|
||||
def dump_json_to_unicode(obj):
|
||||
if isinstance(obj, unicode):
|
||||
return obj
|
||||
return json.dumps(
|
||||
obj, encoding=get_filesystem_encoding(), ensure_ascii=False
|
||||
).encode("utf8")
|
||||
|
||||
_magic_check = re.compile("([*?[])")
|
||||
_magic_check_bytes = re.compile(b"([*?[])")
|
||||
|
||||
def glob_recursive(pathname):
|
||||
return glob.glob(pathname)
|
||||
|
||||
def glob_escape(pathname):
|
||||
"""Escape all special characters."""
|
||||
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
|
||||
# Escaping is done by wrapping any of "*?[" between square brackets.
|
||||
# Metacharacters do not work in the drive part and shouldn't be
|
||||
# escaped.
|
||||
drive, pathname = os.path.splitdrive(pathname)
|
||||
if isinstance(pathname, bytes):
|
||||
pathname = _magic_check_bytes.sub(br"[\1]", pathname)
|
||||
else:
|
||||
pathname = _magic_check.sub(r"[\1]", pathname)
|
||||
return drive + pathname
|
||||
|
||||
def load_python_module(name, pathname):
|
||||
return imp.load_source(name, pathname)
|
||||
|
||||
|
||||
else:
|
||||
import importlib.util
|
||||
from glob import escape as glob_escape
|
||||
|
||||
if sys.version_info >= (3, 7):
|
||||
from asyncio import create_task, get_running_loop
|
||||
else:
|
||||
from asyncio import ensure_future as create_task
|
||||
from asyncio import get_event_loop as get_running_loop
|
||||
|
||||
string_types = (str,)
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (bytes, memoryview, bytearray))
|
||||
|
||||
def path_to_unicode(path):
|
||||
return path
|
||||
|
||||
def hashlib_encode_data(data):
|
||||
if is_bytes(data):
|
||||
return data
|
||||
if not isinstance(data, string_types):
|
||||
data = str(data)
|
||||
return data.encode()
|
||||
|
||||
def dump_json_to_unicode(obj):
|
||||
if isinstance(obj, string_types):
|
||||
return obj
|
||||
return json.dumps(obj)
|
||||
|
||||
def glob_recursive(pathname):
|
||||
return glob.glob(pathname, recursive=True)
|
||||
|
||||
def load_python_module(name, pathname):
|
||||
spec = importlib.util.spec_from_file_location(name, pathname)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
return module
|
||||
def path_to_unicode(path):
|
||||
"""
|
||||
Deprecated: Compatibility with dev-platforms,
|
||||
and custom device monitor filters
|
||||
"""
|
||||
return path
|
||||
|
||||
platformio/debug/config/base.py (new file)
@@ -0,0 +1,253 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from platformio import fs, proc, util
|
||||
from platformio.compat import string_types
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.helpers import reveal_debug_port
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import load_project_ide_data
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, platform, project_config, env_name):
|
||||
self.platform = platform
|
||||
self.project_config = project_config
|
||||
self.env_name = env_name
|
||||
self.env_options = project_config.items(env=env_name, as_dict=True)
|
||||
self.build_data = self._load_build_data()
|
||||
|
||||
self.tool_name = None
|
||||
self.board_config = {}
|
||||
self.tool_settings = {}
|
||||
if "board" in self.env_options:
|
||||
self.board_config = platform.board_config(self.env_options["board"])
|
||||
self.tool_name = self.board_config.get_debug_tool_name(
|
||||
self.env_options.get("debug_tool")
|
||||
)
|
||||
self.tool_settings = (
|
||||
self.board_config.get("debug", {})
|
||||
.get("tools", {})
|
||||
.get(self.tool_name, {})
|
||||
)
|
||||
|
||||
self._load_cmds = None
|
||||
self._port = None
|
||||
|
||||
self.server = self._configure_server()
|
||||
|
||||
try:
|
||||
platform.configure_debug_session(self)
|
||||
except NotImplementedError:
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def cleanup_cmds(items):
|
||||
items = ProjectConfig.parse_multi_values(items)
|
||||
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
|
||||
|
||||
@property
|
||||
def program_path(self):
|
||||
return self.build_data["prog_path"]
|
||||
|
||||
@property
|
||||
def client_executable_path(self):
|
||||
return self.build_data["gdb_path"]
|
||||
|
||||
@property
|
||||
def load_cmds(self):
|
||||
if self._load_cmds is not None:
|
||||
return self._load_cmds
|
||||
result = self.env_options.get("debug_load_cmds")
|
||||
if not result:
|
||||
result = self.tool_settings.get("load_cmds")
|
||||
if not result:
|
||||
# legacy
|
||||
result = self.tool_settings.get("load_cmd")
|
||||
if not result:
|
||||
result = ProjectOptions["env.debug_load_cmds"].default
|
||||
return self.cleanup_cmds(result)
|
||||
|
||||
@load_cmds.setter
|
||||
def load_cmds(self, cmds):
|
||||
self._load_cmds = cmds
|
||||
|
||||
@property
|
||||
def load_mode(self):
|
||||
result = self.env_options.get("debug_load_mode")
|
||||
if not result:
|
||||
result = self.tool_settings.get("load_mode")
|
||||
return result or ProjectOptions["env.debug_load_mode"].default
|
||||
|
||||
@property
|
||||
def init_break(self):
|
||||
missed = object()
|
||||
result = self.env_options.get("debug_init_break", missed)
|
||||
if result != missed:
|
||||
return result
|
||||
result = None
|
||||
if not result:
|
||||
result = self.tool_settings.get("init_break")
|
||||
return result or ProjectOptions["env.debug_init_break"].default
|
||||
|
||||
@property
|
||||
def init_cmds(self):
|
||||
return self.cleanup_cmds(
|
||||
self.env_options.get("debug_init_cmds", self.tool_settings.get("init_cmds"))
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_cmds(self):
|
||||
return self.cleanup_cmds(
|
||||
self.env_options.get("debug_extra_cmds")
|
||||
) + self.cleanup_cmds(self.tool_settings.get("extra_cmds"))
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
return reveal_debug_port(
|
||||
self.env_options.get("debug_port", self.tool_settings.get("port"))
|
||||
or self._port,
|
||||
self.tool_name,
|
||||
self.tool_settings,
|
||||
)
|
||||
|
||||
@port.setter
|
||||
def port(self, value):
|
||||
self._port = value
|
||||
|
||||
@property
|
||||
def upload_protocol(self):
|
||||
return self.env_options.get(
|
||||
"upload_protocol", self.board_config.get("upload", {}).get("protocol")
|
||||
)
|
||||
|
||||
@property
|
||||
def speed(self):
|
||||
return self.env_options.get("debug_speed", self.tool_settings.get("speed"))
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return self.env_options.get(
|
||||
"debug_server_ready_pattern", (self.server or {}).get("ready_pattern")
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_project_ide_data(os.getcwd(), self.env_name, cache=True)
|
||||
if data:
|
||||
return data
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
|
||||
def _configure_server(self):
|
||||
# user disabled server in platformio.ini
|
||||
if "debug_server" in self.env_options and not self.env_options.get(
|
||||
"debug_server"
|
||||
):
|
||||
return None
|
||||
|
||||
result = None
|
||||
|
||||
# specific server per a system
|
||||
if isinstance(self.tool_settings.get("server", {}), list):
|
||||
for item in self.tool_settings["server"][:]:
|
||||
self.tool_settings["server"] = item
|
||||
if util.get_systype() in item.get("system", []):
|
||||
break
|
||||
|
||||
# user overwrites debug server
|
||||
if self.env_options.get("debug_server"):
|
||||
result = {
|
||||
"cwd": None,
|
||||
"executable": None,
|
||||
"arguments": self.env_options.get("debug_server"),
|
||||
}
|
||||
result["executable"] = result["arguments"][0]
|
||||
result["arguments"] = result["arguments"][1:]
|
||||
elif "server" in self.tool_settings:
|
||||
result = self.tool_settings["server"]
|
||||
server_package = result.get("package")
|
||||
server_package_dir = (
|
||||
self.platform.get_package_dir(server_package)
|
||||
if server_package
|
||||
else None
|
||||
)
|
||||
if server_package and not server_package_dir:
|
||||
self.platform.install_packages(
|
||||
with_packages=[server_package],
|
||||
skip_default_package=True,
|
||||
silent=True,
|
||||
)
|
||||
server_package_dir = self.platform.get_package_dir(server_package)
|
||||
result.update(
|
||||
dict(
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=result.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
for a in result.get("arguments", [])
|
||||
],
|
||||
)
|
||||
)
|
||||
return self.reveal_patterns(result) if result else None
|
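For reference, a minimal sketch of how a user-supplied `debug_server` option is split by the branch above: the first entry becomes the executable and the rest become its arguments. The option value and paths below are assumptions for illustration, not taken from this diff.

# illustrative only -- the values are made up
env_options = {"debug_server": ["/opt/openocd/bin/openocd", "-f", "board/st_nucleo_f4.cfg"]}
args = env_options["debug_server"]
server = {"cwd": None, "executable": args[0], "arguments": args[1:]}
assert server["executable"].endswith("openocd")
assert server["arguments"] == ["-f", "board/st_nucleo_f4.cfg"]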
||||
|
||||
def get_init_script(self, debugger):
|
||||
try:
|
||||
return getattr(self, "%s_INIT_SCRIPT" % debugger.upper())
|
||||
except AttributeError:
|
||||
raise NotImplementedError
|
||||
|
||||
def reveal_patterns(self, source, recursive=True):
|
||||
program_path = self.program_path or ""
|
||||
patterns = {
|
||||
"PLATFORMIO_CORE_DIR": self.project_config.get("platformio", "core_dir"),
|
||||
"PYTHONEXE": proc.get_pythonexe_path(),
|
||||
"PROJECT_DIR": os.getcwd(),
|
||||
"PROG_PATH": program_path,
|
||||
"PROG_DIR": os.path.dirname(program_path),
|
||||
"PROG_NAME": os.path.basename(os.path.splitext(program_path)[0]),
|
||||
"DEBUG_PORT": self.port,
|
||||
"UPLOAD_PROTOCOL": self.upload_protocol,
|
||||
"INIT_BREAK": self.init_break or "",
|
||||
"LOAD_CMDS": "\n".join(self.load_cmds or []),
|
||||
}
|
||||
for key, value in patterns.items():
|
||||
if key.endswith(("_DIR", "_PATH")):
|
||||
patterns[key] = fs.to_unix_path(value)
|
||||
|
||||
def _replace(text):
|
||||
for key, value in patterns.items():
|
||||
pattern = "$%s" % key
|
||||
text = text.replace(pattern, value or "")
|
||||
return text
|
||||
|
||||
if isinstance(source, string_types):
|
||||
source = _replace(source)
|
||||
elif isinstance(source, (list, dict)):
|
||||
items = enumerate(source) if isinstance(source, list) else source.items()
|
||||
for key, value in items:
|
||||
if isinstance(value, string_types):
|
||||
source[key] = _replace(value)
|
||||
elif isinstance(value, (list, dict)) and recursive:
|
||||
source[key] = self.reveal_patterns(value, recursive)
|
||||
|
||||
data = json.dumps(source)
|
||||
if any(("$" + key) in data for key in patterns):
|
||||
source = self.reveal_patterns(source, recursive=True)
|
||||
|
||||
return source
|
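The `$PATTERN` substitution implemented above reduces to plain string replacement over the script text; a minimal standalone sketch with hypothetical values (not taken from this diff):

patterns = {"DEBUG_PORT": ":3333", "INIT_BREAK": "tbreak main", "LOAD_CMDS": "load"}
script = "target extended-remote $DEBUG_PORT\n$LOAD_CMDS\n$INIT_BREAK"
for key, value in patterns.items():
    # each $KEY is replaced; missing values collapse to an empty string
    script = script.replace("$" + key, value or "")
print(script)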
||||
49
platformio/debug/config/blackmagic.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class BlackmagicDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor swdp_scan
|
||||
attach 1
|
||||
set mem inaccessible-by-default off
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
"""
|
||||
48
platformio/debug/config/factory.py
Normal file
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import importlib
|
||||
import re
|
||||
|
||||
from platformio.debug.config.generic import GenericDebugConfig
|
||||
from platformio.debug.config.native import NativeDebugConfig
|
||||
|
||||
|
||||
class DebugConfigFactory(object):
|
||||
@staticmethod
|
||||
def get_clsname(name):
|
||||
name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
|
||||
return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])
|
||||
|
||||
@classmethod
|
||||
def new(cls, platform, project_config, env_name):
|
||||
board_config = platform.board_config(
|
||||
project_config.get("env:" + env_name, "board")
|
||||
)
|
||||
tool_name = (
|
||||
board_config.get_debug_tool_name(
|
||||
project_config.get("env:" + env_name, "debug_tool")
|
||||
)
|
||||
if board_config
|
||||
else None
|
||||
)
|
||||
config_cls = None
|
||||
try:
|
||||
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
|
||||
config_cls = getattr(mod, cls.get_clsname(tool_name))
|
||||
except ModuleNotFoundError:
|
||||
config_cls = (
|
||||
GenericDebugConfig if platform.is_embedded() else NativeDebugConfig
|
||||
)
|
||||
return config_cls(platform, project_config, env_name)
|
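To make the dynamic lookup above concrete, a small self-contained check of the class-naming convention; the tool names are assumptions chosen to match the config modules added in this diff:

import re

def get_clsname(name):
    # mirrors DebugConfigFactory.get_clsname above
    name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
    return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])

assert get_clsname("blackmagic") == "BlackmagicDebugConfig"
assert get_clsname("jlink") == "JlinkDebugConfig"
assert get_clsname("mspdebug") == "MspdebugDebugConfig"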
||||
38
platformio/debug/config/generic.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class GenericDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor init
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(GenericDebugConfig, self).__init__(*args, **kwargs)
|
||||
self.port = ":3333"
|
||||
48
platformio/debug/config/jlink.py
Normal file
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class JlinkDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
monitor halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor clrbp
|
||||
monitor reset
|
||||
monitor go
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor clrbp
|
||||
monitor speed auto
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(JlinkDebugConfig, self).__init__(*args, **kwargs)
|
||||
self.port = ":2331"
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super(JlinkDebugConfig, self).server_ready_pattern or (
|
||||
"Waiting for GDB connection"
|
||||
)
|
||||
36
platformio/debug/config/mspdebug.py
Normal file
@@ -0,0 +1,36 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class MspdebugDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
end
|
||||
|
||||
target remote $DEBUG_PORT
|
||||
monitor erase
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(MspdebugDebugConfig, self).__init__(*args, **kwargs)
|
||||
self.port = ":2000"
|
||||
@@ -12,10 +12,23 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
[report]
|
||||
# Regexes for lines to exclude from consideration
|
||||
exclude_lines =
|
||||
pragma: no cover
|
||||
def __repr__
|
||||
raise AssertionError
|
||||
raise NotImplementedError
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class NativeDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
end
|
||||
|
||||
define pio_restart_target
|
||||
end
|
||||
|
||||
$INIT_BREAK
|
||||
""" + (
|
||||
"set startup-with-shell off" if not IS_WINDOWS else ""
|
||||
)
|
||||
37
platformio/debug/config/qemu.py
Normal file
@@ -0,0 +1,37 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class QemuDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(QemuDebugConfig, self).__init__(*args, **kwargs)
|
||||
self.port = ":1234"
|
||||
45
platformio/debug/config/renode.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class RenodeDebugConfig(DebugConfigBase):
|
||||
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
$LOAD_CMDS
|
||||
monitor start
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
monitor start
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(RenodeDebugConfig, self).__init__(*args, **kwargs)
|
||||
self.port = ":3333"
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super(RenodeDebugConfig, self).server_ready_pattern or (
|
||||
"GDB server with all CPUs started on port"
|
||||
)
|
||||
204
platformio/debug/helpers.py
Normal file
@@ -0,0 +1,204 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from fnmatch import fnmatch
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
from os.path import isfile
|
||||
|
||||
from platformio import util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.commands.run.command import print_processing_header
|
||||
from platformio.commands.test.helpers import get_test_names
|
||||
from platformio.commands.test.processor import TestProcessorBase
|
||||
from platformio.compat import IS_WINDOWS, is_bytes
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
|
||||
|
||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
self.STDOUT.write(escape_gdbmi_stream("~", text))
|
||||
self.STDOUT.flush()
|
||||
|
||||
|
||||
def is_gdbmi_mode():
|
||||
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
|
||||
|
||||
|
||||
def escape_gdbmi_stream(prefix, stream):
|
||||
bytes_stream = False
|
||||
if is_bytes(stream):
|
||||
bytes_stream = True
|
||||
stream = stream.decode()
|
||||
|
||||
if not stream:
|
||||
return b"" if bytes_stream else ""
|
||||
|
||||
ends_nl = stream.endswith("\n")
|
||||
stream = re.sub(r"\\+", "\\\\\\\\", stream)
|
||||
stream = stream.replace('"', '\\"')
|
||||
stream = stream.replace("\n", "\\n")
|
||||
stream = '%s"%s"' % (prefix, stream)
|
||||
if ends_nl:
|
||||
stream += "\n"
|
||||
|
||||
return stream.encode() if bytes_stream else stream
|
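A quick worked example of the escaping above; the input text is invented, and the result shown is what the function returns for it:

# '~' marks a GDB/MI console-stream record; quotes and newlines inside the
# payload are escaped, and a real trailing newline keeps records line-delimited.
print(escape_gdbmi_stream("~", 'Reading symbols from "firmware.elf"\n'))
# -> ~"Reading symbols from \"firmware.elf\"\n"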
||||
|
||||
|
||||
def get_default_debug_env(config):
|
||||
default_envs = config.default_envs()
|
||||
all_envs = config.envs()
|
||||
for env in default_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
for env in all_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
return default_envs[0] if default_envs else all_envs[0]
|
||||
|
||||
|
||||
def predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
): # pylint: disable=too-many-arguments
|
||||
debug_testname = project_config.get("env:" + env_name, "debug_test")
|
||||
if debug_testname:
|
||||
test_names = get_test_names(project_config)
|
||||
if debug_testname not in test_names:
|
||||
raise DebugInvalidOptionsError(
|
||||
"Unknown test name `%s`. Valid names are `%s`"
|
||||
% (debug_testname, ", ".join(test_names))
|
||||
)
|
||||
print_processing_header(env_name, project_config, verbose)
|
||||
tp = TestProcessorBase(
|
||||
ctx,
|
||||
debug_testname,
|
||||
env_name,
|
||||
dict(
|
||||
project_config=project_config,
|
||||
project_dir=project_dir,
|
||||
without_building=False,
|
||||
without_uploading=True,
|
||||
without_testing=True,
|
||||
verbose=False,
|
||||
),
|
||||
)
|
||||
tp.build_or_upload(["__debug", "__test"] + (["upload"] if preload else []))
|
||||
else:
|
||||
ctx.invoke(
|
||||
cmd_run,
|
||||
project_dir=project_dir,
|
||||
project_conf=project_config.path,
|
||||
environment=[env_name],
|
||||
target=["__debug"] + (["upload"] if preload else []),
|
||||
verbose=verbose,
|
||||
)
|
||||
|
||||
if preload:
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def has_debug_symbols(prog_path):
|
||||
if not isfile(prog_path):
|
||||
return False
|
||||
matched = {
|
||||
b".debug_info": False,
|
||||
b".debug_abbrev": False,
|
||||
b" -Og": False,
|
||||
b" -g": False,
|
||||
# b"__PLATFORMIO_BUILD_DEBUG__": False,
|
||||
}
|
||||
with open(prog_path, "rb") as fp:
|
||||
last_data = b""
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
for pattern, found in matched.items():
|
||||
if found:
|
||||
continue
|
||||
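# join with the previous chunk so a marker split across the 1024-byte read boundary is still found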
if pattern in last_data + data:
|
||||
matched[pattern] = True
|
||||
last_data = data
|
||||
return all(matched.values())
|
||||
|
||||
|
||||
def is_prog_obsolete(prog_path):
|
||||
prog_hash_path = prog_path + ".sha1"
|
||||
if not isfile(prog_path):
|
||||
return True
|
||||
shasum = sha1()
|
||||
with open(prog_path, "rb") as fp:
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
shasum.update(data)
|
||||
new_digest = shasum.hexdigest()
|
||||
old_digest = None
|
||||
if isfile(prog_hash_path):
|
||||
with open(prog_hash_path, encoding="utf8") as fp:
|
||||
old_digest = fp.read()
|
||||
if new_digest == old_digest:
|
||||
return False
|
||||
with open(prog_hash_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(new_digest)
|
||||
return True
|
||||
|
||||
|
||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
||||
def _get_pattern():
|
||||
if not env_debug_port:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
||||
return env_debug_port
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_serial_port(hwids):
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
if not _is_match_pattern(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if tool_name.startswith("blackmagic"):
|
||||
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
for hwid in hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
if env_debug_port and not _get_pattern():
|
||||
return env_debug_port
|
||||
if not tool_settings.get("require_debug_port"):
|
||||
return None
|
||||
|
||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
||||
if not debug_port:
|
||||
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
|
||||
return debug_port
|
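Note that `debug_port` may also be given as a glob pattern rather than a literal port; a minimal sketch of the matching rule used above (port names invented for illustration):

from fnmatch import fnmatch

# a pattern such as "/dev/ttyACM*" narrows the auto-detected serial ports,
# while a plain value such as ":3333" is returned verbatim
assert fnmatch("/dev/ttyACM0", "/dev/ttyACM*")
assert not fnmatch("/dev/ttyUSB0", "/dev/ttyACM*")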
||||
155
platformio/debug/process/base.py
Normal file
@@ -0,0 +1,155 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import asyncio
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
from platformio.compat import (
|
||||
IS_WINDOWS,
|
||||
aio_create_task,
|
||||
aio_get_running_loop,
|
||||
get_locale_encoding,
|
||||
)
|
||||
|
||||
|
||||
class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
|
||||
def __init__(self, factory):
|
||||
self.factory = factory
|
||||
self._is_exited = False
|
||||
|
||||
def connection_made(self, transport):
|
||||
self.factory.connection_made(transport)
|
||||
|
||||
def pipe_data_received(self, fd, data):
|
||||
pipe_to_cb = [
|
||||
self.factory.stdin_data_received,
|
||||
self.factory.stdout_data_received,
|
||||
self.factory.stderr_data_received,
|
||||
]
|
||||
pipe_to_cb[fd](data)
|
||||
|
||||
def connection_lost(self, exc):
|
||||
self.process_exited()
|
||||
|
||||
def process_exited(self):
|
||||
if self._is_exited:
|
||||
return
|
||||
self.factory.process_exited()
|
||||
self._is_exited = True
|
||||
|
||||
|
||||
class DebugBaseProcess:
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
LOG_FILE = None
|
||||
|
||||
def __init__(self):
|
||||
self.transport = None
|
||||
self._is_running = False
|
||||
self._last_activity = 0
|
||||
self._exit_future = None
|
||||
self._stdin_read_task = None
|
||||
self._std_encoding = get_locale_encoding()
|
||||
|
||||
async def spawn(self, *args, **kwargs):
|
||||
wait_until_exit = False
|
||||
if "wait_until_exit" in kwargs:
|
||||
wait_until_exit = kwargs["wait_until_exit"]
|
||||
del kwargs["wait_until_exit"]
|
||||
for pipe in ("stdin", "stdout", "stderr"):
|
||||
if pipe not in kwargs:
|
||||
kwargs[pipe] = subprocess.PIPE
|
||||
loop = aio_get_running_loop()
|
||||
await loop.subprocess_exec(
|
||||
lambda: DebugSubprocessProtocol(self), *args, **kwargs
|
||||
)
|
||||
if wait_until_exit:
|
||||
self._exit_future = loop.create_future()
|
||||
await self._exit_future
|
||||
|
||||
def is_running(self):
|
||||
return self._is_running
|
||||
|
||||
def connection_made(self, transport):
|
||||
self._is_running = True
|
||||
self.transport = transport
|
||||
|
||||
def connect_stdin_pipe(self):
|
||||
self._stdin_read_task = aio_create_task(self._read_stdin_pipe())
|
||||
|
||||
async def _read_stdin_pipe(self):
|
||||
loop = aio_get_running_loop()
|
||||
if IS_WINDOWS:
|
||||
while True:
|
||||
self.stdin_data_received(
|
||||
await loop.run_in_executor(None, sys.stdin.buffer.readline)
|
||||
)
|
||||
else:
|
||||
reader = asyncio.StreamReader()
|
||||
protocol = asyncio.StreamReaderProtocol(reader)
|
||||
await loop.connect_read_pipe(lambda: protocol, sys.stdin)
|
||||
while True:
|
||||
self.stdin_data_received(await reader.readline())
|
||||
|
||||
def stdin_data_received(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
|
||||
def stdout_data_received(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
while data:
|
||||
chunk = data[: self.STDOUT_CHUNK_SIZE]
|
||||
print(chunk.decode(self._std_encoding, "replace"), end="", flush=True)
|
||||
data = data[self.STDOUT_CHUNK_SIZE :]
|
||||
|
||||
def stderr_data_received(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
print(
|
||||
data.decode(self._std_encoding, "replace"),
|
||||
end="",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
|
||||
def process_exited(self):
|
||||
self._is_running = False
|
||||
self._last_activity = time.time()
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
if self._stdin_read_task:
|
||||
self._stdin_read_task.cancel()
|
||||
self._stdin_read_task = None
|
||||
if self._exit_future:
|
||||
self._exit_future.set_result(True)
|
||||
self._exit_future = None
|
||||
|
||||
def terminate(self):
|
||||
if not self.is_running() or not self.transport:
|
||||
return
|
||||
try:
|
||||
self.transport.kill()
|
||||
self.transport.close()
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
101
platformio/debug/process/client.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import signal
|
||||
import tempfile
|
||||
|
||||
from platformio import fs, proc
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.debug.process.base import DebugBaseProcess
|
||||
from platformio.debug.process.server import DebugServerProcess
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class DebugClientProcess(DebugBaseProcess):
|
||||
def __init__(self, project_dir, debug_config):
|
||||
super(DebugClientProcess, self).__init__()
|
||||
self.project_dir = project_dir
|
||||
self.debug_config = debug_config
|
||||
|
||||
self._server_process = None
|
||||
self._session_id = None
|
||||
|
||||
if not os.path.isdir(get_project_cache_dir()):
|
||||
os.makedirs(get_project_cache_dir())
|
||||
self.working_dir = tempfile.mkdtemp(
|
||||
dir=get_project_cache_dir(), prefix=".piodebug-"
|
||||
)
|
||||
|
||||
self._target_is_running = False
|
||||
self._errors_buffer = b""
|
||||
|
||||
async def run(self):
|
||||
session_hash = (
|
||||
self.debug_config.client_executable_path + self.debug_config.program_path
|
||||
)
|
||||
self._session_id = hashlib.sha1(hashlib_encode_data(session_hash)).hexdigest()
|
||||
self._kill_previous_session()
|
||||
|
||||
if self.debug_config.server:
|
||||
self._server_process = DebugServerProcess(self.debug_config)
|
||||
self.debug_config.port = await self._server_process.run()
|
||||
|
||||
def connection_made(self, transport):
|
||||
super(DebugClientProcess, self).connection_made(transport)
|
||||
self._lock_session(transport.get_pid())
|
||||
# Disable SIGINT and allow GDB's Ctrl+C interrupt
|
||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
||||
self.connect_stdin_pipe()
|
||||
|
||||
def process_exited(self):
|
||||
if self._server_process:
|
||||
self._server_process.terminate()
|
||||
super(DebugClientProcess, self).process_exited()
|
||||
|
||||
def _kill_previous_session(self):
|
||||
assert self._session_id
|
||||
pid = None
|
||||
with ContentCache() as cc:
|
||||
pid = cc.get(self._session_id)
|
||||
cc.delete(self._session_id)
|
||||
if not pid:
|
||||
return
|
||||
if IS_WINDOWS:
|
||||
kill = ["Taskkill", "/PID", pid, "/F"]
|
||||
else:
|
||||
kill = ["kill", pid]
|
||||
try:
|
||||
proc.exec_command(kill)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def _lock_session(self, pid):
|
||||
if not self._session_id:
|
||||
return
|
||||
with ContentCache() as cc:
|
||||
cc.set(self._session_id, str(pid), "1h")
|
||||
|
||||
def _unlock_session(self):
|
||||
if not self._session_id:
|
||||
return
|
||||
with ContentCache() as cc:
|
||||
cc.delete(self._session_id)
|
||||
|
||||
def __del__(self):
|
||||
self._unlock_session()
|
||||
if self.working_dir and os.path.isdir(self.working_dir):
|
||||
fs.rmtree(self.working_dir)
|
||||
193
platformio/debug/process/gdb.py
Normal file
@@ -0,0 +1,193 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import signal
|
||||
import time
|
||||
|
||||
from platformio import telemetry
|
||||
from platformio.compat import aio_get_running_loop, is_bytes
|
||||
from platformio.debug import helpers
|
||||
from platformio.debug.process.client import DebugClientProcess
|
||||
|
||||
|
||||
class GDBClientProcess(DebugClientProcess):
|
||||
|
||||
PIO_SRC_NAME = ".pioinit"
|
||||
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(GDBClientProcess, self).__init__(*args, **kwargs)
|
||||
self._target_is_running = False
|
||||
self._errors_buffer = b""
|
||||
|
||||
async def run(self, extra_args): # pylint: disable=arguments-differ
|
||||
await super(GDBClientProcess, self).run()
|
||||
|
||||
self.generate_init_script(os.path.join(self.working_dir, self.PIO_SRC_NAME))
|
||||
gdb_path = self.debug_config.client_executable_path or "gdb"
|
||||
# start GDB client
|
||||
args = [
|
||||
gdb_path,
|
||||
"-q",
|
||||
"--directory",
|
||||
self.working_dir,
|
||||
"--directory",
|
||||
self.project_dir,
|
||||
"-l",
|
||||
"10",
|
||||
]
|
||||
args.extend(list(extra_args or []))
|
||||
gdb_data_dir = self._get_data_dir(gdb_path)
|
||||
if gdb_data_dir:
|
||||
args.extend(["--data-directory", gdb_data_dir])
|
||||
args.append(self.debug_config.program_path)
|
||||
|
||||
await self.spawn(*args, cwd=self.project_dir, wait_until_exit=True)
|
||||
|
||||
@staticmethod
|
||||
def _get_data_dir(gdb_path):
|
||||
if "msp430" in gdb_path:
|
||||
return None
|
||||
gdb_data_dir = os.path.abspath(
|
||||
os.path.join(os.path.dirname(gdb_path), "..", "share", "gdb")
|
||||
)
|
||||
return gdb_data_dir if os.path.isdir(gdb_data_dir) else None
|
||||
|
||||
def generate_init_script(self, dst):
|
||||
# default GDB init commands depending on debug tool
|
||||
commands = self.debug_config.get_init_script("gdb").split("\n")
|
||||
|
||||
if self.debug_config.init_cmds:
|
||||
commands = self.debug_config.init_cmds
|
||||
commands.extend(self.debug_config.extra_cmds)
|
||||
|
||||
if not any("define pio_reset_run_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_run_target",
|
||||
" echo Warning! Undefined pio_reset_run_target command\\n",
|
||||
" monitor reset",
|
||||
"end",
|
||||
] + commands
|
||||
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_halt_target",
|
||||
" echo Warning! Undefined pio_reset_halt_target command\\n",
|
||||
" monitor reset halt",
|
||||
"end",
|
||||
] + commands
|
||||
if not any("define pio_restart_target" in cmd for cmd in commands):
|
||||
commands += [
|
||||
"define pio_restart_target",
|
||||
" pio_reset_halt_target",
|
||||
" $INIT_BREAK",
|
||||
" %s" % ("continue" if self.debug_config.init_break else "next"),
|
||||
"end",
|
||||
]
|
||||
|
||||
banner = [
|
||||
"echo PlatformIO Unified Debugger -> https://bit.ly/pio-debug\\n",
|
||||
"echo PlatformIO: debug_tool = %s\\n" % self.debug_config.tool_name,
|
||||
"echo PlatformIO: Initializing remote target...\\n",
|
||||
]
|
||||
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
|
||||
commands = banner + commands + footer
|
||||
|
||||
with open(dst, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(self.debug_config.reveal_patterns(commands)))
|
||||
|
||||
def stdin_data_received(self, data):
|
||||
super(GDBClientProcess, self).stdin_data_received(data)
|
||||
if b"-exec-run" in data:
|
||||
if self._target_is_running:
|
||||
token, _ = data.split(b"-", 1)
|
||||
self.stdout_data_received(token + b"^running\n")
|
||||
return
|
||||
if self.debug_config.platform.is_embedded():
|
||||
data = data.replace(b"-exec-run", b"-exec-continue")
|
||||
|
||||
if b"-exec-continue" in data:
|
||||
self._target_is_running = True
|
||||
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
self.transport.get_pipe_transport(0).write(b"pio_reset_run_target\n")
|
||||
self.transport.get_pipe_transport(0).write(data)
|
||||
|
||||
def stdout_data_received(self, data):
|
||||
super(GDBClientProcess, self).stdout_data_received(data)
|
||||
self._handle_error(data)
|
||||
# go to init break automatically
|
||||
if self.INIT_COMPLETED_BANNER.encode() in data:
|
||||
telemetry.send_event(
|
||||
"Debug",
|
||||
"Started",
|
||||
telemetry.dump_run_environment(self.debug_config.env_options),
|
||||
)
|
||||
self._auto_exec_continue()
|
||||
|
||||
def console_log(self, msg):
|
||||
if helpers.is_gdbmi_mode():
|
||||
msg = helpers.escape_gdbmi_stream("~", msg)
|
||||
self.stdout_data_received(msg if is_bytes(msg) else msg.encode())
|
||||
|
||||
def _auto_exec_continue(self):
|
||||
auto_exec_delay = 0.5 # in seconds
|
||||
if self._last_activity > (time.time() - auto_exec_delay):
|
||||
aio_get_running_loop().call_later(0.1, self._auto_exec_continue)
|
||||
return
|
||||
|
||||
if not self.debug_config.init_break or self._target_is_running:
|
||||
return
|
||||
|
||||
self.console_log(
|
||||
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
|
||||
% self.debug_config.init_break
|
||||
)
|
||||
self.console_log(
|
||||
"PlatformIO: More configuration options -> https://bit.ly/pio-debug\n"
|
||||
)
|
||||
if self.debug_config.platform.is_embedded():
|
||||
self.transport.get_pipe_transport(0).write(
|
||||
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
|
||||
)
|
||||
else:
|
||||
self.transport.get_pipe_transport(0).write(
|
||||
b"0-exec-run\n" if helpers.is_gdbmi_mode() else b"run\n"
|
||||
)
|
||||
self._target_is_running = True
|
||||
|
||||
def stderr_data_received(self, data):
|
||||
super(GDBClientProcess, self).stderr_data_received(data)
|
||||
self._handle_error(data)
|
||||
|
||||
def _handle_error(self, data):
|
||||
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
|
||||
if not (
|
||||
self.PIO_SRC_NAME.encode() in self._errors_buffer
|
||||
and b"Error in sourced" in self._errors_buffer
|
||||
):
|
||||
return
|
||||
|
||||
last_errors = self._errors_buffer.decode()
|
||||
last_errors = " ".join(reversed(last_errors.split("\n")))
|
||||
last_errors = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_errors, flags=re.M)
|
||||
|
||||
err = "%s -> %s" % (
|
||||
telemetry.dump_run_environment(self.debug_config.env_options),
|
||||
last_errors,
|
||||
)
|
||||
telemetry.send_exception("DebugInitError: %s" % err)
|
||||
self.transport.close()
|
||||
148
platformio/debug/process/server.py
Normal file
@@ -0,0 +1,148 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
|
||||
from platformio.debug.process.base import DebugBaseProcess
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class DebugServerProcess(DebugBaseProcess):
|
||||
|
||||
STD_BUFFER_SIZE = 1024
|
||||
|
||||
def __init__(self, debug_config):
|
||||
super(DebugServerProcess, self).__init__()
|
||||
self.debug_config = debug_config
|
||||
self._ready = False
|
||||
self._std_buffer = {"out": b"", "err": b""}
|
||||
|
||||
async def run(self): # pylint: disable=too-many-branches
|
||||
server = self.debug_config.server
|
||||
if not server:
|
||||
return None
|
||||
server_executable = server["executable"]
|
||||
if not server_executable:
|
||||
return None
|
||||
if server["cwd"]:
|
||||
server_executable = os.path.join(server["cwd"], server_executable)
|
||||
if (
|
||||
IS_WINDOWS
|
||||
and not server_executable.endswith(".exe")
|
||||
and os.path.isfile(server_executable + ".exe")
|
||||
):
|
||||
server_executable = server_executable + ".exe"
|
||||
|
||||
if not os.path.isfile(server_executable):
|
||||
server_executable = where_is_program(server_executable)
|
||||
if not os.path.isfile(server_executable):
|
||||
raise DebugInvalidOptionsError(
|
||||
"Could not launch Debug Server '%s'. Please check that it "
|
||||
"is installed and is included in a system PATH\n"
|
||||
"See https://docs.platformio.org/page/plus/debugging.html"
|
||||
% server_executable
|
||||
)
|
||||
|
||||
openocd_pipe_allowed = all(
|
||||
[
|
||||
not self.debug_config.env_options.get("debug_port"),
|
||||
"gdb" in self.debug_config.client_executable_path,
|
||||
"openocd" in server_executable,
|
||||
]
|
||||
)
|
||||
if openocd_pipe_allowed:
|
||||
args = []
|
||||
if server["cwd"]:
|
||||
args.extend(["-s", server["cwd"]])
|
||||
args.extend(
|
||||
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
|
||||
)
|
||||
args.extend(server["arguments"])
|
||||
str_args = " ".join(
|
||||
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
|
||||
)
|
||||
return fs.to_unix_path('| "%s" %s' % (server_executable, str_args))
|
||||
|
||||
env = os.environ.copy()
|
||||
# prepend server "lib" folder to LD path
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
and server["cwd"]
|
||||
and os.path.isdir(os.path.join(server["cwd"], "lib"))
|
||||
):
|
||||
ld_key = "DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH"
|
||||
env[ld_key] = os.path.join(server["cwd"], "lib")
|
||||
if os.environ.get(ld_key):
|
||||
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
|
||||
# prepend BIN to PATH
|
||||
if server["cwd"] and os.path.isdir(os.path.join(server["cwd"], "bin")):
|
||||
env["PATH"] = "%s%s%s" % (
|
||||
os.path.join(server["cwd"], "bin"),
|
||||
os.pathsep,
|
||||
os.environ.get("PATH", os.environ.get("Path", "")),
|
||||
)
|
||||
|
||||
await self.spawn(
|
||||
*([server_executable] + server["arguments"]), cwd=server["cwd"], env=env
|
||||
)
|
||||
await self._wait_until_ready()
|
||||
|
||||
return self.debug_config.port
|
||||
|
||||
async def _wait_until_ready(self):
|
||||
ready_pattern = self.debug_config.server_ready_pattern
|
||||
timeout = 60 if ready_pattern else 10
|
||||
elapsed = 0
|
||||
delay = 0.5
|
||||
auto_ready_delay = 0.5
|
||||
while not self._ready and self.is_running() and elapsed < timeout:
|
||||
await asyncio.sleep(delay)
|
||||
if not ready_pattern:
|
||||
self._ready = self._last_activity < (time.time() - auto_ready_delay)
|
||||
elapsed += delay
|
||||
|
||||
def _check_ready_by_pattern(self, data):
|
||||
if self._ready:
|
||||
return self._ready
|
||||
ready_pattern = self.debug_config.server_ready_pattern
|
||||
if ready_pattern:
|
||||
if ready_pattern.startswith("^"):
|
||||
self._ready = re.match(
|
||||
ready_pattern,
|
||||
data.decode("utf-8", "ignore"),
|
||||
)
|
||||
else:
|
||||
self._ready = ready_pattern.encode() in data
|
||||
return self._ready
|
||||
|
||||
def stdout_data_received(self, data):
|
||||
super(DebugServerProcess, self).stdout_data_received(
|
||||
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
|
||||
)
|
||||
self._std_buffer["out"] += data
|
||||
self._check_ready_by_pattern(self._std_buffer["out"])
|
||||
self._std_buffer["out"] = self._std_buffer["out"][-1 * self.STD_BUFFER_SIZE :]
|
||||
|
||||
def stderr_data_received(self, data):
|
||||
super(DebugServerProcess, self).stderr_data_received(data)
|
||||
self._std_buffer["err"] += data
|
||||
self._check_ready_by_pattern(self._std_buffer["err"])
|
||||
self._std_buffer["err"] = self._std_buffer["err"][-1 * self.STD_BUFFER_SIZE :]
|
||||
@@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import json
|
||||
@@ -23,8 +24,8 @@ import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception
|
||||
from platformio.compat import WINDOWS, glob_escape, glob_recursive
|
||||
from platformio import exception, proc
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class cd(object):
|
||||
@@ -40,7 +41,7 @@ class cd(object):
|
||||
|
||||
|
||||
def get_source_dir():
|
||||
curpath = os.path.realpath(__file__)
|
||||
curpath = os.path.abspath(__file__)
|
||||
if not os.path.isfile(curpath):
|
||||
for p in sys.path:
|
||||
if os.path.isfile(os.path.join(p, __file__)):
|
||||
@@ -51,7 +52,7 @@ def get_source_dir():
|
||||
|
||||
def load_json(file_path):
|
||||
try:
|
||||
with open(file_path, "r") as f:
|
||||
with open(file_path, mode="r", encoding="utf8") as f:
|
||||
return json.load(f)
|
||||
except ValueError:
|
||||
raise exception.InvalidJSONFile(file_path)
|
||||
@@ -101,7 +102,7 @@ def ensure_udev_rules():
|
||||
|
||||
def _rules_to_set(rules_path):
|
||||
result = set()
|
||||
with open(rules_path) as fp:
|
||||
with open(rules_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
@@ -118,7 +119,7 @@ def ensure_udev_rules():
|
||||
if not any(os.path.isfile(p) for p in installed_rules):
|
||||
raise exception.MissedUdevRules
|
||||
|
||||
origin_path = os.path.realpath(
|
||||
origin_path = os.path.abspath(
|
||||
os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
|
||||
)
|
||||
if not os.path.isfile(origin_path):
|
||||
@@ -158,7 +159,9 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
|
||||
src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
|
||||
for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
|
||||
items = set()
|
||||
for item in glob_recursive(os.path.join(glob_escape(src_dir), pattern)):
|
||||
for item in glob.glob(
|
||||
os.path.join(glob.escape(src_dir), pattern), recursive=True
|
||||
):
|
||||
if os.path.isdir(item):
|
||||
for root, _, files in os.walk(item, followlinks=followlinks):
|
||||
for f in files:
|
||||
@@ -173,16 +176,35 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
|
||||
|
||||
|
||||
def to_unix_path(path):
|
||||
if not WINDOWS or not path:
|
||||
if not IS_WINDOWS or not path:
|
||||
return path
|
||||
return re.sub(r"[\\]+", "/", path)
|
||||
|
||||
|
||||
def normalize_path(path):
|
||||
path = os.path.abspath(path)
|
||||
if not IS_WINDOWS or not path.startswith("\\\\"):
|
||||
return path
|
||||
try:
|
||||
result = proc.exec_command(["net", "use"])
|
||||
if result["returncode"] != 0:
|
||||
return path
|
||||
share_re = re.compile(r"\s([A-Z]\:)\s+(\\\\[^\s]+)")
|
||||
for line in result["out"].split("\n"):
|
||||
share = share_re.search(line)
|
||||
if not share:
|
||||
continue
|
||||
path = path.replace(share.group(2), share.group(1))
|
||||
except OSError:
|
||||
pass
|
||||
return path
|
||||
|
||||
|
||||
def expanduser(path):
|
||||
"""
|
||||
Be compatible with Python 3.8: on Windows, skip HOME and check USERPROFILE
|
||||
"""
|
||||
if not WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
|
||||
if not IS_WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
|
||||
return os.path.expanduser(path)
|
||||
return os.environ["USERPROFILE"] + path[1:]
|
||||
|
||||
|
||||
@@ -12,8 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import getenv
|
||||
from os.path import join
|
||||
import os
|
||||
import shutil
|
||||
from time import time
|
||||
|
||||
import click
|
||||
@@ -28,7 +28,6 @@ from platformio.commands.lib.command import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.system.prune import calculate_unnecessary_system_data
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
@@ -36,12 +35,9 @@ from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.proc import is_container
|
||||
|
||||
|
||||
def on_platformio_start(ctx, force, caller):
|
||||
ensure_python3(raise_exception=True)
|
||||
|
||||
app.set_session_var("command_ctx", ctx)
|
||||
app.set_session_var("force_option", force)
|
||||
set_caller(caller)
|
||||
@@ -78,20 +74,15 @@ def on_platformio_exception(e):
|
||||
|
||||
|
||||
def set_caller(caller=None):
|
||||
caller = caller or getenv("PLATFORMIO_CALLER")
|
||||
caller = caller or os.getenv("PLATFORMIO_CALLER")
|
||||
if caller:
|
||||
return app.set_session_var("caller_id", caller)
|
||||
if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
if os.getenv("CODESPACES"):
|
||||
caller = "codespaces"
|
||||
elif os.getenv("VSCODE_PID") or os.getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif getenv("GITPOD_INSTANCE_ID") or getenv("GITPOD_WORKSPACE_URL"):
|
||||
elif os.getenv("GITPOD_WORKSPACE_ID") or os.getenv("GITPOD_WORKSPACE_URL"):
|
||||
caller = "gitpod"
|
||||
elif is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
return app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
@@ -142,7 +133,7 @@ class Upgrader(object):
|
||||
|
||||
|
||||
def after_upgrade(ctx):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
last_version = app.get_state_item("last_version", "0.0.0")
|
||||
if last_version == __version__:
|
||||
return
|
||||
@@ -207,7 +198,7 @@ def after_upgrade(ctx):
|
||||
click.style("https://github.com/platformio/platformio", fg="cyan"),
|
||||
)
|
||||
)
|
||||
if not getenv("PLATFORMIO_IDE"):
|
||||
if not os.getenv("PLATFORMIO_IDE"):
|
||||
click.echo(
|
||||
"- %s PlatformIO IDE for embedded development > %s"
|
||||
% (
|
||||
@@ -238,7 +229,7 @@ def check_platformio_upgrade():
|
||||
if pepver_to_semver(latest_version) <= pepver_to_semver(__version__):
|
||||
return
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
|
||||
click.echo("")
|
||||
click.echo("*" * terminal_width)
|
||||
@@ -248,10 +239,10 @@ def check_platformio_upgrade():
|
||||
fg="yellow",
|
||||
nl=False,
|
||||
)
|
||||
if getenv("PLATFORMIO_IDE"):
|
||||
if os.getenv("PLATFORMIO_IDE"):
|
||||
click.secho("PlatformIO IDE Menu: Upgrade PlatformIO", fg="cyan", nl=False)
|
||||
click.secho("`.", fg="yellow")
|
||||
elif join("Cellar", "platformio") in fs.get_source_dir():
|
||||
elif os.path.join("Cellar", "platformio") in fs.get_source_dir():
|
||||
click.secho("brew update && brew upgrade", fg="cyan", nl=False)
|
||||
click.secho("` command.", fg="yellow")
|
||||
else:
|
||||
@@ -291,7 +282,7 @@ def check_internal_updates(ctx, what): # pylint: disable=too-many-branches
|
||||
if not outdated_items:
|
||||
return
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
|
||||
click.echo("")
|
||||
click.echo("*" * terminal_width)
|
||||
@@ -353,7 +344,7 @@ def check_prune_system():
|
||||
if (unnecessary_size / 1024) < threshold_mb:
|
||||
return
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
terminal_width, _ = shutil.get_terminal_size()
|
||||
click.echo()
|
||||
click.echo("*" * terminal_width)
|
||||
click.secho(
|
||||
|
||||
@@ -73,7 +73,7 @@ class FileDownloader(object):
|
||||
def start(self, with_progress=True, silent=False):
|
||||
label = "Downloading"
|
||||
itercontent = self._request.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
|
||||
fp = open(self._destination, "wb")
|
||||
fp = open(self._destination, "wb") # pylint: disable=consider-using-with
|
||||
try:
|
||||
if not with_progress or self.get_size() == -1:
|
||||
if not silent:
|
||||
|
||||
@@ -48,7 +48,7 @@ class LockFile(object):
|
||||
def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
|
||||
self.timeout = timeout
|
||||
self.delay = delay
|
||||
self._lock_path = os.path.realpath(path) + ".lock"
|
||||
self._lock_path = os.path.abspath(path) + ".lock"
|
||||
self._fp = None
|
||||
|
||||
def _lock(self):
|
||||
@@ -62,7 +62,9 @@ class LockFile(object):
|
||||
else:
|
||||
raise LockFileExists
|
||||
|
||||
self._fp = open(self._lock_path, "w")
|
||||
self._fp = open( # pylint: disable=consider-using-with
|
||||
self._lock_path, mode="w", encoding="utf8"
|
||||
)
|
||||
try:
|
||||
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
|
||||
fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.