Mirror of https://github.com/platformio/platformio-core.git (synced 2025-06-25 17:41:54 +02:00)
Compare commits
196 Commits
SHA1:
444c57b4a6, d787648e71, 846588deec, 79142965ce, 93bc4fae6c, 1aa256d63c, 3a133af1a6, f93d3d509b,
145142ea6c, b4b02982d6, 841489c154, 23c142dffd, fc946baa93, a447022e7f, 4c697d9032, a71443a2ee,
20e076191e, d907ecb9e9, c950d6d366, 29cd2d2bdb, a584a6bce3, 4dc7ea5bd0, 1be6e10f99, c9016d6939,
baab25a48c, 4d4f5a217b, b6d1f4d769, 90fc36cf2d, 9be0a8248d, d15314689d, 1d4b5c8051, 47a87c57f2,
ec2d01f277, 4e05309e02, 1fd3a4061f, 014ac79c87, dd3fe909a1, c1afb364e9, f3c27eadf6, fe2fd5e880,
07e7dc4717, a94e5bd5ab, f5ab0e5ddd, 3e20abec90, a4276b4ea6, cade63fba5, 3a57661230, 33fadd028d,
647b131d9b, b537004a75, 67b2759be2, fe2e8a0a40, 03e84fe325, b45cdc9cb6, 3aed8e1259, 2d4a87238a,
023b58e9f0, 3211a2b91b, 4b61de0136, e6ae18ab0d, 4230b223d2, d224ae658d, 20dc006345, 13035ced59,
b9d27240b5, 2441d47321, cf497e8829, 013153718d, f1726843a2, 44ef6e3469, eeb5ac456e, aea9075d4b,
11a8d9ff7a, 7b587ba8bf, 9eb6e5166d, aa580360e8, 4c490cc63c, 882d4da8cb, 781114f026, 7cf8d1d696,
fd1333f031, 8e21259222, 9899547b73, 4075789a32, ff364610c5, e5940673d7, fe140b0566, 2ec5a3154e,
956f21b639, cdac7d497c, 591b377e4a, c475578db6, 2bad42ecb1, 0acfc25d56, 9d1593da0b, e9433de50f,
fcba901611, 0e3249e8b1, 0d647e164b, c01ef88265, 9fb9e586a0, 28bd200cd6, 56be27fb0b, 32991356f3,
dbe58b49bf, d36e39418e, c28740cfb1, 430acc87de, c0d97287dd, 0f3dbe623d, 6449115635, d085a02068,
76a11a75b7, 93018930ab, 621b24b665, 7606dd4faf, aa06d21abe, 042f8dc668, c4f76848a7, e1ff9a469d,
2239616484, 55be7181b3, f519a9d524, f4319f670c, 80fc335528, 353f440335, 3e9ca48588, 255e91b51c,
adf94843ea, e3e08d9691, 84c7ede0e1, 28c90652bc, a75da327d0, adf4012b96, 1fe806269d, ffacd17387,
4742ffc9d8, 700c705317, 17ba91977d, f31f9fa616, 485f801c74, adab425c6d, aabbbef944, 14ce28e028,
ca1f633f9c, a2f3e85760, f422b5e05c, ba58db3079, 4729d9f55d, 41bd751ec2, c74c9778a1, f2d16e7631,
b181406a1f, dc16f80ffc, 125be4bfd4, 14907579cd, b0a1f3ae16, 195304bbea, e4c4f2ac50, 77e6d1b099,
cf4da42b25, 51bf17515e, 1e2c37c190, 204a60dd52, 0f554d2f31, f382aae66b, 998da59f7c, 4cad98601d,
34545d3f12, 127b422d25, 8c61f0f6b6, fb93c1937c, 827bd09c61, 984d63983d, 11df021750, ac6d94860b,
b238c55e53, 961ab6b35e, e1f34c7ea0, f70e6d50c6, 540465291a, 0b3c0144e6, 7ab27ddf9d, e78bf51f68,
5f8c15b96a, 9c61ef544d, 5548197a74, 2458309d55, 7229e1cce4, 3e95134721, 687189a142, 51b4cd88db,
fe52b79eb2, 091c96eb07, f2eead6ece, 2728c90441
.github/workflows/core.yml (vendored, 21 lines changed)
@@ -7,18 +7,18 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, windows-latest, macos-latest]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.11", "3.12", "3.13"]

runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -27,17 +27,18 @@ jobs:
python -m pip install --upgrade pip
pip install tox

- name: Run "codespell" on Linux
if: startsWith(matrix.os, 'ubuntu')
run: |
python -m pip install codespell
make codespell

- name: Core System Info
run: |
tox -e py

- name: Python Lint
if: ${{ matrix.python-version != '3.6' }}
run: |
tox -e lint

- name: Integration Tests
if: ${{ matrix.python-version == '3.9' }}
if: ${{ matrix.python-version == '3.11' }}
run: |
tox -e testcore

.github/workflows/deployment.yml (vendored, 10 lines changed)
@@ -12,19 +12,19 @@ jobs:
environment: production

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox wheel
pip install tox build

- name: Deployment Tests
env:
@@ -34,8 +34,8 @@ jobs:
run: |
tox -e testcore

- name: Build Python source tarball
run: python setup.py sdist bdist_wheel
- name: Build Python distributions
run: python -m build

- name: Publish package to PyPI
if: ${{ github.ref == 'refs/heads/master' }}

.github/workflows/docs.yml (vendored, 16 lines changed)
@@ -7,13 +7,13 @@ jobs:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.9
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
@@ -40,7 +40,7 @@ jobs:

- name: Save artifact
if: ${{ github.event_name == 'push' }}
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: docs
path: ./docs.tar.gz
@@ -57,7 +57,7 @@ jobs:
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: docs
- name: Unpack artifact
@@ -65,7 +65,7 @@ jobs:
mkdir ./${{ env.LATEST_DOCS_DIR }}
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
- name: Delete Artifact
uses: geekyeggo/delete-artifact@v2
uses: geekyeggo/delete-artifact@v5
with:
name: docs
- name: Select Docs type
@@ -78,7 +78,7 @@ jobs:
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}
@@ -101,7 +101,7 @@ jobs:
exit 1
fi
- name: Deploy to Github Pages
uses: peaceiris/actions-gh-pages@v3
uses: peaceiris/actions-gh-pages@v4
with:
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
external_repository: ${{ env.DOCS_REPO }}

.github/workflows/examples.yml (vendored, 6 lines changed)
@@ -15,14 +15,14 @@ jobs:
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"

- name: Install dependencies
run: |
@@ -34,7 +34,7 @@ jobs:
run: |
# Free space
sudo apt clean
docker rmi $(docker image ls -aq)
# docker rmi $(docker image ls -aq)
df -h
tox -e testexamples

.github/workflows/projects.yml (vendored, 19 lines changed)
@@ -13,11 +13,6 @@ jobs:
folder: "Marlin"
config_dir: "Marlin"
env_name: "mega2560"
- esphome:
repository: "esphome/esphome"
folder: "esphome"
config_dir: "esphome"
env_name: "esp32-arduino"
- smartknob:
repository: "scottbez1/smartknob"
folder: "smartknob"
@@ -34,36 +29,28 @@ jobs:
config_dir: "OpenMQTTGateway"
env_name: "esp32-m5atom-lite"
os: [ubuntu-latest, windows-latest, macos-latest]
exclude:
- os: windows-latest
project: {"esphome": "", "repository": "esphome/esphome", "folder": "esphome", "config_dir": "esphome", "env_name": "esp32-arduino"}

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

- name: Install PlatformIO
run: pip install -U .

- name: Check out ${{ matrix.project.repository }}
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
submodules: "recursive"
repository: ${{ matrix.project.repository }}
path: ${{ matrix.project.folder }}

- name: Install ESPHome dependencies
# Requires esptool package as it's used in a custom prescript
if: ${{ contains(matrix.project.repository, 'esphome') }}
run: pip install esptool==3.*

- name: Compile ${{ matrix.project.repository }}
run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}

HISTORY.rst (79 lines changed)
@ -7,6 +7,8 @@ Release Notes
|
||||
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
|
||||
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
|
||||
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
|
||||
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
|
||||
.. |PIOHOME| replace:: `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
|
||||
|
||||
.. _release_notes_6:
|
||||
|
||||
@ -17,11 +19,86 @@ Unlock the true potential of embedded software development with
|
||||
PlatformIO's collaborative ecosystem, embracing declarative principles,
|
||||
test-driven methodologies, and modern toolchains for unrivaled success.
|
||||
|
||||
6.1.19 (2025-??-??)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Fixed a regression issue where custom build flags were not properly reflected in the `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ file, ensuring accurate compilation database generation
|
||||
|
||||
6.1.18 (2025-03-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a regression issue that prevented |PIOHOME| from opening external links (`issue #5084 <https://github.com/platformio/platformio-core/issues/5084>`_)
|
||||
|
||||
6.1.17 (2025-02-13)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the `PLATFORMIO_RUN_JOBS <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_RUN_JOBS>`__ environment variable, allowing manual override of the number of parallel build jobs (`issue #5077 <https://github.com/platformio/platformio-core/issues/5077>`_)
|
||||
* Added support for ``tar.xz`` tarball dependencies (`pull #4974 <https://github.com/platformio/platformio-core/pull/4974>`_)
|
||||
* Ensured that dependencies of private libraries are no longer unnecessarily re-installed, optimizing dependency management and reducing redundant operations (`issue #4987 <https://github.com/platformio/platformio-core/issues/4987>`_)
|
||||
* Resolved an issue where the ``compiledb`` target failed to properly escape compiler executable paths containing spaces (`issue #4998 <https://github.com/platformio/platformio-core/issues/4998>`_)
|
||||
* Resolved an issue with incorrect path resolution when linking static libraries via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option (`issue #5004 <https://github.com/platformio/platformio-core/issues/5004>`_)
|
||||
* Resolved an issue where the ``--project-dir`` flag did not function correctly with the `pio check <https://docs.platformio.org/en/latest/core/userguide/cmd_check.html>`__ and `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ commands (`issue #5029 <https://github.com/platformio/platformio-core/issues/5029>`_)
|
||||
* Resolved an issue where the |LDF| occasionally excluded bundled platform libraries from the dependency graph (`pull #4941 <https://github.com/platformio/platformio-core/pull/4941>`_)
|
||||
|
||||
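The 6.1.17 note above introduces the PLATFORMIO_RUN_JOBS environment variable as a manual override of the parallel build job count. A minimal sketch of how such an override is typically read; `resolve_run_jobs` is a hypothetical helper for illustration, not PlatformIO's own implementation:

```python
import os


def resolve_run_jobs(default=None):
    """Return the number of parallel build jobs, honoring a
    PLATFORMIO_RUN_JOBS-style environment override (illustrative only)."""
    raw = os.getenv("PLATFORMIO_RUN_JOBS")
    if raw:
        try:
            return max(1, int(raw))
        except ValueError:
            pass  # ignore a malformed override and fall back below
    return default or os.cpu_count() or 1
```
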
6.1.16 (2024-09-26)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.13
|
||||
* Introduced the `PLATFORMIO_SYSTEM_TYPE <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_SYSTEM_TYPE>`__ environment variable, enabling manual override of the detected system type for greater flexibility and control in custom build environments
|
||||
* Enhanced internet connection checks by falling back to HTTPS protocol when HTTP (port 80) fails (`issue #4980 <https://github.com/platformio/platformio-core/issues/4980>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.8.1) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.8.1>`__)
|
||||
* Upgraded the `Doctest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ testing framework to version 2.4.11, the `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ to version 1.15.2, and the `Unity <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/unity.html>`__ to version 2.6.0, incorporating the latest features and improvements for enhanced testing capabilities
|
||||
* Corrected an issue where the incorrect public class was imported for the ``DoctestTestRunner`` (`issue #4949 <https://github.com/platformio/platformio-core/issues/4949>`_)
|
||||
|
||||
6.1.15 (2024-04-25)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
|
||||
* Resolved an issue related to the inaccurate detection of the Clang compiler (`pull #4897 <https://github.com/platformio/platformio-core/pull/4897>`_)
|
||||
|
||||
6.1.14 (2024-03-21)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
|
||||
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
|
||||
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
|
||||
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
|
||||
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
|
||||
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
|
||||
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
|
||||
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
|
||||
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)
|
||||
|
||||
6.1.13 (2024-01-12)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
|
||||
|
||||
6.1.12 (2024-01-10)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Added support for Python 3.12
|
||||
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
|
||||
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
|
||||
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
|
||||
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
|
||||
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
|
||||
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
|
||||
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
|
||||
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
|
||||
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
|
||||
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
|
||||
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
|
||||
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
|
||||
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
|
||||
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
|
||||
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
|
||||
|
||||
6.1.11 (2023-08-31)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
|
||||
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
|
||||
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within |PIOHOME|
|
||||
|
||||
6.1.10 (2023-08-11)
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
Makefile (9 lines changed)
@@ -10,10 +10,13 @@ format:
black ./platformio
black ./tests

test:
py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
codespell:
codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

before-commit: isort format lint
test:
pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: codespell isort format lint

clean-docs:
rm -rf docs/_build

Submodule docs updated: fb83b09c41...70ab7ee27b
Submodule examples updated: 28c58d3b7c...0409a90a01
@ -12,7 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
VERSION = (6, 1, 11)
|
||||
VERSION = (6, 1, "19a2")
|
||||
__version__ = ".".join([str(s) for s in VERSION])
|
||||
|
||||
__title__ = "platformio"
|
||||
@ -38,36 +38,8 @@ __registry_mirror_hosts__ = [
|
||||
]
|
||||
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
|
||||
|
||||
__core_packages__ = {
|
||||
"contrib-piohome": "~3.4.2",
|
||||
"contrib-pioremote": "~1.0.0",
|
||||
"tool-scons": "~4.40502.0",
|
||||
"tool-cppcheck": "~1.21100.0",
|
||||
"tool-clangtidy": "~1.150005.0",
|
||||
"tool-pvs-studio": "~7.18.0",
|
||||
}
|
||||
|
||||
__check_internet_hosts__ = [
|
||||
"185.199.110.153", # Github.com
|
||||
"88.198.170.159", # platformio.org
|
||||
"github.com",
|
||||
] + __registry_mirror_hosts__
|
||||
|
||||
__install_requires__ = [
|
||||
# Core requirements
|
||||
"bottle == 0.12.*",
|
||||
"click >=8.0.4, <=8.2",
|
||||
"colorama",
|
||||
"marshmallow == 3.*",
|
||||
"pyelftools == 0.29",
|
||||
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
|
||||
"requests == 2.*",
|
||||
"semantic_version == 2.10.*",
|
||||
"tabulate == 0.*",
|
||||
] + [
|
||||
# PIO Home requirements
|
||||
"ajsonrpc == 1.2.*",
|
||||
"starlette >=0.19, <0.32",
|
||||
"uvicorn >=0.16, <0.24",
|
||||
"wsproto == 1.*",
|
||||
]
|
||||
|
@ -144,7 +144,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint:disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
|
@ -48,11 +48,13 @@ def team_list_cmd(orgname, json_output):
|
||||
table_data.append(
|
||||
(
|
||||
"Members:",
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-",
|
||||
(
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-"
|
||||
),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(table_data, tablefmt="plain"))
|
||||
|
@ -36,6 +36,8 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
@ -85,6 +87,8 @@ ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVI
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
@ -171,3 +175,9 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
@ -147,13 +147,13 @@ if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
|
@ -54,11 +54,12 @@ def GetBuildType(env):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return "+".join(modes or ["release"])
|
||||
return ", ".join(modes or ["release"])
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
env.ProcessProgramDeps()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
env.ProcessProjectDeps()
|
||||
|
||||
# append into the beginning a main LD script
|
||||
@ -126,21 +127,26 @@ def ProcessProgramDeps(env):
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(
|
||||
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
|
||||
)
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" not in COMMAND_LINE_TARGETS:
|
||||
return
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]) or '"' in env[cmd]:
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
if " " in env[cmd]: # issue #4998: Space in compilator path
|
||||
env[cmd] = f'"{env[cmd]}"'
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
|
||||
|
||||
def ProcessProjectDeps(env):
|
||||
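The hunk above moves the `compiledb` toolchain handling into ProcessCompileDbToolchainOption() and quotes compiler paths that contain spaces before they end up in compile_commands.json (issue #4998). A standalone sketch of the same quoting rule, using `shutil.which` as a stand-in for the SCons-based `where_is_program` lookup:

```python
import os
import shutil


def quote_compiler_path(cmd):
    """Resolve a bare compiler name to an absolute path and wrap it in
    quotes when the path contains spaces, so the compilation database
    entry stays parseable (illustrative sketch, not the SCons code above)."""
    if not os.path.isabs(cmd) and '"' not in cmd:
        cmd = shutil.which(cmd) or cmd
    if " " in cmd:
        cmd = f'"{cmd}"'
    return cmd
```
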
@ -214,6 +220,11 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative LIBs
|
||||
for i, l in enumerate(result.get("LIBS", [])):
|
||||
if isinstance(l, FS.File):
|
||||
result["LIBS"][i] = os.path.abspath(l.get_path())
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
@ -376,6 +387,7 @@ def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
|
@ -29,12 +29,7 @@ def IsIntegrationDump(_):
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
result["build"].extend(
|
||||
[
|
||||
env.subst("$PROJECT_INCLUDE_DIR"),
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
)
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
@ -39,7 +39,7 @@ from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@ -309,10 +309,10 @@ class LibBuilderBase:
|
||||
if not self.dependencies or self._deps_are_processed:
|
||||
return
|
||||
self._deps_are_processed = True
|
||||
for item in self.dependencies:
|
||||
for dependency in self.dependencies:
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item["name"] != lb.name:
|
||||
if not lb.is_dependency_compatible(dependency):
|
||||
continue
|
||||
found = True
|
||||
if lb not in self.depbuilders:
|
||||
@ -322,9 +322,28 @@ class LibBuilderBase:
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write(
|
||||
"Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item["name"], self.name)
|
||||
"library\n" % (dependency["name"], self.name)
|
||||
)
|
||||
|
||||
def is_dependency_compatible(self, dependency):
|
||||
pkg = PackageItem(self.path)
|
||||
qualifiers = {"name": self.name, "version": self.version}
|
||||
if pkg.metadata:
|
||||
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
|
||||
if pkg.metadata.spec and pkg.metadata.spec.owner:
|
||||
qualifiers["owner"] = pkg.metadata.spec.owner
|
||||
dep_qualifiers = {
|
||||
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
|
||||
}
|
||||
if (
|
||||
"version" in dep_qualifiers
|
||||
and not PackageSpec(dep_qualifiers["version"]).requirements
|
||||
):
|
||||
del dep_qualifiers["version"]
|
||||
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
|
||||
PackageCompatibility(**qualifiers)
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
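The new is_dependency_compatible() method above reduces a declared dependency to its owner/name/version qualifiers and drops a version qualifier that carries no real requirement (for example a bare repository URL), so it cannot reject an otherwise matching library. A simplified sketch of that filtering step; the regular expression is a rough stand-in for PackageSpec(...).requirements and the helper name is hypothetical:

```python
import re


def constraining_qualifiers(dependency):
    """Keep only owner/name/version and drop a non-constraining version
    value, mirroring the intent of is_dependency_compatible() above."""
    qualifiers = {
        k: v for k, v in dependency.items() if k in ("owner", "name", "version")
    }
    version = str(qualifiers.get("version", ""))
    if version and not re.match(r"^[\^~><=!]*\s*\d", version):
        del qualifiers["version"]  # e.g. a VCS URL, not a semver requirement
    return qualifiers
```
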
@ -477,6 +496,7 @@ class LibBuilderBase:
|
||||
self.is_built = True
|
||||
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.ProcessCompileDbToolchainOption()
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
@ -791,7 +811,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
if path not in include_dirs and path not in self.envorigin.get(
|
||||
"CPPPATH", []
|
||||
):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
@ -1137,6 +1159,8 @@ def ConfigureProjectLibBuilder(env):
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
# refill found libs after recursive search
|
||||
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
|
||||
for lb in lib_builders:
|
||||
for deplb in lb.depbuilders[:]:
|
||||
if deplb not in found_lbs:
|
||||
|
@ -23,10 +23,10 @@ from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
|
||||
# There are the next limits depending on a platform:
|
||||
# - Windows = 8192
|
||||
# - Windows = 8191
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
|
||||
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
|
||||
|
||||
|
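The comment fix above corrects the Windows command-line limit from 8192 to 8191 characters while keeping roughly 512 characters of headroom for the compiler and temporary file paths. A small sketch of how such a budget can drive the decision to spill a long argument list into a response file; `needs_response_file` is a hypothetical helper, not part of this module:

```python
import sys

# Same budget as the hunk above: platform limit minus ~512 characters
# reserved for the compiler path and temporary files.
MAX_LINE_LENGTH = (8191 if sys.platform.startswith("win") else 131072) - 512


def needs_response_file(program, args):
    """Return True when the assembled command line would exceed the
    limit and the arguments should go through an @response file."""
    cmdline_len = len(program) + sum(len(arg) + 1 for arg in args)
    return cmdline_len > MAX_LINE_LENGTH
```
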
@ -20,19 +20,23 @@ from platformio.proc import exec_command
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def GetCompilerType(env):
|
||||
if env.subst("$CC").endswith("-gcc"):
|
||||
def GetCompilerType(env): # pylint: disable=too-many-return-statements
|
||||
CC = env.subst("$CC")
|
||||
if CC.endswith("-gcc"):
|
||||
return "gcc"
|
||||
if os.path.basename(CC) == "clang":
|
||||
return "clang"
|
||||
try:
|
||||
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
result = exec_command([CC, "-v"], env=sysenv)
|
||||
except OSError:
|
||||
return None
|
||||
if result["returncode"] != 0:
|
||||
return None
|
||||
output = "".join([result["out"], result["err"]]).lower()
|
||||
if "clang" in output and "LLVM" in output:
|
||||
if "clang version" in output:
|
||||
return "clang"
|
||||
if "gcc" in output:
|
||||
return "gcc"
|
||||
|
@ -75,9 +75,11 @@ def LoadPioPlatform(env):
|
||||
continue
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path,
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
|
@ -61,7 +61,7 @@ def CleanProject(env, fullclean=False):
|
||||
print("Done cleaning")
|
||||
|
||||
|
||||
def AddTarget( # pylint: disable=too-many-arguments
|
||||
def AddTarget( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
env,
|
||||
name,
|
||||
dependencies,
|
||||
|
@ -218,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if data_max_size and data_size > data_max_size:
|
||||
sys.stderr.write(
|
||||
"Warning! The data size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
|
||||
)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
|
@ -19,7 +19,6 @@ import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from os.path import dirname, isfile
|
||||
from time import time
|
||||
|
||||
import click
|
||||
@ -60,7 +59,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
@ -77,7 +76,7 @@ def cli(
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# find project directory on upper level
|
||||
if isfile(project_dir):
|
||||
if os.path.isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
results = []
|
||||
@ -103,12 +102,23 @@ def cli(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_src_filters = [
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "src_dir")),
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
|
||||
]
|
||||
default_src_filters = []
|
||||
for d in (
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
):
|
||||
try:
|
||||
default_src_filters.append("+<%s>" % os.path.relpath(d))
|
||||
except ValueError as exc:
|
||||
# On Windows if sources are located on a different logical drive
|
||||
if not json_output and not silent:
|
||||
click.echo(
|
||||
"Error: Project cannot be analyzed! The project folder `%s`"
|
||||
" is located on a different logical drive\n" % d
|
||||
)
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
src_filters = (
|
||||
env_src_filters = (
|
||||
src_filters
|
||||
or pattern
|
||||
or env_options.get(
|
||||
@ -120,11 +130,13 @@ def cli(
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
silent=silent,
|
||||
src_filters=src_filters,
|
||||
src_filters=env_src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity"),
|
||||
severity=(
|
||||
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity")
|
||||
),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
platform_packages=env_options.get("platform_packages"),
|
||||
)
|
||||
@ -137,14 +149,16 @@ def cli(
|
||||
print_processing_header(tool, envname, env_dump)
|
||||
|
||||
ct = CheckToolFactory.new(
|
||||
tool, project_dir, config, envname, tool_options
|
||||
tool, os.getcwd(), config, envname, tool_options
|
||||
)
|
||||
|
||||
result = {"env": envname, "tool": tool, "duration": time()}
|
||||
rc = ct.check(
|
||||
on_defect_callback=None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
on_defect_callback=(
|
||||
None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
)
|
||||
)
|
||||
|
||||
result["defects"] = ct.get_defects()
|
||||
@ -235,12 +249,12 @@ def collect_component_stats(result):
|
||||
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
|
||||
|
||||
for defect in result.get("defects", []):
|
||||
component = dirname(defect.file) or defect.file
|
||||
component = os.path.dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = dirname(component)
|
||||
component = os.path.dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
||||
return components
|
||||
|
@ -29,7 +29,7 @@ class DefectItem:
|
||||
SEVERITY_LOW = 4
|
||||
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
|
||||
|
||||
def __init__(
|
||||
def __init__( # pylint: disable=too-many-positional-arguments
|
||||
self,
|
||||
severity,
|
||||
category,
|
||||
|
@ -63,7 +63,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
|
||||
ctx,
|
||||
src,
|
||||
lib,
|
||||
|
@ -152,7 +152,7 @@ def cli(ctx, **options):
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
def lib_install( # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
click.secho(
|
||||
@ -210,7 +210,7 @@ def lib_uninstall(ctx, libraries, save, silent):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_update( # pylint: disable=too-many-arguments
|
||||
def lib_update( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
@ -159,7 +159,7 @@ def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branc
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
@click.pass_context
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
def platform_install( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx,
|
||||
platforms,
|
||||
with_package,
|
||||
@ -224,7 +224,7 @@ def platform_uninstall(ctx, platforms):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
def platform_update( # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
|
||||
ctx, platforms, only_check, dry_run, silent, json_output, **_
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
|
||||
@click.pass_context
|
||||
def settings_reset(ctx):
|
||||
app.reset_settings()
|
||||
click.secho("The settings have been reseted!", fg="green")
|
||||
click.secho("The settings have been reset!", fg="green")
|
||||
ctx.invoke(settings_get)
|
||||
|
@ -18,7 +18,8 @@ import subprocess
|
||||
|
||||
import click
|
||||
|
||||
from platformio import VERSION, __install_requires__, __version__, app, exception
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.dependencies import get_pip_dependencies
|
||||
from platformio.http import fetch_remote_content
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.proc import get_pythonexe_path
|
||||
@ -37,7 +38,7 @@ DEVELOP_INIT_SCRIPT_URL = (
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def cli(dev, only_dependencies, verbose):
|
||||
if only_dependencies:
|
||||
return upgrade_pypi_dependencies(verbose)
|
||||
return upgrade_pip_dependencies(verbose)
|
||||
|
||||
update_core_packages()
|
||||
|
||||
@ -102,7 +103,7 @@ def cli(dev, only_dependencies, verbose):
|
||||
return True
|
||||
|
||||
|
||||
def upgrade_pypi_dependencies(verbose):
|
||||
def upgrade_pip_dependencies(verbose):
|
||||
subprocess.run(
|
||||
[
|
||||
get_pythonexe_path(),
|
||||
@ -111,7 +112,7 @@ def upgrade_pypi_dependencies(verbose):
|
||||
"install",
|
||||
"--upgrade",
|
||||
"pip",
|
||||
*__install_requires__,
|
||||
*get_pip_dependencies(),
|
||||
],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE if not verbose else None,
|
||||
|
@ -17,6 +17,7 @@
|
||||
import importlib.util
|
||||
import inspect
|
||||
import locale
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
@ -41,10 +42,14 @@ else:
|
||||
if sys.version_info >= (3, 9):
|
||||
from asyncio import to_thread as aio_to_thread
|
||||
else:
|
||||
from starlette.concurrency import run_in_threadpool as aio_to_thread
|
||||
try:
|
||||
from starlette.concurrency import run_in_threadpool as aio_to_thread
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2 # DO NOT REMOVE IT. ESP8266/ESP32 depend on it
|
||||
PY36 = sys.version_info[0:2] == (3, 6)
|
||||
IS_CYGWIN = sys.platform.startswith("cygwin")
|
||||
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
|
||||
IS_MACOS = sys.platform.startswith("darwin")
|
||||
@ -132,3 +137,12 @@ def path_to_unicode(path):
|
||||
and custom device monitor filters
|
||||
"""
|
||||
return path
|
||||
|
||||
|
||||
def is_proxy_set(socks=False):
|
||||
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
|
||||
value = os.getenv(var, os.getenv(var.lower()))
|
||||
if not value or (socks and not value.startswith("socks5://")):
|
||||
continue
|
||||
return True
|
||||
return False
|
||||
|
@ -55,9 +55,9 @@ from platformio.project.options import ProjectOptions
|
||||
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
@ -65,10 +65,13 @@ def cli(
|
||||
load_mode,
|
||||
verbose,
|
||||
interface,
|
||||
__unprocessed,
|
||||
client_extra_args,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
if not interface and client_extra_args:
|
||||
raise click.UsageError("Please specify debugging interface")
|
||||
|
||||
# use env variables from Eclipse or CLion
|
||||
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
@ -83,7 +86,7 @@ def cli(
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
ctx, os.getcwd(), project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
configure_args = (
|
||||
@ -92,7 +95,7 @@ def cli(
|
||||
env_name,
|
||||
load_mode,
|
||||
verbose,
|
||||
__unprocessed,
|
||||
client_extra_args,
|
||||
)
|
||||
if helpers.is_gdbmi_mode():
|
||||
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"
|
||||
@ -103,19 +106,21 @@ def cli(
|
||||
else:
|
||||
debug_config = _configure(*configure_args)
|
||||
|
||||
_run(project_dir, debug_config, __unprocessed)
|
||||
_run(os.getcwd(), debug_config, client_extra_args)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed):
|
||||
def _configure(
|
||||
ctx, project_config, env_name, load_mode, verbose, client_extra_args
|
||||
): # pylint: disable=too-many-positional-arguments
|
||||
platform = PlatformFactory.from_env(env_name, autoinstall=True)
|
||||
debug_config = DebugConfigFactory.new(
|
||||
platform,
|
||||
project_config,
|
||||
env_name,
|
||||
)
|
||||
if "--version" in __unprocessed:
|
||||
if "--version" in client_extra_args:
|
||||
raise ReturnErrorCode(
|
||||
subprocess.run(
|
||||
[debug_config.client_executable_path, "--version"], check=True
|
||||
@ -161,12 +166,12 @@ def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed)
|
||||
return debug_config
|
||||
|
||||
|
||||
def _run(project_dir, debug_config, __unprocessed):
|
||||
def _run(project_dir, debug_config, client_extra_args):
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
client = GDBClientProcess(project_dir, debug_config)
|
||||
coro = client.run(__unprocessed)
|
||||
coro = client.run(client_extra_args)
|
||||
try:
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
loop.run_until_complete(coro)
|
||||
|
@ -24,7 +24,9 @@ from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, platform, project_config, env_name, port=None):
|
||||
DEFAULT_PORT = None
|
||||
|
||||
def __init__(self, platform, project_config, env_name):
|
||||
self.platform = platform
|
||||
self.project_config = project_config
|
||||
self.env_name = env_name
|
||||
@ -48,7 +50,6 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
self._load_cmds = None
|
||||
self._port = None
|
||||
|
||||
self.port = port
|
||||
self.server = self._configure_server()
|
||||
|
||||
try:
|
||||
@ -120,8 +121,10 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
@property
|
||||
def port(self):
|
||||
return (
|
||||
self.env_options.get("debug_port", self.tool_settings.get("port"))
|
||||
or self._port
|
||||
self._port
|
||||
or self.env_options.get("debug_port")
|
||||
or self.tool_settings.get("port")
|
||||
or self.DEFAULT_PORT
|
||||
)
|
||||
|
||||
@port.setter
|
||||
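The port property above replaces the old per-instance `port` constructor argument with a class-level DEFAULT_PORT and resolves the debug port through a fixed fallback chain. A minimal sketch of that resolution order (simplified, not the actual DebugConfigBase code):

```python
class PortFallbackExample:
    """Illustrates the lookup order used by the `port` property above."""

    DEFAULT_PORT = ":3333"  # per-tool default, e.g. GenericDebugConfig below

    def __init__(self, env_options=None, tool_settings=None):
        self._port = None
        self.env_options = env_options or {}
        self.tool_settings = tool_settings or {}

    @property
    def port(self):
        return (
            self._port                             # explicitly assigned port wins
            or self.env_options.get("debug_port")  # then `debug_port` from platformio.ini
            or self.tool_settings.get("port")      # then the debug tool's own setting
            or self.DEFAULT_PORT                   # finally the class-level default
        )

    @port.setter
    def port(self, value):
        self._port = value
```
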
@ -145,7 +148,9 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
|
||||
data = load_build_metadata(
|
||||
os.getcwd(), self.env_name, cache=True, build_type="debug"
|
||||
)
|
||||
if not data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
return data
|
||||
@ -191,9 +196,11 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=result.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
(
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
)
|
||||
for a in result.get("arguments", [])
|
||||
],
|
||||
)
|
||||
|
@ -27,17 +27,13 @@ class DebugConfigFactory:
|
||||
|
||||
@classmethod
|
||||
def new(cls, platform, project_config, env_name):
|
||||
board_config = platform.board_config(
|
||||
project_config.get("env:" + env_name, "board")
|
||||
)
|
||||
tool_name = (
|
||||
board_config.get_debug_tool_name(
|
||||
project_config.get("env:" + env_name, "debug_tool")
|
||||
)
|
||||
if board_config
|
||||
else None
|
||||
)
|
||||
board_id = project_config.get("env:" + env_name, "board")
|
||||
config_cls = None
|
||||
tool_name = None
|
||||
if board_id:
|
||||
tool_name = platform.board_config(
|
||||
project_config.get("env:" + env_name, "board")
|
||||
).get_debug_tool_name(project_config.get("env:" + env_name, "debug_tool"))
|
||||
try:
|
||||
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
|
||||
config_cls = getattr(mod, cls.get_clsname(tool_name))
|
||||
|
@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class GenericDebugConfig(DebugConfigBase):
|
||||
DEFAULT_PORT = ":3333"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
@ -31,8 +32,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class JlinkDebugConfig(DebugConfigBase):
|
||||
DEFAULT_PORT = ":2331"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
@ -36,11 +37,6 @@ $LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2331"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super().server_ready_pattern or ("Waiting for GDB connection")
|
||||
|
@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class MspdebugDebugConfig(DebugConfigBase):
|
||||
DEFAULT_PORT = ":2000"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
@ -29,8 +30,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":2000"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class QemuDebugConfig(DebugConfigBase):
|
||||
DEFAULT_PORT = ":1234"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
@ -30,8 +31,3 @@ $LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":1234"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
|
||||
|
||||
|
||||
class RenodeDebugConfig(DebugConfigBase):
|
||||
DEFAULT_PORT = ":3333"
|
||||
GDB_INIT_SCRIPT = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
@ -33,11 +34,6 @@ $INIT_BREAK
|
||||
monitor start
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "port" not in kwargs:
|
||||
kwargs["port"] = ":3333"
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def server_ready_pattern(self):
|
||||
return super().server_ready_pattern or (
|
||||
|
@ -76,7 +76,7 @@ def get_default_debug_env(config):
|
||||
|
||||
def predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
): # pylint: disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
debug_testname = project_config.get("env:" + env_name, "debug_test")
|
||||
if debug_testname:
|
||||
test_names = list_test_names(project_config)
|
||||
|
@ -62,7 +62,9 @@ class DebugServerProcess(DebugBaseProcess):
|
||||
|
||||
openocd_pipe_allowed = all(
|
||||
[
|
||||
not self.debug_config.env_options.get("debug_port"),
|
||||
not self.debug_config.env_options.get(
|
||||
"debug_port", self.debug_config.tool_settings.get("port")
|
||||
),
|
||||
"gdb" in self.debug_config.client_executable_path,
|
||||
"openocd" in server_executable,
|
||||
]
|
||||
|
platformio/dependencies.py (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.compat import is_proxy_set
|
||||
|
||||
|
||||
def get_core_dependencies():
|
||||
return {
|
||||
"contrib-piohome": "~3.4.2",
|
||||
"contrib-pioremote": "~1.0.0",
|
||||
"tool-scons": "~4.40801.0",
|
||||
"tool-cppcheck": "~1.21100.0",
|
||||
"tool-clangtidy": "~1.150005.0",
|
||||
"tool-pvs-studio": "~7.18.0",
|
||||
}
|
||||
|
||||
|
||||
def get_pip_dependencies():
|
||||
core = [
|
||||
"bottle == 0.13.*",
|
||||
"click >=8.0.4, <8.1.8",
|
||||
"colorama",
|
||||
"marshmallow == 3.*",
|
||||
"pyelftools >=0.27, <1",
|
||||
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
|
||||
"requests%s == 2.*" % ("[socks]" if is_proxy_set(socks=True) else ""),
|
||||
"semantic_version == 2.10.*",
|
||||
"tabulate == 0.*",
|
||||
]
|
||||
|
||||
home = [
|
||||
# PIO Home requirements
|
||||
"ajsonrpc == 1.2.*",
|
||||
"starlette >=0.19, <0.47",
|
||||
"uvicorn >=0.16, <0.35",
|
||||
"wsproto == 1.*",
|
||||
]
|
||||
|
||||
extra = []
|
||||
# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
|
||||
extra.append(
|
||||
'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
|
||||
)
|
||||
|
||||
# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
|
||||
try:
|
||||
import ssl # pylint: disable=import-outside-toplevel
|
||||
|
||||
if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
):
|
||||
extra.append("urllib3<2")
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return core + home + extra
|
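The requirement strings returned above are plain PEP 508 specifiers (including the marker-guarded entries in `extra`), so they can be handed straight to pip, which evaluates the environment markers itself. A minimal sketch, assuming a hypothetical helper name `install_pip_dependencies` that is not part of PlatformIO:

```
# Hypothetical helper: install the computed requirement specifiers with the
# pip of the current interpreter.
import subprocess
import sys

def install_pip_dependencies(specs):
    subprocess.check_call([sys.executable, "-m", "pip", "install", *specs])

# install_pip_dependencies(get_pip_dependencies())
```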
@ -89,7 +89,7 @@ def is_serial_port_ready(port, timeout=1):
|
||||
|
||||
|
||||
class SerialPortFinder:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
board_config=None,
|
||||
upload_protocol=None,
|
||||
|
@ -144,9 +144,9 @@ def list_mdns_services():
|
||||
if service.properties:
|
||||
try:
|
||||
properties = {
|
||||
k.decode("utf8"): v.decode("utf8")
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
k.decode("utf8"): (
|
||||
v.decode("utf8") if isinstance(v, bytes) else v
|
||||
)
|
||||
for k, v in service.properties.items()
|
||||
}
|
||||
json.dumps(properties)
|
||||
|
@ -58,7 +58,7 @@ from platformio.project.options import ProjectOptions
|
||||
"--encoding",
|
||||
help=(
|
||||
"Set the encoding for the serial port "
|
||||
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
|
||||
"(e.g. hexlify, Latin-1, UTF-8) [default=%s]"
|
||||
% ProjectOptions["env.monitor_encoding"].default
|
||||
),
|
||||
)
|
||||
@ -125,9 +125,11 @@ def device_monitor_cmd(**options):
|
||||
options = apply_project_monitor_options(options, project_options)
|
||||
register_filters(platform=platform, options=options)
|
||||
options["port"] = SerialPortFinder(
|
||||
board_config=platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None,
|
||||
board_config=(
|
||||
platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None
|
||||
),
|
||||
upload_protocol=project_options.get("upload_protocol"),
|
||||
ensure_ready=True,
|
||||
).find(initial_port=options["port"])
|
||||
|
@ -25,11 +25,12 @@ from platformio.project.config import ProjectConfig
|
||||
class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def __init__(self, options=None):
|
||||
"""Called by PlatformIO to pass context"""
|
||||
miniterm.Transform.__init__(self)
|
||||
super().__init__()
|
||||
|
||||
self.options = options or {}
|
||||
self.project_dir = self.options.get("project_dir")
|
||||
self.environment = self.options.get("environment")
|
||||
self._running_terminal = None
|
||||
|
||||
self.config = ProjectConfig.get_instance()
|
||||
if not self.environment:
|
||||
@ -47,6 +48,12 @@ class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def NAME(self):
|
||||
raise NotImplementedError("Please declare NAME attribute for the filter class")
|
||||
|
||||
def set_running_terminal(self, terminal):
|
||||
self._running_terminal = terminal
|
||||
|
||||
def get_running_terminal(self):
|
||||
return self._running_terminal
|
||||
|
||||
|
||||
def register_filters(platform=None, options=None):
|
||||
# project filters
|
||||
|
@ -24,12 +24,18 @@ class Hexlify(DeviceMonitorFilterBase):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._counter = 0
|
||||
|
||||
def set_running_terminal(self, terminal):
|
||||
# force to Latin-1, issue #4732
|
||||
if terminal.input_encoding == "UTF-8":
|
||||
terminal.set_rx_encoding("Latin-1")
|
||||
super().set_running_terminal(terminal)
|
||||
|
||||
def rx(self, text):
|
||||
result = ""
|
||||
for b in serial.iterbytes(text):
|
||||
for c in serial.iterbytes(text):
|
||||
if (self._counter % 16) == 0:
|
||||
result += "\n{:04X} | ".format(self._counter)
|
||||
asciicode = ord(b)
|
||||
asciicode = ord(c)
|
||||
if asciicode <= 255:
|
||||
result += "{:02X} ".format(asciicode)
|
||||
else:
|
||||
|
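The renamed loop variable in the hunk above does not change the output format. For reference, the 16-bytes-per-row dump the Hexlify filter builds can be sketched standalone (simplified: the real filter keeps its counter across calls and handles code points above 255 separately):

```
def hexdump(data: bytes) -> str:
    # One "NNNN | " offset header every 16 bytes, then two-digit hex per byte
    result = ""
    for counter, byte in enumerate(data):
        if counter % 16 == 0:
            result += "\n{:04X} | ".format(counter)
        result += "{:02X} ".format(byte)
    return result

# print(hexdump(b"Hello, PlatformIO!"))
```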
@ -110,6 +110,12 @@ def new_terminal(options):
|
||||
term.raw = options["raw"]
|
||||
term.set_rx_encoding(options["encoding"])
|
||||
term.set_tx_encoding(options["encoding"])
|
||||
for ts in (term.tx_transformations, term.rx_transformations):
|
||||
for t in ts:
|
||||
try:
|
||||
t.set_running_terminal(term)
|
||||
except AttributeError:
|
||||
pass
|
||||
return term
|
||||
|
||||
|
||||
|
@ -210,7 +210,7 @@ def change_filemtime(path, mtime):
|
||||
|
||||
|
||||
def rmtree(path):
|
||||
def _onerror(func, path, __):
|
||||
def _onexc(func, path, _):
|
||||
try:
|
||||
st_mode = os.stat(path).st_mode
|
||||
if st_mode & stat.S_IREAD:
|
||||
@ -223,4 +223,7 @@ def rmtree(path):
|
||||
err=True,
|
||||
)
|
||||
|
||||
return shutil.rmtree(path, onerror=_onerror)
|
||||
# pylint: disable=unexpected-keyword-arg, deprecated-argument
|
||||
if sys.version_info < (3, 12):
|
||||
return shutil.rmtree(path, onerror=_onexc)
|
||||
return shutil.rmtree(path, onexc=_onexc)
|
||||
|
@ -372,15 +372,19 @@ class ProjectRPC(BaseRPCHandler):
|
||||
|
||||
return dict(
|
||||
platform=dict(
|
||||
ownername=platform_pkg.metadata.spec.owner
|
||||
if platform_pkg.metadata.spec
|
||||
else None,
|
||||
ownername=(
|
||||
platform_pkg.metadata.spec.owner
|
||||
if platform_pkg.metadata.spec
|
||||
else None
|
||||
),
|
||||
name=platform.name,
|
||||
title=platform.title,
|
||||
version=str(platform_pkg.metadata.version),
|
||||
),
|
||||
board=platform.board_config(board_id).get_brief_data()
|
||||
if board_id
|
||||
else None,
|
||||
board=(
|
||||
platform.board_config(board_id).get_brief_data()
|
||||
if board_id
|
||||
else None
|
||||
),
|
||||
frameworks=frameworks or None,
|
||||
)
|
||||
|
@ -13,7 +13,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
from urllib.parse import urljoin
|
||||
|
||||
@ -22,6 +21,7 @@ from urllib3.util.retry import Retry
|
||||
|
||||
from platformio import __check_internet_hosts__, app, util
|
||||
from platformio.cache import ContentCache, cleanup_content_cache
|
||||
from platformio.compat import is_proxy_set
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
@ -63,9 +63,11 @@ class HTTPSession(requests.Session):
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
return super().request(
|
||||
method,
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url),
|
||||
(
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url)
|
||||
),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
@ -188,12 +190,11 @@ class HTTPClient:
|
||||
@util.memoized(expire="10s")
|
||||
def _internet_on():
|
||||
timeout = 2
|
||||
use_proxy = is_proxy_set()
|
||||
socket.setdefaulttimeout(timeout)
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
|
||||
if not os.getenv(var) and not os.getenv(var.lower()):
|
||||
continue
|
||||
if use_proxy:
|
||||
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
return True
|
||||
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
|
||||
@ -203,6 +204,15 @@ def _internet_on():
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# falling back to HTTPs, issue #4980
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
requests.get("https://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
except requests.exceptions.RequestException:
|
||||
pass
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
@ -54,7 +54,7 @@ def package_exec_cmd(obj, package, call, args):
|
||||
os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()
|
||||
|
||||
# inject current python interpreter on Windows
|
||||
if args[0].endswith(".py"):
|
||||
if args and args[0].endswith(".py"):
|
||||
args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
|
||||
if not os.path.exists(args[1]):
|
||||
args[1] = where_is_program(args[1])
|
||||
|
@ -222,9 +222,11 @@ def _install_project_env_libraries(project_env, options):
|
||||
|
||||
env_lm = LibraryPackageManager(
|
||||
os.path.join(config.get("platformio", "libdeps_dir"), project_env),
|
||||
compatibility=PackageCompatibility(**compatibility_qualifiers)
|
||||
if compatibility_qualifiers
|
||||
else None,
|
||||
compatibility=(
|
||||
PackageCompatibility(**compatibility_qualifiers)
|
||||
if compatibility_qualifiers
|
||||
else None
|
||||
),
|
||||
)
|
||||
private_lm = LibraryPackageManager(
|
||||
os.path.join(config.get("platformio", "lib_dir"))
|
||||
@ -295,7 +297,11 @@ def _install_project_private_library_deps(private_pkg, private_lm, env_lm, optio
|
||||
if not spec.external and not spec.owner:
|
||||
continue
|
||||
pkg = private_lm.get_package(spec)
|
||||
if not pkg and not env_lm.get_package(spec):
|
||||
if (
|
||||
not pkg
|
||||
and not private_lm.get_package(spec)
|
||||
and not env_lm.get_package(spec)
|
||||
):
|
||||
pkg = env_lm.install(
|
||||
spec,
|
||||
skip_dependencies=True,
|
||||
|
@ -82,10 +82,11 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
help="Do not show interactive prompt",
|
||||
hidden=True,
|
||||
)
|
||||
def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
|
||||
def package_publish_cmd( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals
|
||||
package, owner, typex, released_at, private, notify, no_interactive, non_interactive
|
||||
):
|
||||
click.secho("Preparing a package...", fg="cyan")
|
||||
package = os.path.abspath(package)
|
||||
no_interactive = no_interactive or non_interactive
|
||||
owner = owner or AccountClient().get_logged_username()
|
||||
do_not_pack = (
|
||||
|
@ -65,10 +65,12 @@ def print_search_item(item):
|
||||
click.echo(
|
||||
"%s • %s • Published on %s"
|
||||
% (
|
||||
item["type"].capitalize()
|
||||
if item["tier"] == "community"
|
||||
else click.style(
|
||||
("%s %s" % (item["tier"], item["type"])).title(), bold=True
|
||||
(
|
||||
item["type"].capitalize()
|
||||
if item["tier"] == "community"
|
||||
else click.style(
|
||||
("%s %s" % (item["tier"], item["type"])).title(), bold=True
|
||||
)
|
||||
),
|
||||
item["version"]["name"],
|
||||
util.parse_datetime(item["version"]["released_at"]).strftime("%c"),
|
||||
|
@ -111,7 +111,7 @@ def uninstall_project_env_dependencies(project_env, options=None):
|
||||
uninstalled_conds.append(
|
||||
_uninstall_project_env_custom_tools(project_env, options)
|
||||
)
|
||||
# custom ibraries
|
||||
# custom libraries
|
||||
if options.get("libraries"):
|
||||
uninstalled_conds.append(
|
||||
_uninstall_project_env_custom_libraries(project_env, options)
|
||||
|
@ -110,7 +110,7 @@ def update_project_env_dependencies(project_env, options=None):
|
||||
# custom tools
|
||||
if options.get("tools"):
|
||||
updated_conds.append(_update_project_env_custom_tools(project_env, options))
|
||||
# custom ibraries
|
||||
# custom libraries
|
||||
if options.get("libraries"):
|
||||
updated_conds.append(_update_project_env_custom_libraries(project_env, options))
|
||||
# declared dependencies
|
||||
|
@ -34,7 +34,7 @@ class FileDownloader:
|
||||
url,
|
||||
stream=True,
|
||||
)
|
||||
if self._http_response.status_code != 200:
|
||||
if self._http_response.status_code not in (200, 203):
|
||||
raise PackageException(
|
||||
"Got the unrecognized status code '{0}' when downloaded {1}".format(
|
||||
self._http_response.status_code, url
|
||||
|
@ -98,9 +98,13 @@ class PackageManagerInstallMixin:
|
||||
else:
|
||||
pkg = self.install_from_registry(
|
||||
spec,
|
||||
search_qualifiers=compatibility.to_search_qualifiers()
|
||||
if compatibility
|
||||
else None,
|
||||
search_qualifiers=(
|
||||
compatibility.to_search_qualifiers(
|
||||
["platforms", "frameworks", "authors"]
|
||||
)
|
||||
if compatibility
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
if not pkg or not pkg.metadata:
|
||||
|
@ -280,11 +280,15 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in
|
||||
|
||||
# external "URL" mismatch
|
||||
if spec.external:
|
||||
# local folder mismatch
|
||||
if os.path.abspath(spec.uri) == os.path.abspath(pkg.path) or (
|
||||
# local/symlinked folder mismatch
|
||||
check_conds = [
|
||||
os.path.abspath(spec.uri) == os.path.abspath(pkg.path),
|
||||
spec.uri.startswith("file://")
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:])
|
||||
):
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:]),
|
||||
spec.uri.startswith("symlink://")
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[10:]),
|
||||
]
|
||||
if any(check_conds):
|
||||
return True
|
||||
if spec.uri != pkg.metadata.spec.uri:
|
||||
return False
|
||||
|
@ -14,7 +14,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from platformio import __core_packages__, exception
|
||||
from platformio import exception
|
||||
from platformio.dependencies import get_core_dependencies
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
@ -23,7 +24,7 @@ from platformio.package.meta import PackageSpec
|
||||
def get_installed_core_packages():
|
||||
result = []
|
||||
pm = ToolPackageManager()
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
@ -32,11 +33,11 @@ def get_installed_core_packages():
|
||||
|
||||
|
||||
def get_core_package_dir(name, spec=None, auto_install=True):
|
||||
if name not in __core_packages__:
|
||||
if name not in get_core_dependencies():
|
||||
raise exception.PlatformioException("Please upgrade PlatformIO Core")
|
||||
pm = ToolPackageManager()
|
||||
spec = spec or PackageSpec(
|
||||
owner="platformio", name=name, requirements=__core_packages__[name]
|
||||
owner="platformio", name=name, requirements=get_core_dependencies()[name]
|
||||
)
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
@ -50,7 +51,7 @@ def get_core_package_dir(name, spec=None, auto_install=True):
|
||||
|
||||
def update_core_packages():
|
||||
pm = ToolPackageManager()
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
try:
|
||||
pm.update(spec, spec)
|
||||
@ -65,7 +66,7 @@ def remove_unnecessary_core_packages(dry_run=False):
|
||||
pm = ToolPackageManager()
|
||||
best_pkg_versions = {}
|
||||
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
|
@ -38,7 +38,7 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
|
||||
def manifest_names(self):
|
||||
return PackageType.get_manifest_map()[PackageType.PLATFORM]
|
||||
|
||||
def install( # pylint: disable=arguments-differ,too-many-arguments
|
||||
def install( # pylint: disable=arguments-differ,too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
spec,
|
||||
skip_dependencies=False,
|
||||
|
@ -294,9 +294,11 @@ class BaseManifestParser:
|
||||
if not matched_files:
|
||||
continue
|
||||
result[root] = dict(
|
||||
name="Examples"
|
||||
if root == examples_dir
|
||||
else os.path.relpath(root, examples_dir),
|
||||
name=(
|
||||
"Examples"
|
||||
if root == examples_dir
|
||||
else os.path.relpath(root, examples_dir)
|
||||
),
|
||||
base=os.path.relpath(root, package_dir),
|
||||
files=matched_files,
|
||||
)
|
||||
@ -540,6 +542,8 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
|
||||
"esp32": "espressif32",
|
||||
"arc32": "intel_arc32",
|
||||
"stm32": "ststm32",
|
||||
"nrf52": "nordicnrf52",
|
||||
"rp2040": "raspberrypi",
|
||||
}
|
||||
for arch in properties.get("architectures", "").split(","):
|
||||
if "particle-" in arch:
|
||||
|
@ -276,7 +276,7 @@ class ManifestSchema(BaseSchema):
|
||||
@staticmethod
|
||||
@memoized(expire="1h")
|
||||
def load_spdx_licenses():
|
||||
version = "3.21"
|
||||
version = "3.26.0"
|
||||
spdx_data_url = (
|
||||
"https://raw.githubusercontent.com/spdx/license-list-data/"
|
||||
f"v{version}/json/licenses.json"
|
||||
|
@ -65,7 +65,14 @@ class PackageType:
|
||||
|
||||
|
||||
class PackageCompatibility:
|
||||
KNOWN_QUALIFIERS = ("platforms", "frameworks", "authors")
|
||||
KNOWN_QUALIFIERS = (
|
||||
"owner",
|
||||
"name",
|
||||
"version",
|
||||
"platforms",
|
||||
"frameworks",
|
||||
"authors",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_dependency(cls, dependency):
|
||||
@ -89,19 +96,45 @@ class PackageCompatibility:
|
||||
def __repr__(self):
|
||||
return "PackageCompatibility <%s>" % self.qualifiers
|
||||
|
||||
def to_search_qualifiers(self):
|
||||
return self.qualifiers
|
||||
def to_search_qualifiers(self, fields=None):
|
||||
result = {}
|
||||
for name, value in self.qualifiers.items():
|
||||
if not fields or name in fields:
|
||||
result[name] = value
|
||||
return result
|
||||
|
||||
def is_compatible(self, other):
|
||||
assert isinstance(other, PackageCompatibility)
|
||||
for key, value in self.qualifiers.items():
|
||||
for key, current_value in self.qualifiers.items():
|
||||
other_value = other.qualifiers.get(key)
|
||||
if not value or not other_value:
|
||||
if not current_value or not other_value:
|
||||
continue
|
||||
if not items_in_list(value, other_value):
|
||||
if any(isinstance(v, list) for v in (current_value, other_value)):
|
||||
if not items_in_list(current_value, other_value):
|
||||
return False
|
||||
continue
|
||||
if key == "version":
|
||||
if not self._compare_versions(current_value, other_value):
|
||||
return False
|
||||
continue
|
||||
if current_value != other_value:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _compare_versions(self, current, other):
|
||||
if current == other:
|
||||
return True
|
||||
try:
|
||||
version = (
|
||||
other
|
||||
if isinstance(other, semantic_version.Version)
|
||||
else cast_version_to_semver(other)
|
||||
)
|
||||
return version in semantic_version.SimpleSpec(current)
|
||||
except ValueError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
class PackageOutdatedResult:
|
||||
UPDATE_INCREMENT_MAJOR = "major"
|
||||
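The new `version` qualifier is resolved with the same `semantic_version` package pinned in `dependencies.py`; a minimal standalone sketch of that comparison:

```
import semantic_version

def version_matches(spec, candidate):
    # True if `candidate` (e.g. "6.1.0") satisfies `spec` (e.g. ">=6.0.0,<7.0.0")
    try:
        return semantic_version.Version(candidate) in semantic_version.SimpleSpec(spec)
    except ValueError:
        return False

# version_matches(">=6.0.0,<7.0.0", "6.1.0")  -> True
# version_matches(">=6.0.0,<7.0.0", "7.2.0")  -> False
```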
@ -163,7 +196,7 @@ class PackageOutdatedResult:
|
||||
|
||||
|
||||
class PackageSpec: # pylint: disable=too-many-instance-attributes
|
||||
def __init__( # pylint: disable=redefined-builtin,too-many-arguments
|
||||
def __init__( # pylint: disable=redefined-builtin,too-many-arguments,too-many-positional-arguments
|
||||
self, raw=None, owner=None, id=None, name=None, requirements=None, uri=None
|
||||
):
|
||||
self._requirements = None
|
||||
@ -363,7 +396,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
|
||||
parts.path.endswith(".git"),
|
||||
# Handle GitHub URL (https://github.com/user/package)
|
||||
parts.netloc in ("github.com", "gitlab.com", "bitbucket.com")
|
||||
and not parts.path.endswith((".zip", ".tar.gz")),
|
||||
and not parts.path.endswith((".zip", ".tar.gz", ".tar.xz")),
|
||||
]
|
||||
hg_conditions = [
|
||||
# Handle Developer Mbed URL
|
||||
@ -485,9 +518,11 @@ class PackageItem:
|
||||
|
||||
def __eq__(self, other):
|
||||
conds = [
|
||||
os.path.realpath(self.path) == os.path.realpath(other.path)
|
||||
if self.path and other.path
|
||||
else self.path == other.path,
|
||||
(
|
||||
os.path.realpath(self.path) == os.path.realpath(other.path)
|
||||
if self.path and other.path
|
||||
else self.path == other.path
|
||||
),
|
||||
self.metadata == other.metadata,
|
||||
]
|
||||
return all(conds)
|
||||
|
@ -13,6 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from tarfile import open as tarfile_open
|
||||
from time import mktime
|
||||
from zipfile import ZipFile
|
||||
@ -82,19 +83,23 @@ class TARArchiver(BaseArchiver):
|
||||
).startswith(base)
|
||||
|
||||
def extract_item(self, item, dest_dir):
|
||||
if sys.version_info >= (3, 12):
|
||||
self._afo.extract(item, dest_dir, filter="data")
|
||||
return self.after_extract(item, dest_dir)
|
||||
|
||||
# apply custom security logic
|
||||
dest_dir = self.resolve_path(dest_dir)
|
||||
bad_conds = [
|
||||
self.is_bad_path(item.name, dest_dir),
|
||||
self.is_link(item) and self.is_bad_link(item, dest_dir),
|
||||
]
|
||||
if not any(bad_conds):
|
||||
super().extract_item(item, dest_dir)
|
||||
else:
|
||||
click.secho(
|
||||
if any(bad_conds):
|
||||
return click.secho(
|
||||
"Blocked insecure item `%s` from TAR archive" % item.name,
|
||||
fg="red",
|
||||
err=True,
|
||||
)
|
||||
return super().extract_item(item, dest_dir)
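On Python 3.12+ the branch above delegates path-safety checks to the standard library's extraction filters instead of the hand-rolled `is_bad_path`/`is_bad_link` logic. A minimal sketch of that stdlib mechanism, independent of the archiver classes here:

```
import sys
import tarfile

def safe_extract_all(archive_path, dest_dir):
    with tarfile.open(archive_path) as tar:
        if sys.version_info >= (3, 12):
            # The "data" filter rejects absolute names, members escaping
            # dest_dir, and links pointing outside the destination.
            tar.extractall(dest_dir, filter="data")
        else:
            tar.extractall(dest_dir)  # members must be validated manually here

# safe_extract_all("package.tar.gz", "unpacked")
```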
|
||||
|
||||
|
||||
class ZIPArchiver(BaseArchiver):
|
||||
@ -147,6 +152,7 @@ class FileUnpacker:
|
||||
magic_map = {
|
||||
b"\x1f\x8b\x08": TARArchiver,
|
||||
b"\x42\x5a\x68": TARArchiver,
|
||||
b"\xfd\x37\x7a\x58\x5a\x00": TARArchiver,
|
||||
b"\x50\x4b\x03\x04": ZIPArchiver,
|
||||
}
|
||||
magic_len = max(len(k) for k in magic_map)
|
||||
|
@ -44,7 +44,7 @@ def cast_version_to_semver(value, force=True, raise_exception=False):
 
 
 def pepver_to_semver(pepver):
     return cast_version_to_semver(
-        re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
+        re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)
     )
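The only change is passing `count` as a keyword (newer Python releases deprecate supplying it positionally to `re.sub`); the conversion itself can be tried in isolation:

```
import re

def pep440_prerelease_to_semver(pepver):
    # "1.2.3.dev4" -> "1.2.3-dev.4", "2.0.0rc1" -> "2.0.0-rc.1"
    return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)

# print(pep440_prerelease_to_semver("1.2.3.dev4"))
```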
|
||||
|
||||
|
||||
|
@ -44,7 +44,7 @@ class PlatformRunMixin:
|
||||
value = json.loads(value)
|
||||
return value
|
||||
|
||||
def run( # pylint: disable=too-many-arguments
|
||||
def run( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self, variables, targets, silent, verbose, jobs
|
||||
):
|
||||
assert isinstance(variables, dict)
|
||||
@ -116,9 +116,9 @@ class PlatformRunMixin:
|
||||
args,
|
||||
stdout=proc.BuildAsyncPipe(
|
||||
line_callback=self._on_stdout_line,
|
||||
data_callback=lambda data: None
|
||||
if self.silent
|
||||
else _write_and_flush(sys.stdout, data),
|
||||
data_callback=lambda data: (
|
||||
None if self.silent else _write_and_flush(sys.stdout, data)
|
||||
),
|
||||
),
|
||||
stderr=proc.BuildAsyncPipe(
|
||||
line_callback=self._on_stderr_line,
|
||||
|
@ -169,6 +169,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
|
||||
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE
|
||||
|
||||
def board_config(self, id_):
|
||||
assert id_
|
||||
return self.get_boards(id_)
|
||||
|
||||
def get_package_type(self, name):
|
||||
|
@ -33,7 +33,7 @@ class PlatformFactory:
|
||||
|
||||
@staticmethod
|
||||
def load_platform_module(name, path):
|
||||
# backward compatibiility with the legacy dev-platforms
|
||||
# backward compatibility with the legacy dev-platforms
|
||||
sys.modules["platformio.managers.platform"] = base
|
||||
try:
|
||||
return load_python_module("platformio.platform.%s" % name, path)
|
||||
|
@ -69,7 +69,7 @@ class BuildAsyncPipe(AsyncPipeBase):
|
||||
print_immediately = False
|
||||
|
||||
for char in iter(lambda: self._pipe_reader.read(1), ""):
|
||||
self._buffer += char
|
||||
# self._buffer += char
|
||||
|
||||
if line and char.strip() and line[-3:] == (char * 3):
|
||||
print_immediately = True
|
||||
|
@ -82,9 +82,11 @@ def lint_configuration(json_output=False):
|
||||
(
|
||||
click.style(error["type"], fg="red"),
|
||||
error["message"],
|
||||
error.get("source", "") + (f":{error.get('lineno')}")
|
||||
if "lineno" in error
|
||||
else "",
|
||||
(
|
||||
error.get("source", "") + (f":{error.get('lineno')}")
|
||||
if "lineno" in error
|
||||
else ""
|
||||
),
|
||||
)
|
||||
for error in errors
|
||||
],
|
||||
|
@ -68,7 +68,7 @@ def validate_boards(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("--no-install-dependencies", is_flag=True)
|
||||
@click.option("--env-prefix", default="")
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
def project_init_cmd(
|
||||
def project_init_cmd( # pylint: disable=too-many-positional-arguments
|
||||
project_dir,
|
||||
boards,
|
||||
ide,
|
||||
@ -79,6 +79,7 @@ def project_init_cmd(
|
||||
env_prefix,
|
||||
silent,
|
||||
):
|
||||
project_dir = os.path.abspath(project_dir)
|
||||
is_new_project = not is_platformio_project(project_dir)
|
||||
if is_new_project:
|
||||
if not silent:
|
||||
@ -200,9 +201,7 @@ new version when next recompiled. The header file eliminates the labor of
|
||||
finding and changing all the copies as well as the risk that a failure to
|
||||
find one copy will result in inconsistencies within a program.
|
||||
|
||||
In C, the usual convention is to give header files names that end with `.h'.
|
||||
It is most portable to use only letters, digits, dashes, and underscores in
|
||||
header file names, and at most one dot.
|
||||
In C, the convention is to give header files names that end with `.h'.
|
||||
|
||||
Read more about using header files in official GCC documentation:
|
||||
|
||||
@ -221,12 +220,12 @@ def init_lib_readme(lib_dir):
|
||||
fp.write(
|
||||
"""
|
||||
This directory is intended for project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link into executable file.
|
||||
PlatformIO will compile them to static libraries and link into the executable file.
|
||||
|
||||
The source code of each library should be placed in a an own separate directory
|
||||
("lib/your_library_name/[here are source files]").
|
||||
The source code of each library should be placed in a separate directory
|
||||
("lib/your_library_name/[Code]").
|
||||
|
||||
For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
For example, see the structure of the following example libraries `Foo` and `Bar`:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
@ -236,7 +235,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| | |- library.json (optional. for custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
@ -248,7 +247,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
|--src
|
||||
|- main.c
|
||||
|
||||
and a contents of `src/main.c`:
|
||||
Example contents of `src/main.c` using Foo and Bar:
|
||||
```
|
||||
#include <Foo.h>
|
||||
#include <Bar.h>
|
||||
@ -260,8 +259,8 @@ int main (void)
|
||||
|
||||
```
|
||||
|
||||
PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries scanning project source files.
|
||||
The PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries by scanning project source files.
|
||||
|
||||
More information about PlatformIO Library Dependency Finder
|
||||
- https://docs.platformio.org/page/librarymanager/ldf.html
|
||||
|
@ -14,14 +14,16 @@
|
||||
|
||||
import configparser
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import MISSING, string_types
|
||||
from platformio.compat import MISSING, hashlib_encode_data, string_types
|
||||
from platformio.project import exception
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
@ -41,7 +43,17 @@ CONFIG_HEADER = """
|
||||
class ProjectConfigBase:
|
||||
ENVNAME_RE = re.compile(r"^[a-z\d\_\-]+$", flags=re.I)
|
||||
INLINE_COMMENT_RE = re.compile(r"\s+;.*$")
|
||||
VARTPL_RE = re.compile(r"\$\{([^\.\}\()]+)\.([^\}]+)\}")
|
||||
VARTPL_RE = re.compile(r"\$\{(?:([^\.\}\()]+)\.)?([^\}]+)\}")
|
||||
|
||||
BUILTIN_VARS = {
|
||||
"PROJECT_DIR": lambda: os.getcwd(), # pylint: disable=unnecessary-lambda
|
||||
"PROJECT_HASH": lambda: "%s-%s"
|
||||
% (
|
||||
os.path.basename(os.getcwd()),
|
||||
hashlib.sha1(hashlib_encode_data(os.getcwd())).hexdigest()[:10],
|
||||
),
|
||||
"UNIX_TIME": lambda: str(int(time.time())),
|
||||
}
|
||||
|
||||
CUSTOM_OPTION_PREFIXES = ("custom_", "board_")
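The extended `VARTPL_RE` now also matches bare `${NAME}` references, which is what lets the built-in variables above resolve without a section prefix. A rough standalone sketch of the resolution order (built-ins, then `sysenv`, then a section lookup), not the actual `ProjectConfig` code:

```
import os
import re
import time

VARTPL_RE = re.compile(r"\$\{(?:([^\.\}\()]+)\.)?([^\}]+)\}")
BUILTIN_VARS = {
    "PROJECT_DIR": lambda: os.getcwd(),
    "UNIX_TIME": lambda: str(int(time.time())),
}

def expand(value, lookup):
    def handler(match):
        section, option = match.group(1), match.group(2)
        if section is None:  # bare ${NAME}
            if option in BUILTIN_VARS:
                return BUILTIN_VARS[option]()
            return match.group(0)  # leave SCons-style/unknown vars untouched
        if section == "sysenv":
            return os.getenv(option, "")
        return lookup(section, option)  # e.g. another config section's option
    return VARTPL_RE.sub(handler, value)

# expand("${PROJECT_DIR}/.pio", lambda s, o: "")
```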
|
||||
|
||||
@ -152,6 +164,7 @@ class ProjectConfigBase:
|
||||
|
||||
@staticmethod
|
||||
def get_section_scope(section):
|
||||
assert section
|
||||
return section.split(":", 1)[0] if ":" in section else section
|
||||
|
||||
def walk_options(self, root_section):
|
||||
@ -274,7 +287,7 @@ class ProjectConfigBase:
|
||||
value = (
|
||||
default if default != MISSING else self._parser.get(section, option)
|
||||
)
|
||||
return self._expand_interpolations(section, value)
|
||||
return self._expand_interpolations(section, option, value)
|
||||
|
||||
if option_meta.sysenvvar:
|
||||
envvar_value = os.getenv(option_meta.sysenvvar)
|
||||
@ -297,24 +310,50 @@ class ProjectConfigBase:
|
||||
if value == MISSING:
|
||||
return None
|
||||
|
||||
return self._expand_interpolations(section, value)
|
||||
return self._expand_interpolations(section, option, value)
|
||||
|
||||
def _expand_interpolations(self, parent_section, value):
|
||||
if (
|
||||
not value
|
||||
or not isinstance(value, string_types)
|
||||
or not all(["${" in value, "}" in value])
|
||||
):
|
||||
def _expand_interpolations(self, section, option, value):
|
||||
if not value or not isinstance(value, string_types) or not "$" in value:
|
||||
return value
|
||||
|
||||
# legacy support for variables delclared without "${}"
|
||||
legacy_vars = ["PROJECT_HASH"]
|
||||
stop = False
|
||||
while not stop:
|
||||
stop = True
|
||||
for name in legacy_vars:
|
||||
x = value.find(f"${name}")
|
||||
if x < 0 or value[x - 1] == "$":
|
||||
continue
|
||||
value = "%s${%s}%s" % (value[:x], name, value[x + len(name) + 1 :])
|
||||
stop = False
|
||||
warn_msg = (
|
||||
"Invalid variable declaration. Please use "
|
||||
f"`${{{name}}}` instead of `${name}`"
|
||||
)
|
||||
if warn_msg not in self.warnings:
|
||||
self.warnings.append(warn_msg)
|
||||
|
||||
if not all(["${" in value, "}" in value]):
|
||||
return value
|
||||
return self.VARTPL_RE.sub(
|
||||
lambda match: self._re_interpolation_handler(parent_section, match), value
|
||||
lambda match: self._re_interpolation_handler(section, option, match), value
|
||||
)
|
||||
|
||||
def _re_interpolation_handler(self, parent_section, match):
|
||||
def _re_interpolation_handler(self, parent_section, parent_option, match):
|
||||
section, option = match.group(1), match.group(2)
|
||||
|
||||
# handle built-in variables
|
||||
if section is None:
|
||||
if option in self.BUILTIN_VARS:
|
||||
return self.BUILTIN_VARS[option]()
|
||||
# SCons variables
|
||||
return f"${{{option}}}"
|
||||
|
||||
# handle system environment variables
|
||||
if section == "sysenv":
|
||||
return os.getenv(option)
|
||||
|
||||
# handle ${this.*}
|
||||
if section == "this":
|
||||
section = parent_section
|
||||
@ -322,21 +361,18 @@ class ProjectConfigBase:
|
||||
if not parent_section.startswith("env:"):
|
||||
raise exception.ProjectOptionValueError(
|
||||
f"`${{this.__env__}}` is called from the `{parent_section}` "
|
||||
"section that is not valid PlatformIO environment, see",
|
||||
option,
|
||||
" ",
|
||||
section,
|
||||
"section that is not valid PlatformIO environment. Please "
|
||||
f"check `{parent_option}` option in the `{section}` section"
|
||||
)
|
||||
return parent_section[4:]
|
||||
|
||||
# handle nested calls
|
||||
try:
|
||||
value = self.get(section, option)
|
||||
except RecursionError as exc:
|
||||
raise exception.ProjectOptionValueError(
|
||||
"Infinite recursion has been detected",
|
||||
option,
|
||||
" ",
|
||||
section,
|
||||
f"Infinite recursion has been detected for `{option}` "
|
||||
f"option in the `{section}` section"
|
||||
) from exc
|
||||
if isinstance(value, list):
|
||||
return "\n".join(value)
|
||||
@ -363,10 +399,8 @@ class ProjectConfigBase:
|
||||
if not self.expand_interpolations:
|
||||
return value
|
||||
raise exception.ProjectOptionValueError(
|
||||
exc.format_message(),
|
||||
option,
|
||||
" (%s) " % option_meta.description,
|
||||
section,
|
||||
"%s for `%s` option in the `%s` section (%s)"
|
||||
% (exc.format_message(), option, section, option_meta.description)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@ -439,8 +473,9 @@ class ProjectConfigLintMixin:
|
||||
try:
|
||||
config = cls.get_instance(path)
|
||||
config.validate(silent=True)
|
||||
warnings = config.warnings
|
||||
warnings = config.warnings # in case "as_tuple" fails
|
||||
config.as_tuple()
|
||||
warnings = config.warnings
|
||||
except Exception as exc: # pylint: disable=broad-exception-caught
|
||||
if exc.__cause__ is not None:
|
||||
exc = exc.__cause__
|
||||
|
@ -51,4 +51,4 @@ class InvalidEnvNameError(ProjectError, UserSideException):
|
||||
|
||||
|
||||
class ProjectOptionValueError(ProjectError, UserSideException):
|
||||
MESSAGE = "{0} for option `{1}`{2}in section [{3}]"
|
||||
pass
|
||||
|
@ -131,45 +131,47 @@ def compute_project_checksum(config):
|
||||
return checksum.hexdigest()
|
||||
|
||||
|
||||
def load_build_metadata(project_dir, env_or_envs, cache=False, debug=False):
|
||||
def load_build_metadata(project_dir, env_or_envs, cache=False, build_type=None):
|
||||
assert env_or_envs
|
||||
env_names = env_or_envs
|
||||
if not isinstance(env_names, list):
|
||||
env_names = [env_names]
|
||||
|
||||
with fs.cd(project_dir):
|
||||
result = _get_cached_build_metadata(project_dir, env_names) if cache else {}
|
||||
result = _get_cached_build_metadata(env_names) if cache else {}
|
||||
# incompatible build-type data
|
||||
for name in list(result.keys()):
|
||||
build_type = result[name].get("build_type", "")
|
||||
outdated_conds = [
|
||||
not build_type,
|
||||
debug and "debug" not in build_type,
|
||||
not debug and "debug" in build_type,
|
||||
]
|
||||
if any(outdated_conds):
|
||||
del result[name]
|
||||
for env_name in list(result.keys()):
|
||||
if build_type is None:
|
||||
build_type = ProjectConfig.get_instance().get(
|
||||
f"env:{env_name}", "build_type"
|
||||
)
|
||||
if result[env_name].get("build_type", "") != build_type:
|
||||
del result[env_name]
|
||||
missed_env_names = set(env_names) - set(result.keys())
|
||||
if missed_env_names:
|
||||
result.update(_load_build_metadata(project_dir, missed_env_names, debug))
|
||||
result.update(
|
||||
_load_build_metadata(project_dir, missed_env_names, build_type)
|
||||
)
|
||||
|
||||
if not isinstance(env_or_envs, list) and env_or_envs in result:
|
||||
return result[env_or_envs]
|
||||
return result or None
|
||||
|
||||
|
||||
# Backward compatibiility with dev-platforms
|
||||
# Backward compatibility with dev-platforms
|
||||
load_project_ide_data = load_build_metadata
|
||||
|
||||
|
||||
def _load_build_metadata(project_dir, env_names, debug=False):
|
||||
def _load_build_metadata(project_dir, env_names, build_type=None):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio import app
|
||||
from platformio.run.cli import cli as cmd_run
|
||||
|
||||
args = ["--project-dir", project_dir, "--target", "__idedata"]
|
||||
if debug:
|
||||
if build_type == "debug":
|
||||
args.extend(["--target", "__debug"])
|
||||
# if build_type == "test":
|
||||
# args.extend(["--target", "__test"])
|
||||
for name in env_names:
|
||||
args.extend(["-e", name])
|
||||
app.set_session_var("pause_telemetry", True)
|
||||
@ -181,16 +183,16 @@ def _load_build_metadata(project_dir, env_names, debug=False):
|
||||
raise result.exception
|
||||
if '"includes":' not in result.output:
|
||||
raise exception.UserSideException(result.output)
|
||||
return _get_cached_build_metadata(project_dir, env_names)
|
||||
return _get_cached_build_metadata(env_names)
|
||||
|
||||
|
||||
def _get_cached_build_metadata(project_dir, env_names):
|
||||
build_dir = ProjectConfig.get_instance(
|
||||
os.path.join(project_dir, "platformio.ini")
|
||||
).get("platformio", "build_dir")
|
||||
def _get_cached_build_metadata(env_names):
|
||||
build_dir = ProjectConfig.get_instance().get("platformio", "build_dir")
|
||||
result = {}
|
||||
for name in env_names:
|
||||
if not os.path.isfile(os.path.join(build_dir, name, "idedata.json")):
|
||||
for env_name in env_names:
|
||||
if not os.path.isfile(os.path.join(build_dir, env_name, "idedata.json")):
|
||||
continue
|
||||
result[name] = fs.load_json(os.path.join(build_dir, name, "idedata.json"))
|
||||
result[env_name] = fs.load_json(
|
||||
os.path.join(build_dir, env_name, "idedata.json")
|
||||
)
|
||||
return result
|
||||
|
@ -91,9 +91,11 @@ class ProjectGenerator:
|
||||
"default_debug_env_name": get_default_debug_env(self.config),
|
||||
"env_name": self.env_name,
|
||||
"user_home_dir": os.path.abspath(fs.expanduser("~")),
|
||||
"platformio_path": sys.argv[0]
|
||||
if os.path.isfile(sys.argv[0])
|
||||
else where_is_program("platformio"),
|
||||
"platformio_path": (
|
||||
sys.argv[0]
|
||||
if os.path.isfile(sys.argv[0])
|
||||
else where_is_program("platformio")
|
||||
),
|
||||
"env_path": os.getenv("PATH"),
|
||||
"env_pathsep": os.pathsep,
|
||||
}
|
||||
|
@ -17,7 +17,7 @@
|
||||
# common.symbolFiles=<Symbol Files loaded by debugger>
|
||||
# (This value is overwritten by a launcher specific symbolFiles value if the latter exists)
|
||||
#
|
||||
# In runDir, symbolFiles and env fields you can use these macroses:
|
||||
# In runDir, symbolFiles and env fields you can use these macros:
|
||||
# ${PROJECT_DIR} - project directory absolute path
|
||||
# ${OUTPUT_PATH} - linker output path (relative to project directory path)
|
||||
# ${OUTPUT_BASENAME}- linker output filename
|
||||
|
@ -1,4 +1,3 @@
|
||||
% import codecs
|
||||
% import json
|
||||
% import os
|
||||
%
|
||||
@ -47,9 +46,14 @@
|
||||
% return data
|
||||
% end
|
||||
%
|
||||
% def _contains_external_configurations(launch_config):
|
||||
% def _contains_custom_configurations(launch_config):
|
||||
% pio_config_names = [
|
||||
% c["name"]
|
||||
% for c in get_pio_configurations()
|
||||
% ]
|
||||
% return any(
|
||||
% c.get("type", "") != "platformio-debug"
|
||||
% or c.get("name", "") in pio_config_names
|
||||
% for c in launch_config.get("configurations", [])
|
||||
% )
|
||||
% end
|
||||
@ -59,10 +63,14 @@
|
||||
% return launch_config
|
||||
% end
|
||||
%
|
||||
% pio_config_names = [
|
||||
% c["name"]
|
||||
% for c in get_pio_configurations()
|
||||
% ]
|
||||
% external_configurations = [
|
||||
% config
|
||||
% for config in launch_config["configurations"]
|
||||
% if config.get("type", "") != "platformio-debug"
|
||||
% c
|
||||
% for c in launch_config["configurations"]
|
||||
% if c.get("type", "") != "platformio-debug" or c.get("name", "") not in pio_config_names
|
||||
% ]
|
||||
%
|
||||
% launch_config["configurations"] = external_configurations
|
||||
@ -73,11 +81,11 @@
|
||||
% launch_config = {"version": "0.2.0", "configurations": []}
|
||||
% launch_file = os.path.join(project_dir, ".vscode", "launch.json")
|
||||
% if os.path.isfile(launch_file):
|
||||
% with codecs.open(launch_file, "r", encoding="utf8") as fp:
|
||||
% with open(launch_file, "r", encoding="utf8") as fp:
|
||||
% launch_data = _remove_comments(fp.readlines())
|
||||
% try:
|
||||
% prev_config = json.loads(launch_data)
|
||||
% if _contains_external_configurations(prev_config):
|
||||
% if _contains_custom_configurations(prev_config):
|
||||
% launch_config = _remove_pio_configurations(prev_config)
|
||||
% end
|
||||
% except:
|
||||
@ -91,9 +99,9 @@
|
||||
%
|
||||
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
|
||||
//
|
||||
// PIO Unified Debugger
|
||||
// PlatformIO Debugging Solution
|
||||
//
|
||||
// Documentation: https://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
// Documentation: https://docs.platformio.org/en/latest/plus/debugging.html
|
||||
// Configuration: https://docs.platformio.org/en/latest/projectconf/sections/env/options/debug/index.html
|
||||
|
||||
{{ json.dumps(get_launch_configuration(), indent=4, ensure_ascii=False) }}
|
||||
|
@ -14,17 +14,16 @@
|
||||
|
||||
# pylint: disable=redefined-builtin, too-many-arguments
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
from collections import OrderedDict
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class ConfigOption: # pylint: disable=too-many-instance-attributes
|
||||
class ConfigOption: # pylint: disable=too-many-instance-attributes,too-many-positional-arguments
|
||||
def __init__(
|
||||
self,
|
||||
scope,
|
||||
@ -80,30 +79,6 @@ def ConfigEnvOption(*args, **kwargs):
|
||||
return ConfigOption("env", *args, **kwargs)
|
||||
|
||||
|
||||
def calculate_path_hash(path):
|
||||
return "%s-%s" % (
|
||||
os.path.basename(path),
|
||||
hashlib.sha1(hashlib_encode_data(path)).hexdigest()[:10],
|
||||
)
|
||||
|
||||
|
||||
def expand_dir_templates(path):
|
||||
project_dir = os.getcwd()
|
||||
tpls = {
|
||||
"$PROJECT_DIR": lambda: project_dir,
|
||||
"$PROJECT_HASH": lambda: calculate_path_hash(project_dir),
|
||||
}
|
||||
done = False
|
||||
while not done:
|
||||
done = True
|
||||
for tpl, cb in tpls.items():
|
||||
if tpl not in path:
|
||||
continue
|
||||
path = path.replace(tpl, cb())
|
||||
done = False
|
||||
return path
|
||||
|
||||
|
||||
def validate_dir(path):
|
||||
if not path:
|
||||
return path
|
||||
@ -112,8 +87,6 @@ def validate_dir(path):
|
||||
return path
|
||||
if path.startswith("~"):
|
||||
path = fs.expanduser(path)
|
||||
if "$" in path:
|
||||
path = expand_dir_templates(path)
|
||||
return os.path.abspath(path)
|
||||
|
||||
|
||||
@ -137,6 +110,7 @@ ProjectOptions = OrderedDict(
|
||||
group="generic",
|
||||
name="name",
|
||||
description="A project name",
|
||||
default=lambda: os.path.basename(os.getcwd()),
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
group="generic",
|
||||
@ -240,7 +214,7 @@ ProjectOptions = OrderedDict(
|
||||
"external library dependencies"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_WORKSPACE_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", ".pio"),
|
||||
default=os.path.join("${PROJECT_DIR}", ".pio"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -274,7 +248,7 @@ ProjectOptions = OrderedDict(
|
||||
"System automatically adds this path to CPPPATH scope"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_INCLUDE_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "include"),
|
||||
default=os.path.join("${PROJECT_DIR}", "include"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -285,7 +259,7 @@ ProjectOptions = OrderedDict(
|
||||
"project C/C++ source files"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_SRC_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "src"),
|
||||
default=os.path.join("${PROJECT_DIR}", "src"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -293,7 +267,7 @@ ProjectOptions = OrderedDict(
|
||||
name="lib_dir",
|
||||
description="A storage for the custom/private project libraries",
|
||||
sysenvvar="PLATFORMIO_LIB_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "lib"),
|
||||
default=os.path.join("${PROJECT_DIR}", "lib"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -304,7 +278,7 @@ ProjectOptions = OrderedDict(
|
||||
"file system (SPIFFS, etc.)"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_DATA_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "data"),
|
||||
default=os.path.join("${PROJECT_DIR}", "data"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -315,7 +289,7 @@ ProjectOptions = OrderedDict(
|
||||
"test source files"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_TEST_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "test"),
|
||||
default=os.path.join("${PROJECT_DIR}", "test"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -323,7 +297,7 @@ ProjectOptions = OrderedDict(
|
||||
name="boards_dir",
|
||||
description="A storage for custom board manifests",
|
||||
sysenvvar="PLATFORMIO_BOARDS_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "boards"),
|
||||
default=os.path.join("${PROJECT_DIR}", "boards"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -331,7 +305,7 @@ ProjectOptions = OrderedDict(
|
||||
name="monitor_dir",
|
||||
description="A storage for custom monitor filters",
|
||||
sysenvvar="PLATFORMIO_MONITOR_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "monitor"),
|
||||
default=os.path.join("${PROJECT_DIR}", "monitor"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
ConfigPlatformioOption(
|
||||
@ -342,7 +316,7 @@ ProjectOptions = OrderedDict(
|
||||
"synchronize extra files between remote machines"
|
||||
),
|
||||
sysenvvar="PLATFORMIO_SHARED_DIR",
|
||||
default=os.path.join("$PROJECT_DIR", "shared"),
|
||||
default=os.path.join("${PROJECT_DIR}", "shared"),
|
||||
validate=validate_dir,
|
||||
),
|
||||
#
|
||||
@ -575,7 +549,7 @@ ProjectOptions = OrderedDict(
|
||||
ConfigEnvOption(
|
||||
group="monitor",
|
||||
name="monitor_encoding",
|
||||
description="Custom encoding (e.g. hexlify, Latin1, UTF-8)",
|
||||
description="Custom encoding (e.g. hexlify, Latin-1, UTF-8)",
|
||||
default="UTF-8",
|
||||
),
|
||||
# Library
|
||||
|
@ -23,7 +23,7 @@ from platformio.project.helpers import get_project_watch_lib_dirs, load_build_me
|
||||
from platformio.project.options import get_config_options_schema
|
||||
from platformio.test.result import TestCase, TestCaseSource, TestStatus
|
||||
from platformio.test.runners.base import TestRunnerBase
|
||||
from platformio.test.runners.doctest import DoctestTestCaseParser
|
||||
from platformio.test.runners.doctest import DoctestTestRunner
|
||||
from platformio.test.runners.googletest import GoogletestTestRunner
|
||||
from platformio.test.runners.unity import UnityTestRunner
|
||||
from platformio.util import get_systype
|
||||
|
@ -41,9 +41,11 @@ def access_list_cmd(owner, urn_type, json_output): # pylint: disable=unused-arg
|
||||
table_data.append(
|
||||
(
|
||||
"Access:",
|
||||
click.style("Private", fg="red")
|
||||
if resource.get("private", False)
|
||||
else "Public",
|
||||
(
|
||||
click.style("Private", fg="red")
|
||||
if resource.get("private", False)
|
||||
else "Public"
|
||||
),
|
||||
)
|
||||
)
|
||||
table_data.append(
|
||||
|
@ -42,7 +42,7 @@ class RegistryClient(HTTPClient):
|
||||
pass
|
||||
return False
|
||||
|
||||
def publish_package( # pylint: disable=redefined-builtin
|
||||
def publish_package( # pylint: disable=redefined-builtin, too-many-positional-arguments
|
||||
self, owner, type, archive_path, released_at=None, private=False, notify=True
|
||||
):
|
||||
with open(archive_path, "rb") as fp:
|
||||
@ -64,7 +64,7 @@ class RegistryClient(HTTPClient):
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def unpublish_package( # pylint: disable=redefined-builtin
|
||||
def unpublish_package( # pylint: disable=redefined-builtin, too-many-positional-arguments
|
||||
self, owner, type, name, version=None, undo=False
|
||||
):
|
||||
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
||||
@ -142,7 +142,9 @@ class RegistryClient(HTTPClient):
|
||||
x_with_authorization=self.allowed_private_packages(),
|
||||
)
|
||||
|
||||
def get_package(self, typex, owner, name, version=None, extra_path=None):
|
||||
def get_package(
|
||||
self, typex, owner, name, version=None, extra_path=None
|
||||
): # pylint: disable=too-many-positional-arguments
|
||||
try:
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
|
@ -54,9 +54,11 @@ class RegistryFileMirrorIterator:
|
||||
"head",
|
||||
self._url_parts.path,
|
||||
allow_redirects=False,
|
||||
params=dict(bypass=",".join(self._visited_mirrors))
|
||||
if self._visited_mirrors
|
||||
else None,
|
||||
params=(
|
||||
dict(bypass=",".join(self._visited_mirrors))
|
||||
if self._visited_mirrors
|
||||
else None
|
||||
),
|
||||
x_with_authorization=RegistryClient.allowed_private_packages(),
|
||||
)
|
||||
stop_conditions = [
|
||||
|
@ -110,7 +110,7 @@ def remote_update(agents, only_check, dry_run):
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_run(
|
||||
def remote_run( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
@ -198,7 +198,7 @@ def remote_run(
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_test( # pylint: disable=redefined-builtin
|
||||
def remote_test( # pylint: disable=redefined-builtin,too-many-positional-arguments
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
|
@ -123,9 +123,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
index=i + 1,
|
||||
host=device[0] + ":" if len(result) > 1 else "",
|
||||
port=device[1]["port"],
|
||||
description=device[1]["description"]
|
||||
if device[1]["description"] != "n/a"
|
||||
else "",
|
||||
description=(
|
||||
device[1]["description"]
|
||||
if device[1]["description"] != "n/a"
|
||||
else ""
|
||||
),
|
||||
)
|
||||
)
|
||||
device_index = click.prompt(
|
||||
|
@ -239,7 +239,7 @@ class RunOrTestClient(AsyncClientBase):
|
||||
except (AttributeError, pb.DeadReferenceError):
|
||||
self.disconnect(exit_code=1)
|
||||
|
||||
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments
|
||||
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self, result, agent_id, ac_id, dbindex, fileobj
|
||||
):
|
||||
result = PROJECT_SYNC_STAGE.lookupByValue(result)
|
||||
|
@ -30,7 +30,7 @@ class SSLContextFactory(ssl.ClientContextFactory):
|
||||
ctx.load_verify_locations(certifi.where())
|
||||
return ctx
|
||||
|
||||
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments
|
||||
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments,too-many-positional-arguments
|
||||
self, connection, x509, errno, depth, status
|
||||
):
|
||||
cn = x509.get_subject().commonName
|
||||
|
@ -33,9 +33,11 @@ from platformio.test.runners.base import CTX_META_TEST_IS_RUNNING
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
|
||||
try:
|
||||
DEFAULT_JOB_NUMS = cpu_count()
|
||||
SYSTEM_CPU_COUNT = cpu_count()
|
||||
except NotImplementedError:
|
||||
DEFAULT_JOB_NUMS = 1
|
||||
SYSTEM_CPU_COUNT = 1
|
||||
|
||||
DEFAULT_JOB_NUMS = int(os.getenv("PLATFORMIO_RUN_JOBS", SYSTEM_CPU_COUNT))
|
||||
|
||||
|
||||
@click.command("run", short_help="Run project targets (build, upload, clean, etc.)")
|
||||
@ -76,7 +78,7 @@ except NotImplementedError:
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
environment,
|
||||
target,
|
||||
@ -174,7 +176,7 @@ def cli(
|
||||
return True
|
||||
|
||||
|
||||
def process_env(
|
||||
def process_env( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
name,
|
||||
config,
|
||||
|
@ -22,7 +22,7 @@ from platformio.test.runners.base import CTX_META_TEST_RUNNING_NAME
|
||||
|
||||
|
||||
class EnvironmentProcessor:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
cmd_ctx,
|
||||
name,
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
@ -79,6 +80,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
help="A program argument (multiple are allowed)",
|
||||
)
|
||||
@click.option("--list-tests", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--json-output-path", type=click.Path())
|
||||
@click.option("--junit-output-path", type=click.Path())
|
||||
@click.option(
|
||||
@ -88,7 +90,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
help="Increase verbosity level, maximum is 3 levels (-vvv), see docs for details",
|
||||
)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals,redefined-builtin
|
||||
ctx,
|
||||
environment,
|
||||
ignore,
|
||||
@ -105,6 +107,7 @@ def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
monitor_dtr,
|
||||
program_args,
|
||||
list_tests,
|
||||
json_output,
|
||||
json_output_path,
|
||||
junit_output_path,
|
||||
verbose,
|
||||
@ -156,6 +159,7 @@ def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
stdout_report.generate(verbose=verbose or list_tests)
|
||||
|
||||
for output_format, output_path in [
|
||||
("json", subprocess.STDOUT if json_output else None),
|
||||
("json", json_output_path),
|
||||
("junit", junit_output_path),
|
||||
]:
|
||||
|
@ -15,6 +15,7 @@
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
@ -24,6 +25,9 @@ from platformio.test.result import TestStatus
|
||||
|
||||
class JsonTestReport(TestReportBase):
|
||||
def generate(self, output_path, verbose=False):
|
||||
if output_path == subprocess.STDOUT:
|
||||
return click.echo("\n\n" + json.dumps(self.to_json()))
|
||||
|
||||
if os.path.isdir(output_path):
|
||||
output_path = os.path.join(
|
||||
output_path,
|
||||
@ -40,6 +44,8 @@ class JsonTestReport(TestReportBase):
|
||||
if verbose:
|
||||
click.secho(f"Saved JSON report to the {output_path}", fg="green")
|
||||
|
||||
return True
|
||||
|
||||
def to_json(self):
|
||||
result = dict(
|
||||
version="1.0",
|
||||
@ -62,11 +68,13 @@ class JsonTestReport(TestReportBase):
|
||||
test_dir=test_suite.test_dir,
|
||||
status=test_suite.status.name,
|
||||
duration=test_suite.duration,
|
||||
timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
|
||||
"%Y-%m-%dT%H:%M:%S"
|
||||
)
|
||||
if test_suite.timestamp
|
||||
else None,
|
||||
timestamp=(
|
||||
datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
|
||||
"%Y-%m-%dT%H:%M:%S"
|
||||
)
|
||||
if test_suite.timestamp
|
||||
else None
|
||||
),
|
||||
testcase_nums=len(test_suite.cases),
|
||||
error_nums=test_suite.get_status_nums(TestStatus.ERRORED),
|
||||
failure_nums=test_suite.get_status_nums(TestStatus.FAILED),
|
||||
|
@ -55,7 +55,7 @@ class TestCaseSource:
|
||||
|
||||
|
||||
class TestCase:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
name,
|
||||
status,
|
||||
|
@@ -26,7 +26,7 @@ CTX_META_TEST_RUNNING_NAME = __name__ + ".test_running_name"
 
 
 class TestRunnerOptions:  # pylint: disable=too-many-instance-attributes
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(  # pylint: disable=too-many-arguments,too-many-positional-arguments
         self,
         verbose=0,
         without_building=False,
@@ -101,7 +101,7 @@ class DoctestTestCaseParser:
 
 
 class DoctestTestRunner(TestRunnerBase):
-    EXTRA_LIB_DEPS = ["doctest/doctest@^2.4.9"]
+    EXTRA_LIB_DEPS = ["doctest/doctest@^2.4.11"]
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -88,7 +88,7 @@ class GoogletestTestCaseParser:
 
 
 class GoogletestTestRunner(TestRunnerBase):
-    EXTRA_LIB_DEPS = ["google/googletest@^1.12.1"]
+    EXTRA_LIB_DEPS = ["google/googletest@^1.15.2"]
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -26,7 +26,7 @@ from platformio.util import strip_ansi_codes
 
 
 class UnityTestRunner(TestRunnerBase):
-    EXTRA_LIB_DEPS = ["throwtheswitch/Unity@^2.5.2"]
+    EXTRA_LIB_DEPS = ["throwtheswitch/Unity@^2.6.0"]
 
     # Examples:
     # test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33
@@ -184,10 +184,6 @@ void unityOutputComplete(void) { unittest_uart_end(); }
         ),
     )
 
-    def __init__(self, *args, **kwargs):
-        """Delete when Unity > 2.5.2 is released"""
-        super().__init__(*args, **kwargs)
-
     def get_unity_framework_config(self):
         if not self.platform.is_embedded():
             return self.UNITY_FRAMEWORK_CONFIG["native"]
@@ -15,6 +15,7 @@
 import datetime
+import functools
 import math
 import os
 import platform
 import re
 import shutil
@@ -64,16 +65,16 @@ class memoized:
 
 
 class throttle:
-    def __init__(self, threshhold):
-        self.threshhold = threshhold  # milliseconds
+    def __init__(self, threshold):
+        self.threshold = threshold  # milliseconds
         self.last = 0
 
     def __call__(self, func):
+        @functools.wraps(func)
         def wrapper(*args, **kwargs):
             diff = int(round((time.time() - self.last) * 1000))
-            if diff < self.threshhold:
-                time.sleep((self.threshhold - diff) * 0.001)
+            if diff < self.threshold:
+                time.sleep((self.threshold - diff) * 0.001)
             self.last = time.time()
             return func(*args, **kwargs)
 
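The throttle hunk above fixes the "threshhold" typo and adds functools.wraps, which keeps the wrapped function's name and docstring intact. A standalone copy for illustration only (not imported from platformio.util):

    # Standalone copy of the corrected throttle decorator, for illustration only.
    import functools
    import time

    class throttle:
        def __init__(self, threshold):
            self.threshold = threshold  # minimum milliseconds between calls
            self.last = 0

        def __call__(self, func):
            @functools.wraps(func)  # preserves func.__name__ and func.__doc__
            def wrapper(*args, **kwargs):
                diff = int(round((time.time() - self.last) * 1000))
                if diff < self.threshold:
                    time.sleep((self.threshold - diff) * 0.001)
                self.last = time.time()
                return func(*args, **kwargs)

            return wrapper

    @throttle(100)
    def poll():
        """Limited to roughly ten calls per second."""
        return time.time()

    print(poll.__name__, poll.__doc__)  # "poll" survives thanks to functools.wraps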
@@ -136,6 +137,11 @@ def singleton(cls):
 
 
 def get_systype():
+    # allow manual override, eg. for
+    # windows on arm64 systems with emulated x86
+    if "PLATFORMIO_SYSTEM_TYPE" in os.environ:
+        return os.environ.get("PLATFORMIO_SYSTEM_TYPE")
+
     system = platform.system().lower()
     arch = platform.machine().lower()
     if system == "windows":
@@ -143,6 +149,8 @@ def get_systype():
         arch = "x86_" + platform.architecture()[0]
     if "x86" in arch:
         arch = "amd64" if "64" in arch else "x86"
+    if arch == "aarch64" and platform.architecture()[0] == "32bit":
+        arch = "armv7l"
     return "%s_%s" % (system, arch) if arch else system
 
 
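The two hunks above let PLATFORMIO_SYSTEM_TYPE override autodetection (e.g. for x86-emulated Python on Windows-on-ARM) and map a 32-bit interpreter on aarch64 hardware to armv7l. A rough, self-contained re-implementation to show the override in action; the "windows" special case is simplified compared to the real function in platformio.util:

    # Demonstrates the environment override added above (simplified re-implementation).
    import os
    import platform

    def get_systype():
        if "PLATFORMIO_SYSTEM_TYPE" in os.environ:
            return os.environ["PLATFORMIO_SYSTEM_TYPE"]
        system = platform.system().lower()
        arch = platform.machine().lower()
        if system == "windows" and not arch:  # simplified vs. platformio.util
            arch = "x86_" + platform.architecture()[0]
        if "x86" in arch:
            arch = "amd64" if "64" in arch else "x86"
        if arch == "aarch64" and platform.architecture()[0] == "32bit":
            arch = "armv7l"
        return "%s_%s" % (system, arch) if arch else system

    print(get_systype())  # autodetected, e.g. linux_x86_64
    os.environ["PLATFORMIO_SYSTEM_TYPE"] = "windows_arm64"
    print(get_systype())  # forced value: windows_arm64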
@@ -168,9 +176,8 @@ def items_in_list(needle, haystack):
 
 
 def parse_datetime(datestr):
-    if "T" in datestr and "Z" in datestr:
-        return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
-    return datetime.datetime.strptime(datestr)
+    assert "T" in datestr and "Z" in datestr
+    return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
 
 
 def merge_dicts(d1, d2, path=None):
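The parse_datetime hunk above also removes a latent bug: the old fallback datetime.datetime.strptime(datestr) could only raise TypeError, since strptime requires a format string, so the new version simply asserts the ISO-8601 UTC shape up front. A short illustration of the resulting behaviour:

    # Behaviour of the stricter parse_datetime shown above (standalone copy).
    import datetime

    def parse_datetime(datestr):
        assert "T" in datestr and "Z" in datestr
        return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")

    print(parse_datetime("2024-09-26T12:34:56Z"))  # 2024-09-26 12:34:56

    try:
        parse_datetime("2024-09-26 12:34:56")  # no "T"/"Z" -> AssertionError
    except AssertionError:
        print("rejected: not an ISO-8601 UTC timestamp")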
@@ -39,7 +39,7 @@ RST_COPYRIGHT = """..  Copyright (c) 2014-present PlatformIO <contact@platformio
     limitations under the License.
 """
 
-SKIP_DEBUG_TOOLS = ["esp-bridge", "esp-builtin"]
+SKIP_DEBUG_TOOLS = ["esp-bridge", "esp-builtin", "dfu"]
 
 STATIC_FRAMEWORK_DATA = {
     "arduino": {
@@ -357,6 +357,8 @@ Packages
       - Description"""
     )
     for name, options in dict(sorted(packages.items())).items():
+        if name == "toolchain-gccarmnoneeab":  # aceinna typo fix
+            name = name + "i"
         package = REGCLIENT.get_package(
             "tool", options.get("owner", "platformio"), name
         )
@@ -411,6 +413,7 @@ Packages
 
 
 def generate_platform(pkg, rst_dir):
+    owner = pkg.metadata.spec.owner
     name = pkg.metadata.name
     print("Processing platform: %s" % name)
 
@@ -426,9 +429,9 @@ def generate_platform(pkg, rst_dir):
     p = PlatformFactory.new(name)
     assert p.repository_url.endswith(".git")
     github_url = p.repository_url[:-4]
-    registry_url = reg_package_url("platform", pkg.metadata.spec.owner, name)
+    registry_url = reg_package_url("platform", owner, name)
 
-    lines.append(".. _platform_%s:" % p.name)
+    lines.append(".. _platform_%s:" % name)
     lines.append("")
 
     lines.append(p.title)
@@ -437,7 +440,7 @@ def generate_platform(pkg, rst_dir):
     lines.append(":Registry:")
     lines.append("  `%s <%s>`__" % (registry_url, registry_url))
     lines.append(":Configuration:")
-    lines.append("  :ref:`projectconf_env_platform` = ``%s``" % p.name)
+    lines.append("  :ref:`projectconf_env_platform` = ``%s/%s``" % (owner, name))
     lines.append("")
     lines.append(p.description)
     lines.append(
setup.py (10 changes)
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import platform
 from setuptools import find_packages, setup
 
 from platformio import (
@@ -23,13 +22,8 @@ from platformio import (
     __title__,
     __url__,
     __version__,
-    __install_requires__,
 )
 
-# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
-if platform.system() == "Darwin" and "arm" in platform.machine().lower():
-    __install_requires__.append("chardet>=3.0.2,<4")
-
+from platformio.dependencies import get_pip_dependencies
 
 setup(
     name=__title__,
@@ -40,7 +34,7 @@ setup(
     author_email=__email__,
     url=__url__,
     license=__license__,
-    install_requires=__install_requires__,
+    install_requires=get_pip_dependencies(),
     python_requires=">=3.6",
     packages=find_packages(include=["platformio", "platformio.*"]),
     package_data={
|
||||
|
||||
import pytest
|
||||
|
||||
from platformio import __core_packages__, fs
|
||||
from platformio import fs
|
||||
from platformio.dependencies import get_core_dependencies
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
@ -30,7 +31,7 @@ PROJECT_CONFIG_TPL = """
|
||||
[env]
|
||||
platform = platformio/atmelavr@^3.4.0
|
||||
lib_deps =
|
||||
milesburton/DallasTemperature@^3.9.1
|
||||
milesburton/DallasTemperature@^4.0.4
|
||||
https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip
|
||||
|
||||
[env:baremetal]
|
||||
@ -177,7 +178,7 @@ def test_baremetal_project(
|
||||
),
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.70300.191015"),
|
||||
]
|
||||
|
||||
@ -210,11 +211,11 @@ def test_project(
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("framework-arduino-avr-attiny@1.5.2"),
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.70300.191015"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@^3.9.1",
|
||||
"milesburton/DallasTemperature@^4.0.4",
|
||||
"https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip",
|
||||
]
|
||||
|
||||
@ -240,7 +241,7 @@ def test_private_lib_deps(
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"bblanchon/ArduinoJson": "^5",
|
||||
"milesburton/DallasTemperature": "^3.9.1"
|
||||
"milesburton/DallasTemperature": "^4.0.4"
|
||||
}
|
||||
}
|
||||
"""
|
||||
@ -339,7 +340,7 @@ def test_remove_project_unused_libdeps(
|
||||
),
|
||||
]
|
||||
|
||||
# manually remove from cofiguration file
|
||||
# manually remove from configuration file
|
||||
config.set("env:baremetal", "lib_deps", ["bblanchon/ArduinoJson@^5"])
|
||||
config.save()
|
||||
result = clirunner.invoke(
|
||||
@ -445,7 +446,7 @@ def test_custom_project_libraries(
|
||||
)
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec("ArduinoJson@5.13.4"),
|
||||
PackageSpec("Nanopb@0.4.7"),
|
||||
PackageSpec("Nanopb@0.4.91"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"bblanchon/ArduinoJson@^5",
|
||||
|
Some files were not shown because too many files have changed in this diff.