Mirror of https://github.com/platformio/platformio-core.git (synced 2025-12-23 15:18:03 +01:00)

Compare commits: 152 commits
Commit SHA1s:

7ab894f8a6, cc529de888, b7aec5a838, 8011eea34a, d22216d4d1, 41c8e63cd5, 3bbe176ae3, 8e561c3c55,
f00ef57089, 4850c1069c, 487a894a71, 8df56dfbb2, 31d630b766, de02eafa06, 903a013cbd, 6cf8b8172f,
444c57b4a6, d787648e71, 846588deec, 79142965ce, 93bc4fae6c, 1aa256d63c, 3a133af1a6, f93d3d509b,
145142ea6c, b4b02982d6, 841489c154, 23c142dffd, fc946baa93, a447022e7f, 4c697d9032, a71443a2ee,
20e076191e, d907ecb9e9, c950d6d366, 29cd2d2bdb, a584a6bce3, 4dc7ea5bd0, 1be6e10f99, c9016d6939,
baab25a48c, 4d4f5a217b, b6d1f4d769, 90fc36cf2d, 9be0a8248d, d15314689d, 1d4b5c8051, 47a87c57f2,
ec2d01f277, 4e05309e02, 1fd3a4061f, 014ac79c87, dd3fe909a1, c1afb364e9, f3c27eadf6, fe2fd5e880,
07e7dc4717, a94e5bd5ab, f5ab0e5ddd, 3e20abec90, a4276b4ea6, cade63fba5, 3a57661230, 33fadd028d,
647b131d9b, b537004a75, 67b2759be2, fe2e8a0a40, 03e84fe325, b45cdc9cb6, 3aed8e1259, 2d4a87238a,
023b58e9f0, 3211a2b91b, 4b61de0136, e6ae18ab0d, 4230b223d2, d224ae658d, 20dc006345, 13035ced59,
b9d27240b5, 2441d47321, cf497e8829, 013153718d, f1726843a2, 44ef6e3469, eeb5ac456e, aea9075d4b,
11a8d9ff7a, 7b587ba8bf, 9eb6e5166d, aa580360e8, 4c490cc63c, 882d4da8cb, 781114f026, 7cf8d1d696,
fd1333f031, 8e21259222, 9899547b73, 4075789a32, ff364610c5, e5940673d7, fe140b0566, 2ec5a3154e,
956f21b639, cdac7d497c, 591b377e4a, c475578db6, 2bad42ecb1, 0acfc25d56, 9d1593da0b, e9433de50f,
fcba901611, 0e3249e8b1, 0d647e164b, c01ef88265, 9fb9e586a0, 28bd200cd6, 56be27fb0b, 32991356f3,
dbe58b49bf, d36e39418e, c28740cfb1, 430acc87de, c0d97287dd, 0f3dbe623d, 6449115635, d085a02068,
76a11a75b7, 93018930ab, 621b24b665, 7606dd4faf, aa06d21abe, 042f8dc668, c4f76848a7, e1ff9a469d,
2239616484, 55be7181b3, f519a9d524, f4319f670c, 80fc335528, 353f440335, 3e9ca48588, 255e91b51c,
adf94843ea, e3e08d9691, 84c7ede0e1, 28c90652bc, a75da327d0, adf4012b96, 1fe806269d, 4742ffc9d8
.github/workflows/core.yml (vendored, 52 changed lines)

@@ -1,52 +0,0 @@
name: Core

on: [push, pull_request]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-20.04, windows-latest, macos-latest]
        python-version: ["3.6", "3.7", "3.11", "3.12"]

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Core System Info
        run: |
          tox -e py

      - name: Python Lint
        if: ${{ matrix.python-version != '3.6' }}
        run: |
          tox -e lint

      - name: Integration Tests
        if: ${{ matrix.python-version == '3.11' }}
        run: |
          tox -e testcore

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Core*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
.github/workflows/deployment.yml (vendored, 46 changed lines)

@@ -1,46 +0,0 @@
name: Deployment

on:
  push:
    branches:
      - "master"
      - "release/**"

jobs:
  deployment:
    runs-on: ubuntu-latest
    environment: production

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox wheel

      - name: Deployment Tests
        env:
          TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
          TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
          TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
        run: |
          tox -e testcore

      - name: Build Python source tarball
        # run: python setup.py sdist bdist_wheel
        run: python setup.py sdist

      - name: Publish package to PyPI
        if: ${{ github.ref == 'refs/heads/master' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
.github/workflows/docs.yml (vendored, 109 changed lines)

@@ -1,109 +0,0 @@
name: Docs

on: [push, pull_request]

jobs:
  build:
    name: Build Docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Build docs
        run: |
          tox -e docs

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Docs*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}

      - name: Preserve Docs
        if: ${{ github.event_name == 'push' }}
        run: |
          tar -czvf docs.tar.gz -C docs/_build html rtdpage

      - name: Save artifact
        if: ${{ github.event_name == 'push' }}
        uses: actions/upload-artifact@v3
        with:
          name: docs
          path: ./docs.tar.gz

  deploy:
    name: Deploy Docs
    needs: build
    runs-on: ubuntu-latest
    env:
      DOCS_REPO: platformio/platformio-docs
      DOCS_DIR: platformio-docs
      LATEST_DOCS_DIR: latest-docs
      RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
    if: ${{ github.event_name == 'push' }}
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: docs
      - name: Unpack artifact
        run: |
          mkdir ./${{ env.LATEST_DOCS_DIR }}
          tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
      - name: Delete Artifact
        uses: geekyeggo/delete-artifact@v2
        with:
          name: docs
      - name: Select Docs type
        id: get-destination-dir
        run: |
          if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
            echo "::set-output name=dst_dir::stable"
          else
            echo "::set-output name=dst_dir::latest"
          fi
      - name: Checkout latest Docs
        continue-on-error: true
        uses: actions/checkout@v4
        with:
          repository: ${{ env.DOCS_REPO }}
          path: ${{ env.DOCS_DIR }}
          ref: gh-pages
      - name: Synchronize Docs
        run: |
          rm -rf ${{ env.DOCS_DIR }}/.git
          rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
            rm -rf ${{ env.DOCS_DIR }}/page
            mkdir -p ${{ env.DOCS_DIR }}/page
            cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
          fi
      - name: Validate Docs
        run: |
          if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
            echo "Docs folder is empty. Aborting!"
            exit 1
          fi
      - name: Deploy to Github Pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
          external_repository: ${{ env.DOCS_REPO }}
          publish_dir: ./${{ env.DOCS_DIR }}
          commit_message: Sync Docs
.github/workflows/examples.yml (vendored, 12 changed lines)

@@ -15,6 +15,16 @@ jobs:
      PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"

    steps:
      - name: Free Disk Space
        uses: endersonmenezes/free-disk-space@v3
        with:
          remove_android: true
          remove_dotnet: true
          remove_haskell: true
          # Faster cleanup
          remove_packages_one_command: true
          rm_cmd: "rmz"

      - uses: actions/checkout@v4
        with:
          submodules: "recursive"
@@ -34,7 +44,7 @@ jobs:
        run: |
          # Free space
          sudo apt clean
          docker rmi $(docker image ls -aq)
          # docker rmi $(docker image ls -aq)
          df -h
          tox -e testexamples
.github/workflows/projects.yml (vendored, 69 changed lines)

@@ -1,69 +0,0 @@
name: Projects

on: [push, pull_request]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        project:
          - marlin:
            repository: "MarlinFirmware/Marlin"
            folder: "Marlin"
            config_dir: "Marlin"
            env_name: "mega2560"
          - esphome:
            repository: "esphome/esphome"
            folder: "esphome"
            config_dir: "esphome"
            env_name: "esp32-arduino"
          - smartknob:
            repository: "scottbez1/smartknob"
            folder: "smartknob"
            config_dir: "smartknob"
            env_name: "view"
          - espurna:
            repository: "xoseperez/espurna"
            folder: "espurna"
            config_dir: "espurna/code"
            env_name: "nodemcu-lolin"
          - OpenMQTTGateway:
            repository: "1technophile/OpenMQTTGateway"
            folder: "OpenMQTTGateway"
            config_dir: "OpenMQTTGateway"
            env_name: "esp32-m5atom-lite"
        os: [ubuntu-latest, windows-latest, macos-latest]
        exclude:
          - os: windows-latest
            project: {"esphome": "", "repository": "esphome/esphome", "folder": "esphome", "config_dir": "esphome", "env_name": "esp32-arduino"}

    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: 3.11

      - name: Install PlatformIO
        run: pip install -U .

      - name: Check out ${{ matrix.project.repository }}
        uses: actions/checkout@v4
        with:
          submodules: "recursive"
          repository: ${{ matrix.project.repository }}
          path: ${{ matrix.project.folder }}

      - name: Install ESPHome dependencies
        # Requires esptool package as it's used in a custom prescript
        if: ${{ contains(matrix.project.repository, 'esphome') }}
        run: pip install esptool==3.*

      - name: Compile ${{ matrix.project.repository }}
        run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
HISTORY.rst (56 changed lines)

@@ -7,6 +7,8 @@ Release Notes
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
.. |PIOHOME| replace:: `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__

.. _release_notes_6:

@@ -17,6 +19,58 @@ Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.

6.1.19 (2025-??-??)
~~~~~~~~~~~~~~~~~~~

* Added support for Python 3.14
* Fixed a regression issue where custom build flags were not properly reflected in the `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ file, ensuring accurate compilation database generation
* Fixed an issue where fully-qualified serial port URLs (e.g., ``rfc2217://host:port``) were incorrectly treated as wildcard patterns (`issue #5225 <https://github.com/platformio/platformio-core/issues/5225>`_)

6.1.18 (2025-03-11)
~~~~~~~~~~~~~~~~~~~

* Resolved a regression issue that prevented |PIOHOME| from opening external links (`issue #5084 <https://github.com/platformio/platformio-core/issues/5084>`_)

6.1.17 (2025-02-13)
~~~~~~~~~~~~~~~~~~~

* Introduced the `PLATFORMIO_RUN_JOBS <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_RUN_JOBS>`__ environment variable, allowing manual override of the number of parallel build jobs (`issue #5077 <https://github.com/platformio/platformio-core/issues/5077>`_)
* Added support for ``tar.xz`` tarball dependencies (`pull #4974 <https://github.com/platformio/platformio-core/pull/4974>`_)
* Ensured that dependencies of private libraries are no longer unnecessarily re-installed, optimizing dependency management and reducing redundant operations (`issue #4987 <https://github.com/platformio/platformio-core/issues/4987>`_)
* Resolved an issue where the ``compiledb`` target failed to properly escape compiler executable paths containing spaces (`issue #4998 <https://github.com/platformio/platformio-core/issues/4998>`_)
* Resolved an issue with incorrect path resolution when linking static libraries via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option (`issue #5004 <https://github.com/platformio/platformio-core/issues/5004>`_)
* Resolved an issue where the ``--project-dir`` flag did not function correctly with the `pio check <https://docs.platformio.org/en/latest/core/userguide/cmd_check.html>`__ and `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ commands (`issue #5029 <https://github.com/platformio/platformio-core/issues/5029>`_)
* Resolved an issue where the |LDF| occasionally excluded bundled platform libraries from the dependency graph (`pull #4941 <https://github.com/platformio/platformio-core/pull/4941>`_)

6.1.16 (2024-09-26)
~~~~~~~~~~~~~~~~~~~

* Added support for Python 3.13
* Introduced the `PLATFORMIO_SYSTEM_TYPE <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_SYSTEM_TYPE>`__ environment variable, enabling manual override of the detected system type for greater flexibility and control in custom build environments
* Enhanced internet connection checks by falling back to HTTPS protocol when HTTP (port 80) fails (`issue #4980 <https://github.com/platformio/platformio-core/issues/4980>`_)
* Upgraded the build engine to the latest version of SCons (4.8.1) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.8.1>`__)
* Upgraded the `Doctest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ testing framework to version 2.4.11, the `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ to version 1.15.2, and the `Unity <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/unity.html>`__ to version 2.6.0, incorporating the latest features and improvements for enhanced testing capabilities
* Corrected an issue where the incorrect public class was imported for the ``DoctestTestRunner`` (`issue #4949 <https://github.com/platformio/platformio-core/issues/4949>`_)

6.1.15 (2024-04-25)
~~~~~~~~~~~~~~~~~~~

* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
* Resolved an issue related to the inaccurate detection of the Clang compiler (`pull #4897 <https://github.com/platformio/platformio-core/pull/4897>`_)

6.1.14 (2024-03-21)
~~~~~~~~~~~~~~~~~~~

* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)

6.1.13 (2024-01-12)
~~~~~~~~~~~~~~~~~~~

@@ -46,7 +100,7 @@ test-driven methodologies, and modern toolchains for unrivaled success.
~~~~~~~~~~~~~~~~~~~

* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within |PIOHOME|

6.1.10 (2023-08-11)
~~~~~~~~~~~~~~~~~~~
Makefile (9 changed lines)

@@ -10,10 +10,13 @@ format:
	black ./platformio
	black ./tests

test:
	py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
codespell:
	codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

before-commit: isort format lint
test:
	pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: codespell isort format lint

clean-docs:
	rm -rf docs/_build
docs (submodule, 2 changed lines)
Submodule docs updated: 3f02152561...23ef0f85ca

examples (submodule, 2 changed lines)
Submodule examples updated: f06e9656a4...0409a90a01
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

VERSION = (6, 1, 13)
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -38,15 +38,6 @@ __registry_mirror_hosts__ = [
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

__core_packages__ = {
    "contrib-piohome": "~3.4.2",
    "contrib-pioremote": "~1.0.0",
    "tool-scons": "~4.40600.0",
    "tool-cppcheck": "~1.21100.0",
    "tool-clangtidy": "~1.150005.0",
    "tool-pvs-studio": "~7.18.0",
}

__check_internet_hosts__ = [
    "185.199.110.153", # Github.com
    "88.198.170.159", # platformio.org
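The hunk above changes the last component of ``VERSION`` from an integer to the pre-release string "19a2". A minimal standalone sketch of how the mixed tuple collapses into the published version string, mirroring the ``__version__`` join shown above:

```python
# Sketch: a mixed int/str VERSION tuple becomes a dotted version string.
VERSION = (6, 1, "19a2")
__version__ = ".".join(str(part) for part in VERSION)

assert __version__ == "6.1.19a2"  # the pre-release suffix rides along unchanged
```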
@@ -144,7 +144,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint:disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
|
||||
@@ -48,11 +48,13 @@ def team_list_cmd(orgname, json_output):
|
||||
table_data.append(
|
||||
(
|
||||
"Members:",
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-",
|
||||
(
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-"
|
||||
),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(table_data, tablefmt="plain"))
|
||||
|
||||
@@ -36,6 +36,8 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
@@ -85,6 +87,8 @@ ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVI
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
@@ -173,4 +177,7 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTR{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
@@ -147,13 +147,13 @@ if env.subst("$BUILD_CACHE_DIR"):
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
    click.echo("Verbose mode can be enabled via `-v, --verbose` option")

if not os.path.isdir(env.subst("$BUILD_DIR")):
    os.makedirs(env.subst("$BUILD_DIR"))

# Dynamically load dependent tools
if "compiledb" in COMMAND_LINE_TARGETS:
    env.Tool("compilation_db")

if not os.path.isdir(env.subst("$BUILD_DIR")):
    os.makedirs(env.subst("$BUILD_DIR"))

env.LoadProjectOptions()
env.LoadPioPlatform()
@@ -54,11 +54,12 @@ def GetBuildType(env):
        modes.append("debug")
    if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
        modes.append("test")
    return "+".join(modes or ["release"])
    return ", ".join(modes or ["release"])


def BuildProgram(env):
    env.ProcessProgramDeps()
    env.ProcessCompileDbToolchainOption()
    env.ProcessProjectDeps()

    # append into the beginning a main LD script
@@ -126,26 +127,26 @@ def ProcessProgramDeps(env):
    # remove specified flags
    env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))

    env.ProcessCompileDbToolchainOption()


def ProcessCompileDbToolchainOption(env):
    if "compiledb" in COMMAND_LINE_TARGETS:
        # Resolve absolute path of toolchain
        for cmd in ("CC", "CXX", "AS"):
            if cmd not in env:
                continue
            if os.path.isabs(env[cmd]):
                continue
            env[cmd] = where_is_program(
                env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
            )
    if "compiledb" not in COMMAND_LINE_TARGETS:
        return

    if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
        print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
        for scope, includes in env.DumpIntegrationIncludes().items():
            if scope in ("toolchain",):
                env.Append(CPPPATH=includes)
    # Resolve absolute path of toolchain
    for cmd in ("CC", "CXX", "AS"):
        if cmd not in env:
            continue
        if os.path.isabs(env[cmd]) or '"' in env[cmd]:
            continue
        env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
        if " " in env[cmd]: # issue #4998: Space in compilator path
            env[cmd] = f'"{env[cmd]}"'

    if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
        print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
        for scope, includes in env.DumpIntegrationIncludes().items():
            if scope in ("toolchain",):
                env.Append(CPPPATH=includes)


def ProcessProjectDeps(env):
@@ -219,6 +220,11 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
            if os.path.isdir(p):
                result[k][i] = os.path.abspath(p)

    # fix relative LIBs
    for i, l in enumerate(result.get("LIBS", [])):
        if isinstance(l, FS.File):
            result["LIBS"][i] = os.path.abspath(l.get_path())

    # fix relative path for "-include"
    for i, f in enumerate(result.get("CCFLAGS", [])):
        if isinstance(f, tuple) and f[0] == "-include":
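The reworked ``ProcessCompileDbToolchainOption`` above wraps a resolved compiler path in double quotes when it contains a space, so entries written to ``compile_commands.json`` stay parseable (issue #4998). A hedged, standalone sketch of that quoting rule; the helper name is illustrative and not part of the diff:

```python
import os

def quote_compiler_path(path: str) -> str:
    """Quote absolute tool paths that contain spaces, mirroring the compiledb fix."""
    if os.path.isabs(path) and " " in path and '"' not in path:
        return f'"{path}"'
    return path

print(quote_compiler_path("/opt/my toolchain/bin/gcc"))  # -> "/opt/my toolchain/bin/gcc" (quoted)
```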
@@ -39,7 +39,7 @@ from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
@@ -309,10 +309,10 @@ class LibBuilderBase:
|
||||
if not self.dependencies or self._deps_are_processed:
|
||||
return
|
||||
self._deps_are_processed = True
|
||||
for item in self.dependencies:
|
||||
for dependency in self.dependencies:
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item["name"] != lb.name:
|
||||
if not lb.is_dependency_compatible(dependency):
|
||||
continue
|
||||
found = True
|
||||
if lb not in self.depbuilders:
|
||||
@@ -322,9 +322,28 @@ class LibBuilderBase:
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write(
|
||||
"Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item["name"], self.name)
|
||||
"library\n" % (dependency["name"], self.name)
|
||||
)
|
||||
|
||||
def is_dependency_compatible(self, dependency):
|
||||
pkg = PackageItem(self.path)
|
||||
qualifiers = {"name": self.name, "version": self.version}
|
||||
if pkg.metadata:
|
||||
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
|
||||
if pkg.metadata.spec and pkg.metadata.spec.owner:
|
||||
qualifiers["owner"] = pkg.metadata.spec.owner
|
||||
dep_qualifiers = {
|
||||
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
|
||||
}
|
||||
if (
|
||||
"version" in dep_qualifiers
|
||||
and not PackageSpec(dep_qualifiers["version"]).requirements
|
||||
):
|
||||
del dep_qualifiers["version"]
|
||||
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
|
||||
PackageCompatibility(**qualifiers)
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
@@ -1140,6 +1159,8 @@ def ConfigureProjectLibBuilder(env):
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
# refill found libs after recursive search
|
||||
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
|
||||
for lb in lib_builders:
|
||||
for deplb in lb.depbuilders[:]:
|
||||
if deplb not in found_lbs:
|
||||
|
||||
@@ -23,10 +23,10 @@ from SCons.Subst import quote_spaces # pylint: disable=import-error
from platformio.compat import IS_WINDOWS, hashlib_encode_data

# There are the next limits depending on a platform:
# - Windows = 8192
# - Windows = 8191
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512

WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
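The constant above is corrected to the real Windows command-line limit of 8191 characters rather than 8192. A small sketch of how such a limit might drive the decision to spill long argument lists into a response file; the function name is hypothetical:

```python
import sys

MAX_LINE_LENGTH = (8191 if sys.platform.startswith("win") else 131072) - 512

def needs_response_file(cmd_parts):
    """Return True when the joined command line would exceed the platform limit."""
    return len(" ".join(cmd_parts)) > MAX_LINE_LENGTH

print(needs_response_file(["gcc", "-c", "main.c"] + ["-Iinclude"] * 2000))
```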
@@ -20,19 +20,23 @@ from platformio.proc import exec_command

@util.memoized()
def GetCompilerType(env):
    if env.subst("$CC").endswith("-gcc"):
def GetCompilerType(env): # pylint: disable=too-many-return-statements
    CC = env.subst("$CC")
    if CC.endswith("-gcc"):
        return "gcc"
    if os.path.basename(CC) == "clang":
        return "clang"
    try:
        sysenv = os.environ.copy()
        sysenv["PATH"] = str(env["ENV"]["PATH"])
        result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
        result = exec_command([CC, "-v"], env=sysenv)
    except OSError:
        return None
    if result["returncode"] != 0:
        return None
    output = "".join([result["out"], result["err"]]).lower()
    if "clang" in output and "LLVM" in output:
    if "clang version" in output:
        return "clang"
    if "gcc" in output:
        return "gcc"
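The updated ``GetCompilerType`` first inspects the executable name and then falls back to parsing ``$CC -v`` output for "clang version" or "gcc". A self-contained sketch of the same idea outside SCons, assuming some C compiler named ``cc`` is on ``PATH``:

```python
import os
import subprocess

def detect_compiler_type(cc="cc"):
    """Best-effort classification of a compiler as 'gcc' or 'clang', else None."""
    if cc.endswith("-gcc"):
        return "gcc"
    if os.path.basename(cc) == "clang":
        return "clang"
    try:
        result = subprocess.run([cc, "-v"], capture_output=True, text=True, check=False)
    except OSError:
        return None
    output = (result.stdout + result.stderr).lower()
    if "clang version" in output:
        return "clang"
    if "gcc" in output:
        return "gcc"
    return None

print(detect_compiler_type())
```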
@@ -75,9 +75,11 @@ def LoadPioPlatform(env):
|
||||
continue
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path,
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
|
||||
@@ -61,7 +61,7 @@ def CleanProject(env, fullclean=False):
|
||||
print("Done cleaning")
|
||||
|
||||
|
||||
def AddTarget( # pylint: disable=too-many-arguments
|
||||
def AddTarget( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
env,
|
||||
name,
|
||||
dependencies,
|
||||
|
||||
@@ -19,7 +19,6 @@ import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from os.path import dirname, isfile
|
||||
from time import time
|
||||
|
||||
import click
|
||||
@@ -60,7 +59,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
@@ -77,7 +76,7 @@ def cli(
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# find project directory on upper level
|
||||
if isfile(project_dir):
|
||||
if os.path.isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
results = []
|
||||
@@ -103,10 +102,21 @@ def cli(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_src_filters = [
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "src_dir")),
|
||||
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
|
||||
]
|
||||
default_src_filters = []
|
||||
for d in (
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
):
|
||||
try:
|
||||
default_src_filters.append("+<%s>" % os.path.relpath(d))
|
||||
except ValueError as exc:
|
||||
# On Windows if sources are located on a different logical drive
|
||||
if not json_output and not silent:
|
||||
click.echo(
|
||||
"Error: Project cannot be analyzed! The project folder `%s`"
|
||||
" is located on a different logical drive\n" % d
|
||||
)
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
env_src_filters = (
|
||||
src_filters
|
||||
@@ -122,9 +132,11 @@ def cli(
|
||||
silent=silent,
|
||||
src_filters=env_src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity"),
|
||||
severity=(
|
||||
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity")
|
||||
),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
platform_packages=env_options.get("platform_packages"),
|
||||
)
|
||||
@@ -137,14 +149,16 @@ def cli(
|
||||
print_processing_header(tool, envname, env_dump)
|
||||
|
||||
ct = CheckToolFactory.new(
|
||||
tool, project_dir, config, envname, tool_options
|
||||
tool, os.getcwd(), config, envname, tool_options
|
||||
)
|
||||
|
||||
result = {"env": envname, "tool": tool, "duration": time()}
|
||||
rc = ct.check(
|
||||
on_defect_callback=None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
on_defect_callback=(
|
||||
None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
)
|
||||
)
|
||||
|
||||
result["defects"] = ct.get_defects()
|
||||
@@ -235,12 +249,12 @@ def collect_component_stats(result):
|
||||
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
|
||||
|
||||
for defect in result.get("defects", []):
|
||||
component = dirname(defect.file) or defect.file
|
||||
component = os.path.dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = dirname(component)
|
||||
component = os.path.dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
||||
return components
|
||||
|
||||
@@ -29,7 +29,7 @@ class DefectItem:
|
||||
SEVERITY_LOW = 4
|
||||
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
|
||||
|
||||
def __init__(
|
||||
def __init__( # pylint: disable=too-many-positional-arguments
|
||||
self,
|
||||
severity,
|
||||
category,
|
||||
|
||||
@@ -18,7 +18,7 @@ from pathlib import Path
|
||||
import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
class PlatformioCLI(click.Group):
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -84,7 +84,7 @@ class PlatformioCLI(click.MultiCommand):
|
||||
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
|
||||
return super().invoke(ctx)
|
||||
|
||||
def list_commands(self, ctx):
|
||||
def list_commands(self, ctx): # pylint: disable=unused-argument
|
||||
return sorted(list(self._find_pio_commands()))
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
|
||||
@@ -63,7 +63,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
|
||||
ctx,
|
||||
src,
|
||||
lib,
|
||||
|
||||
@@ -152,7 +152,7 @@ def cli(ctx, **options):
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
def lib_install( # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
click.secho(
|
||||
@@ -210,7 +210,7 @@ def lib_uninstall(ctx, libraries, save, silent):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_update( # pylint: disable=too-many-arguments
|
||||
def lib_update( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
||||
@@ -159,7 +159,7 @@ def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branc
|
||||
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||
)
|
||||
@click.pass_context
|
||||
def platform_install( # pylint: disable=too-many-arguments
|
||||
def platform_install( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx,
|
||||
platforms,
|
||||
with_package,
|
||||
@@ -224,7 +224,7 @@ def platform_uninstall(ctx, platforms):
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||
def platform_update( # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
|
||||
ctx, platforms, only_check, dry_run, silent, json_output, **_
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
|
||||
@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
    app.reset_settings()
    click.secho("The settings have been reseted!", fg="green")
    click.secho("The settings have been reset!", fg="green")
    ctx.invoke(settings_get)
@@ -19,9 +19,9 @@ import subprocess
|
||||
import click
|
||||
|
||||
from platformio import VERSION, __version__, app, exception
|
||||
from platformio.dependencies import get_pip_dependencies
|
||||
from platformio.http import fetch_remote_content
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.pipdeps import get_pip_dependencies
|
||||
from platformio.proc import get_pythonexe_path
|
||||
|
||||
PYPI_JSON_URL = "https://pypi.org/pypi/platformio/json"
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
import importlib.util
|
||||
import inspect
|
||||
import locale
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
@@ -41,10 +42,14 @@ else:
|
||||
if sys.version_info >= (3, 9):
|
||||
from asyncio import to_thread as aio_to_thread
|
||||
else:
|
||||
from starlette.concurrency import run_in_threadpool as aio_to_thread
|
||||
try:
|
||||
from starlette.concurrency import run_in_threadpool as aio_to_thread
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2 # DO NOT REMOVE IT. ESP8266/ESP32 depend on it
|
||||
PY36 = sys.version_info[0:2] == (3, 6)
|
||||
IS_CYGWIN = sys.platform.startswith("cygwin")
|
||||
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
|
||||
IS_MACOS = sys.platform.startswith("darwin")
|
||||
@@ -132,3 +137,93 @@ def path_to_unicode(path):
|
||||
and custom device monitor filters
|
||||
"""
|
||||
return path
|
||||
|
||||
|
||||
def is_proxy_set(socks=False):
|
||||
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
|
||||
value = os.getenv(var, os.getenv(var.lower()))
|
||||
if not value or (socks and not value.startswith("socks5://")):
|
||||
continue
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def click_launch(url, wait=False, locate=False) -> int:
|
||||
return _click_open_url(url, wait=wait, locate=locate)
|
||||
|
||||
|
||||
def _click_open_url( # pylint: disable=too-many-branches, too-many-return-statements, consider-using-with, import-outside-toplevel, unspecified-encoding
|
||||
url, wait=False, locate=False
|
||||
):
|
||||
"""
|
||||
Issue https://github.com/pallets/click/issues/2868
|
||||
Keep in sync with https://github.com/pallets/click/blob/main/src/click/_termui_impl.py
|
||||
"""
|
||||
import subprocess
|
||||
|
||||
def _unquote_file(url) -> str:
|
||||
from urllib.parse import unquote
|
||||
|
||||
if url.startswith("file://"):
|
||||
url = unquote(url[7:])
|
||||
|
||||
return url
|
||||
|
||||
if IS_MACOS:
|
||||
args = ["open"]
|
||||
if wait:
|
||||
args.append("-W")
|
||||
if locate:
|
||||
args.append("-R")
|
||||
args.append(_unquote_file(url))
|
||||
null = open("/dev/null", "w")
|
||||
try:
|
||||
return subprocess.Popen(args, stderr=null).wait()
|
||||
finally:
|
||||
null.close()
|
||||
elif IS_WINDOWS:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["explorer", f"/select,{url}"]
|
||||
else:
|
||||
args = ["start"]
|
||||
if wait:
|
||||
args.append("/WAIT")
|
||||
args.append("")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args, shell=True)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
elif IS_CYGWIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["cygstart", os.path.dirname(url)]
|
||||
else:
|
||||
args = ["cygstart"]
|
||||
if wait:
|
||||
args.append("-w")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
|
||||
try:
|
||||
if locate:
|
||||
url = os.path.dirname(_unquote_file(url)) or "."
|
||||
else:
|
||||
url = _unquote_file(url)
|
||||
c = subprocess.Popen(["xdg-open", url])
|
||||
if wait:
|
||||
return c.wait()
|
||||
return 0
|
||||
except OSError:
|
||||
if url.startswith(("http://", "https://")) and not locate and not wait:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open(url)
|
||||
return 0
|
||||
return 1
|
||||
|
||||
@@ -57,7 +57,7 @@ from platformio.project.options import ProjectOptions
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
project_dir,
|
||||
project_conf,
|
||||
@@ -86,7 +86,7 @@ def cli(
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(
|
||||
ctx, project_dir, project_config, env_name, False, verbose
|
||||
ctx, os.getcwd(), project_config, env_name, False, verbose
|
||||
)
|
||||
|
||||
configure_args = (
|
||||
@@ -106,12 +106,14 @@ def cli(
|
||||
else:
|
||||
debug_config = _configure(*configure_args)
|
||||
|
||||
_run(project_dir, debug_config, client_extra_args)
|
||||
_run(os.getcwd(), debug_config, client_extra_args)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _configure(ctx, project_config, env_name, load_mode, verbose, client_extra_args):
|
||||
def _configure(
|
||||
ctx, project_config, env_name, load_mode, verbose, client_extra_args
|
||||
): # pylint: disable=too-many-positional-arguments
|
||||
platform = PlatformFactory.from_env(env_name, autoinstall=True)
|
||||
debug_config = DebugConfigFactory.new(
|
||||
platform,
|
||||
@@ -165,7 +167,10 @@ def _configure(ctx, project_config, env_name, load_mode, verbose, client_extra_a
|
||||
|
||||
|
||||
def _run(project_dir, debug_config, client_extra_args):
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
try:
|
||||
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
client = GDBClientProcess(project_dir, debug_config)
|
||||
|
||||
@@ -148,7 +148,9 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
)
|
||||
|
||||
def _load_build_data(self):
|
||||
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
|
||||
data = load_build_metadata(
|
||||
os.getcwd(), self.env_name, cache=True, build_type="debug"
|
||||
)
|
||||
if not data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
return data
|
||||
@@ -194,9 +196,11 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=result.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
(
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
)
|
||||
for a in result.get("arguments", [])
|
||||
],
|
||||
)
|
||||
|
||||
@@ -76,7 +76,7 @@ def get_default_debug_env(config):
|
||||
|
||||
def predebug_project(
|
||||
ctx, project_dir, project_config, env_name, preload, verbose
|
||||
): # pylint: disable=too-many-arguments
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
debug_testname = project_config.get("env:" + env_name, "debug_test")
|
||||
if debug_testname:
|
||||
test_names = list_test_names(project_config)
|
||||
|
||||
@@ -12,20 +12,27 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import platform
import sys

from platformio.compat import is_proxy_set

PY36 = sys.version_info[0:2] == (3, 6)


def get_core_dependencies():
    return {
        "contrib-piohome": "~3.4.2",
        "contrib-pioremote": "~1.0.0",
        "tool-scons": "~4.40801.0",
        "tool-cppcheck": "~1.21100.0",
        "tool-clangtidy": "~1.150005.0",
        "tool-pvs-studio": "~7.18.0",
    }


def get_pip_dependencies():
    core = [
        "bottle == 0.12.*",
        "click >=8.0.4, <9",
        "bottle == 0.13.*",
        "click >=8.0.4, <8.4",  # click 9.0 removes 'protected_args' attribute
        "colorama",
        "marshmallow == 3.*",
        "pyelftools == 0.30",
        "pyelftools >=0.27, <1",
        "pyserial == 3.5.*",  # keep in sync "device/monitor/terminal.py"
        "requests%s == 2.*" % ("[socks]" if is_proxy_set(socks=True) else ""),
        "semantic_version == 2.10.*",
@@ -35,16 +42,16 @@ def get_pip_dependencies():
    home = [
        # PIO Home requirements
        "ajsonrpc == 1.2.*",
        "starlette >=0.19, <0.36",
        "uvicorn %s" % ("== 0.16.0" if PY36 else ">=0.16, <0.26"),
        "starlette >=0.19, <0.51",
        "uvicorn >=0.16, <0.39",
        "wsproto == 1.*",
    ]

    extra = []

    # issue #4702; Broken "requests/charset_normalizer" on macOS ARM
    if platform.system() == "Darwin" and "arm" in platform.machine().lower():
        extra.append("chardet>=3.0.2,<6")
    extra.append(
        'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
    )

    # issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
    try:
@@ -60,12 +67,3 @@ def get_pip_dependencies():
        pass

    return core + home + extra


def is_proxy_set(socks=False):
    for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
        value = os.getenv(var, os.getenv(var.lower()))
        if not value or (socks and not value.startswith("socks5://")):
            continue
        return True
    return False
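The dependency change above replaces a runtime ``platform.system()`` check with a PEP 508 environment marker on the ``chardet`` requirement. A sketch of how such a marker evaluates, assuming the third-party ``packaging`` library is installed (it is not part of this diff):

```python
# Sketch only: evaluating the PEP 508 marker used for the macOS ARM chardet pin.
from packaging.markers import Marker

marker = Marker('platform_system == "Darwin" and "arm" in platform_machine')
print(marker.evaluate())  # True only on Apple Silicon style environments
```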
@@ -89,7 +89,7 @@ def is_serial_port_ready(port, timeout=1):


class SerialPortFinder:
    def __init__( # pylint: disable=too-many-arguments
    def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
        self,
        board_config=None,
        upload_protocol=None,
@@ -133,6 +133,10 @@ class SerialPortFinder:

    def find(self, initial_port=None):
        if initial_port:
            # Treat any URL (contains '://') as a literal port
            if "://" in initial_port:
                return initial_port
            # Otherwise fall back to existing wildcard logic
            if not is_pattern_port(initial_port):
                return initial_port
            return self.match_serial_port(initial_port)
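The new ``find()`` logic treats anything containing ``://`` (for example ``rfc2217://host:port``) as a literal port and only then falls back to wildcard matching (issue #5225). A minimal sketch of that decision order, with ``fnmatch`` standing in for the real pattern matcher and the port list purely illustrative:

```python
import fnmatch

def resolve_port(initial_port, available=("COM3", "/dev/ttyUSB0")):
    """Return a fully-qualified URL verbatim; otherwise try wildcard matching."""
    if "://" in initial_port:          # e.g. rfc2217://192.168.0.10:4000
        return initial_port
    if not any(ch in initial_port for ch in "*?[]"):
        return initial_port            # plain device name, use as-is
    matches = [p for p in available if fnmatch.fnmatch(p, initial_port)]
    return matches[0] if matches else None

print(resolve_port("rfc2217://host:4000"))
print(resolve_port("/dev/ttyUSB*"))
```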
@@ -144,9 +144,9 @@ def list_mdns_services():
|
||||
if service.properties:
|
||||
try:
|
||||
properties = {
|
||||
k.decode("utf8"): v.decode("utf8")
|
||||
if isinstance(v, bytes)
|
||||
else v
|
||||
k.decode("utf8"): (
|
||||
v.decode("utf8") if isinstance(v, bytes) else v
|
||||
)
|
||||
for k, v in service.properties.items()
|
||||
}
|
||||
json.dumps(properties)
|
||||
|
||||
@@ -58,7 +58,7 @@ from platformio.project.options import ProjectOptions
|
||||
"--encoding",
|
||||
help=(
|
||||
"Set the encoding for the serial port "
|
||||
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
|
||||
"(e.g. hexlify, Latin-1, UTF-8) [default=%s]"
|
||||
% ProjectOptions["env.monitor_encoding"].default
|
||||
),
|
||||
)
|
||||
@@ -125,9 +125,11 @@ def device_monitor_cmd(**options):
|
||||
options = apply_project_monitor_options(options, project_options)
|
||||
register_filters(platform=platform, options=options)
|
||||
options["port"] = SerialPortFinder(
|
||||
board_config=platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None,
|
||||
board_config=(
|
||||
platform.board_config(project_options.get("board"))
|
||||
if platform and project_options.get("board")
|
||||
else None
|
||||
),
|
||||
upload_protocol=project_options.get("upload_protocol"),
|
||||
ensure_ready=True,
|
||||
).find(initial_port=options["port"])
|
||||
|
||||
@@ -25,11 +25,12 @@ from platformio.project.config import ProjectConfig
|
||||
class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def __init__(self, options=None):
|
||||
"""Called by PlatformIO to pass context"""
|
||||
miniterm.Transform.__init__(self)
|
||||
super().__init__()
|
||||
|
||||
self.options = options or {}
|
||||
self.project_dir = self.options.get("project_dir")
|
||||
self.environment = self.options.get("environment")
|
||||
self._running_terminal = None
|
||||
|
||||
self.config = ProjectConfig.get_instance()
|
||||
if not self.environment:
|
||||
@@ -47,6 +48,12 @@ class DeviceMonitorFilterBase(miniterm.Transform):
|
||||
def NAME(self):
|
||||
raise NotImplementedError("Please declare NAME attribute for the filter class")
|
||||
|
||||
def set_running_terminal(self, terminal):
|
||||
self._running_terminal = terminal
|
||||
|
||||
def get_running_terminal(self):
|
||||
return self._running_terminal
|
||||
|
||||
|
||||
def register_filters(platform=None, options=None):
|
||||
# project filters
|
||||
|
||||
@@ -24,12 +24,18 @@ class Hexlify(DeviceMonitorFilterBase):
        super().__init__(*args, **kwargs)
        self._counter = 0

    def set_running_terminal(self, terminal):
        # force to Latin-1, issue #4732
        if terminal.input_encoding == "UTF-8":
            terminal.set_rx_encoding("Latin-1")
        super().set_running_terminal(terminal)

    def rx(self, text):
        result = ""
        for b in serial.iterbytes(text):
        for c in serial.iterbytes(text):
            if (self._counter % 16) == 0:
                result += "\n{:04X} | ".format(self._counter)
            asciicode = ord(b)
            asciicode = ord(c)
            if asciicode <= 255:
                result += "{:02X} ".format(asciicode)
            else:
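The ``Hexlify`` filter above prints sixteen byte values per row behind a running offset and forces Latin-1 so bytes above 0x7F keep a one-to-one mapping (issue #4732). A standalone sketch of the same formatting:

```python
def hexlify(data: bytes) -> str:
    """Format bytes as rows of 16 hex values prefixed with a hex offset."""
    out = ""
    for offset, value in enumerate(data):
        if offset % 16 == 0:
            out += "\n{:04X} | ".format(offset)
        out += "{:02X} ".format(value)
    return out

print(hexlify("Hello, Sérial!".encode("latin-1")))
```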
@@ -110,6 +110,12 @@ def new_terminal(options):
|
||||
term.raw = options["raw"]
|
||||
term.set_rx_encoding(options["encoding"])
|
||||
term.set_tx_encoding(options["encoding"])
|
||||
for ts in (term.tx_transformations, term.rx_transformations):
|
||||
for t in ts:
|
||||
try:
|
||||
t.set_running_terminal(term)
|
||||
except AttributeError:
|
||||
pass
|
||||
return term
|
||||
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ import socket
|
||||
|
||||
import click
|
||||
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.compat import IS_WINDOWS, click_launch
|
||||
from platformio.home.run import run_server
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
|
||||
@@ -86,7 +86,7 @@ def cli(port, host, no_open, shutdown_timeout, session_id):
|
||||
"PlatformIO Home server is already started in another process.", fg="yellow"
|
||||
)
|
||||
if not no_open:
|
||||
click.launch(home_url)
|
||||
click_launch(home_url)
|
||||
return
|
||||
|
||||
run_server(
|
||||
|
||||
@@ -18,11 +18,9 @@ import os
|
||||
import shutil
|
||||
from functools import cmp_to_key
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.compat import aio_to_thread
|
||||
from platformio.compat import aio_to_thread, click_launch
|
||||
from platformio.device.list.util import list_logical_devices
|
||||
from platformio.home.rpc.handlers.base import BaseRPCHandler
|
||||
from platformio.http import HTTPSession, ensure_internet_on
|
||||
@@ -84,15 +82,15 @@ class OSRPC(BaseRPCHandler):
|
||||
|
||||
@staticmethod
|
||||
def open_url(url):
|
||||
return click.launch(url)
|
||||
return click_launch(url)
|
||||
|
||||
@staticmethod
|
||||
def reveal_file(path):
|
||||
return click.launch(path, locate=True)
|
||||
return click_launch(path, locate=True)
|
||||
|
||||
@staticmethod
|
||||
def open_file(path):
|
||||
return click.launch(path)
|
||||
return click_launch(path)
|
||||
|
||||
@staticmethod
|
||||
def call_path_module_func(name, args, **kwargs):
|
||||
|
||||
@@ -372,15 +372,19 @@ class ProjectRPC(BaseRPCHandler):
|
||||
|
||||
return dict(
|
||||
platform=dict(
|
||||
ownername=platform_pkg.metadata.spec.owner
|
||||
if platform_pkg.metadata.spec
|
||||
else None,
|
||||
ownername=(
|
||||
platform_pkg.metadata.spec.owner
|
||||
if platform_pkg.metadata.spec
|
||||
else None
|
||||
),
|
||||
name=platform.name,
|
||||
title=platform.title,
|
||||
version=str(platform_pkg.metadata.version),
|
||||
),
|
||||
board=platform.board_config(board_id).get_brief_data()
|
||||
if board_id
|
||||
else None,
|
||||
board=(
|
||||
platform.board_config(board_id).get_brief_data()
|
||||
if board_id
|
||||
else None
|
||||
),
|
||||
frameworks=frameworks or None,
|
||||
)
|
||||
|
||||
@@ -21,8 +21,8 @@ from urllib3.util.retry import Retry
|
||||
|
||||
from platformio import __check_internet_hosts__, app, util
|
||||
from platformio.cache import ContentCache, cleanup_content_cache
|
||||
from platformio.compat import is_proxy_set
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
from platformio.pipdeps import is_proxy_set
|
||||
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
@@ -63,9 +63,11 @@ class HTTPSession(requests.Session):
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
return super().request(
|
||||
method,
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url),
|
||||
(
|
||||
url
|
||||
if url.startswith("http") or not self._x_base_url
|
||||
else urljoin(self._x_base_url, url)
|
||||
),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
@@ -188,10 +190,11 @@ class HTTPClient:
|
||||
@util.memoized(expire="10s")
|
||||
def _internet_on():
|
||||
timeout = 2
|
||||
use_proxy = is_proxy_set()
|
||||
socket.setdefaulttimeout(timeout)
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
if is_proxy_set():
|
||||
if use_proxy:
|
||||
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
return True
|
||||
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
|
||||
@@ -201,6 +204,15 @@ def _internet_on():
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# falling back to HTTPs, issue #4980
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
requests.get("https://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
except requests.exceptions.RequestException:
|
||||
pass
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
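A simplified sketch of the connectivity-check order implied by these hunks: cheap HTTP probes first, then an HTTPS retry as a fallback (issue #4980), returning False only when every host fails. Hosts and timeouts below are placeholders, and the real code additionally uses proxy detection and raw socket connects.

```
import requests

CHECK_HOSTS = ["example.com", "example.org"]  # placeholder hosts
TIMEOUT = 2


def internet_on():
    for scheme in ("http", "https"):
        for host in CHECK_HOSTS:
            try:
                requests.get(
                    "%s://%s" % (scheme, host),
                    allow_redirects=False,
                    timeout=TIMEOUT,
                )
                return True
            except requests.exceptions.RequestException:
                continue  # try the next host / scheme
    return False


print(internet_on())
```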
|
||||
|
||||
@@ -54,7 +54,7 @@ def package_exec_cmd(obj, package, call, args):
|
||||
os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()
|
||||
|
||||
# inject current python interpreter on Windows
|
||||
if args[0].endswith(".py"):
|
||||
if args and args[0].endswith(".py"):
|
||||
args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
|
||||
if not os.path.exists(args[1]):
|
||||
args[1] = where_is_program(args[1])
|
||||
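A minimal illustration of the guard added here: index `args[0]` only when the argument list is non-empty, so an empty invocation no longer raises IndexError. The helper below is hypothetical and only mimics the interpreter-injection step.

```
import sys


def build_command(args):
    args = list(args)
    if args and args[0].endswith(".py"):
        # prepend the current Python interpreter for .py entry points
        args = [sys.executable] + args
    return args


print(build_command(()))            # [] -- empty input no longer crashes
print(build_command(("tool.py",)))  # [<python executable>, 'tool.py']
```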
|
||||
@@ -222,9 +222,11 @@ def _install_project_env_libraries(project_env, options):
|
||||
|
||||
env_lm = LibraryPackageManager(
|
||||
os.path.join(config.get("platformio", "libdeps_dir"), project_env),
|
||||
compatibility=PackageCompatibility(**compatibility_qualifiers)
|
||||
if compatibility_qualifiers
|
||||
else None,
|
||||
compatibility=(
|
||||
PackageCompatibility(**compatibility_qualifiers)
|
||||
if compatibility_qualifiers
|
||||
else None
|
||||
),
|
||||
)
|
||||
private_lm = LibraryPackageManager(
|
||||
os.path.join(config.get("platformio", "lib_dir"))
|
||||
@@ -295,7 +297,11 @@ def _install_project_private_library_deps(private_pkg, private_lm, env_lm, optio
|
||||
if not spec.external and not spec.owner:
|
||||
continue
|
||||
pkg = private_lm.get_package(spec)
|
||||
if not pkg and not env_lm.get_package(spec):
|
||||
if (
|
||||
not pkg
|
||||
and not private_lm.get_package(spec)
|
||||
and not env_lm.get_package(spec)
|
||||
):
|
||||
pkg = env_lm.install(
|
||||
spec,
|
||||
skip_dependencies=True,
|
||||
|
||||
@@ -82,10 +82,11 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
help="Do not show interactive prompt",
|
||||
hidden=True,
|
||||
)
|
||||
def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
|
||||
def package_publish_cmd( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals
|
||||
package, owner, typex, released_at, private, notify, no_interactive, non_interactive
|
||||
):
|
||||
click.secho("Preparing a package...", fg="cyan")
|
||||
package = os.path.abspath(package)
|
||||
no_interactive = no_interactive or non_interactive
|
||||
owner = owner or AccountClient().get_logged_username()
|
||||
do_not_pack = (
|
||||
|
||||
@@ -65,10 +65,12 @@ def print_search_item(item):
|
||||
click.echo(
|
||||
"%s • %s • Published on %s"
|
||||
% (
|
||||
item["type"].capitalize()
|
||||
if item["tier"] == "community"
|
||||
else click.style(
|
||||
("%s %s" % (item["tier"], item["type"])).title(), bold=True
|
||||
(
|
||||
item["type"].capitalize()
|
||||
if item["tier"] == "community"
|
||||
else click.style(
|
||||
("%s %s" % (item["tier"], item["type"])).title(), bold=True
|
||||
)
|
||||
),
|
||||
item["version"]["name"],
|
||||
util.parse_datetime(item["version"]["released_at"]).strftime("%c"),
|
||||
|
||||
@@ -111,7 +111,7 @@ def uninstall_project_env_dependencies(project_env, options=None):
|
||||
uninstalled_conds.append(
|
||||
_uninstall_project_env_custom_tools(project_env, options)
|
||||
)
|
||||
# custom ibraries
|
||||
# custom libraries
|
||||
if options.get("libraries"):
|
||||
uninstalled_conds.append(
|
||||
_uninstall_project_env_custom_libraries(project_env, options)
|
||||
|
||||
@@ -110,7 +110,7 @@ def update_project_env_dependencies(project_env, options=None):
|
||||
# custom tools
|
||||
if options.get("tools"):
|
||||
updated_conds.append(_update_project_env_custom_tools(project_env, options))
|
||||
# custom ibraries
|
||||
# custom libraries
|
||||
if options.get("libraries"):
|
||||
updated_conds.append(_update_project_env_custom_libraries(project_env, options))
|
||||
# declared dependencies
|
||||
|
||||
@@ -34,7 +34,7 @@ class FileDownloader:
|
||||
url,
|
||||
stream=True,
|
||||
)
|
||||
if self._http_response.status_code != 200:
|
||||
if self._http_response.status_code not in (200, 203):
|
||||
raise PackageException(
|
||||
"Got the unrecognized status code '{0}' when downloaded {1}".format(
|
||||
self._http_response.status_code, url
|
||||
|
||||
@@ -98,9 +98,13 @@ class PackageManagerInstallMixin:
|
||||
else:
|
||||
pkg = self.install_from_registry(
|
||||
spec,
|
||||
search_qualifiers=compatibility.to_search_qualifiers()
|
||||
if compatibility
|
||||
else None,
|
||||
search_qualifiers=(
|
||||
compatibility.to_search_qualifiers(
|
||||
["platforms", "frameworks", "authors"]
|
||||
)
|
||||
if compatibility
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
if not pkg or not pkg.metadata:
|
||||
|
||||
@@ -280,11 +280,15 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in
|
||||
|
||||
# external "URL" mismatch
|
||||
if spec.external:
|
||||
# local folder mismatch
|
||||
if os.path.abspath(spec.uri) == os.path.abspath(pkg.path) or (
|
||||
# local/symlinked folder mismatch
|
||||
check_conds = [
|
||||
os.path.abspath(spec.uri) == os.path.abspath(pkg.path),
|
||||
spec.uri.startswith("file://")
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:])
|
||||
):
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:]),
|
||||
spec.uri.startswith("symlink://")
|
||||
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[10:]),
|
||||
]
|
||||
if any(check_conds):
|
||||
return True
|
||||
if spec.uri != pkg.metadata.spec.uri:
|
||||
return False
|
||||
|
||||
@@ -14,7 +14,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from platformio import __core_packages__, exception
|
||||
from platformio import exception
|
||||
from platformio.dependencies import get_core_dependencies
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
@@ -23,7 +24,7 @@ from platformio.package.meta import PackageSpec
|
||||
def get_installed_core_packages():
|
||||
result = []
|
||||
pm = ToolPackageManager()
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
@@ -32,11 +33,11 @@ def get_installed_core_packages():
|
||||
|
||||
|
||||
def get_core_package_dir(name, spec=None, auto_install=True):
|
||||
if name not in __core_packages__:
|
||||
if name not in get_core_dependencies():
|
||||
raise exception.PlatformioException("Please upgrade PlatformIO Core")
|
||||
pm = ToolPackageManager()
|
||||
spec = spec or PackageSpec(
|
||||
owner="platformio", name=name, requirements=__core_packages__[name]
|
||||
owner="platformio", name=name, requirements=get_core_dependencies()[name]
|
||||
)
|
||||
pkg = pm.get_package(spec)
|
||||
if pkg:
|
||||
@@ -50,7 +51,7 @@ def get_core_package_dir(name, spec=None, auto_install=True):
|
||||
|
||||
def update_core_packages():
|
||||
pm = ToolPackageManager()
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
try:
|
||||
pm.update(spec, spec)
|
||||
@@ -65,7 +66,7 @@ def remove_unnecessary_core_packages(dry_run=False):
|
||||
pm = ToolPackageManager()
|
||||
best_pkg_versions = {}
|
||||
|
||||
for name, requirements in __core_packages__.items():
|
||||
for name, requirements in get_core_dependencies().items():
|
||||
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
|
||||
pkg = pm.get_package(spec)
|
||||
if not pkg:
|
||||
|
||||
@@ -38,7 +38,7 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
|
||||
def manifest_names(self):
|
||||
return PackageType.get_manifest_map()[PackageType.PLATFORM]
|
||||
|
||||
def install( # pylint: disable=arguments-differ,too-many-arguments
|
||||
def install( # pylint: disable=arguments-differ,too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
spec,
|
||||
skip_dependencies=False,
|
||||
|
||||
@@ -294,9 +294,11 @@ class BaseManifestParser:
|
||||
if not matched_files:
|
||||
continue
|
||||
result[root] = dict(
|
||||
name="Examples"
|
||||
if root == examples_dir
|
||||
else os.path.relpath(root, examples_dir),
|
||||
name=(
|
||||
"Examples"
|
||||
if root == examples_dir
|
||||
else os.path.relpath(root, examples_dir)
|
||||
),
|
||||
base=os.path.relpath(root, package_dir),
|
||||
files=matched_files,
|
||||
)
|
||||
|
||||
@@ -276,7 +276,7 @@ class ManifestSchema(BaseSchema):
|
||||
@staticmethod
|
||||
@memoized(expire="1h")
|
||||
def load_spdx_licenses():
|
||||
version = "3.22"
|
||||
version = "3.27.0"
|
||||
spdx_data_url = (
|
||||
"https://raw.githubusercontent.com/spdx/license-list-data/"
|
||||
f"v{version}/json/licenses.json"
|
||||
|
||||
@@ -65,7 +65,14 @@ class PackageType:
|
||||
|
||||
|
||||
class PackageCompatibility:
|
||||
KNOWN_QUALIFIERS = ("platforms", "frameworks", "authors")
|
||||
KNOWN_QUALIFIERS = (
|
||||
"owner",
|
||||
"name",
|
||||
"version",
|
||||
"platforms",
|
||||
"frameworks",
|
||||
"authors",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_dependency(cls, dependency):
|
||||
@@ -89,19 +96,45 @@ class PackageCompatibility:
|
||||
def __repr__(self):
|
||||
return "PackageCompatibility <%s>" % self.qualifiers
|
||||
|
||||
def to_search_qualifiers(self):
|
||||
return self.qualifiers
|
||||
def to_search_qualifiers(self, fields=None):
|
||||
result = {}
|
||||
for name, value in self.qualifiers.items():
|
||||
if not fields or name in fields:
|
||||
result[name] = value
|
||||
return result
|
||||
|
||||
def is_compatible(self, other):
|
||||
assert isinstance(other, PackageCompatibility)
|
||||
for key, value in self.qualifiers.items():
|
||||
for key, current_value in self.qualifiers.items():
|
||||
other_value = other.qualifiers.get(key)
|
||||
if not value or not other_value:
|
||||
if not current_value or not other_value:
|
||||
continue
|
||||
if not items_in_list(value, other_value):
|
||||
if any(isinstance(v, list) for v in (current_value, other_value)):
|
||||
if not items_in_list(current_value, other_value):
|
||||
return False
|
||||
continue
|
||||
if key == "version":
|
||||
if not self._compare_versions(current_value, other_value):
|
||||
return False
|
||||
continue
|
||||
if current_value != other_value:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _compare_versions(self, current, other):
|
||||
if current == other:
|
||||
return True
|
||||
try:
|
||||
version = (
|
||||
other
|
||||
if isinstance(other, semantic_version.Version)
|
||||
else cast_version_to_semver(other)
|
||||
)
|
||||
return version in semantic_version.SimpleSpec(current)
|
||||
except ValueError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
class PackageOutdatedResult:
|
||||
UPDATE_INCREMENT_MAJOR = "major"
|
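A hedged sketch of the compatibility-matching idea in the hunk above: list-valued qualifiers must intersect, a "version" qualifier is treated as a semver range, and everything else must match exactly. The intersection check is simplified here, and the third-party `semantic_version` package is assumed, as in the diff.

```
import semantic_version


def qualifiers_compatible(mine: dict, other: dict) -> bool:
    for key, value in mine.items():
        other_value = other.get(key)
        if not value or not other_value:
            continue  # missing qualifiers never block compatibility
        if isinstance(value, list) or isinstance(other_value, list):
            mine_items = value if isinstance(value, list) else [value]
            other_items = other_value if isinstance(other_value, list) else [other_value]
            if not set(mine_items) & set(other_items):
                return False
            continue
        if key == "version":
            if semantic_version.Version(other_value) not in semantic_version.SimpleSpec(value):
                return False
            continue
        if value != other_value:
            return False
    return True


print(qualifiers_compatible(
    {"platforms": ["espressif32"], "version": ">=1.2.0,<2.0.0"},
    {"platforms": ["espressif32", "atmelavr"], "version": "1.4.3"},
))  # True
```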
||||
@@ -163,7 +196,7 @@ class PackageOutdatedResult:
|
||||
|
||||
|
||||
class PackageSpec: # pylint: disable=too-many-instance-attributes
|
||||
def __init__( # pylint: disable=redefined-builtin,too-many-arguments
|
||||
def __init__( # pylint: disable=redefined-builtin,too-many-arguments,too-many-positional-arguments
|
||||
self, raw=None, owner=None, id=None, name=None, requirements=None, uri=None
|
||||
):
|
||||
self._requirements = None
|
||||
@@ -363,7 +396,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
|
||||
parts.path.endswith(".git"),
|
||||
# Handle GitHub URL (https://github.com/user/package)
|
||||
parts.netloc in ("github.com", "gitlab.com", "bitbucket.com")
|
||||
and not parts.path.endswith((".zip", ".tar.gz")),
|
||||
and not parts.path.endswith((".zip", ".tar.gz", ".tar.xz")),
|
||||
]
|
||||
hg_conditions = [
|
||||
# Handle Developer Mbed URL
|
||||
@@ -485,9 +518,11 @@ class PackageItem:
|
||||
|
||||
def __eq__(self, other):
|
||||
conds = [
|
||||
os.path.realpath(self.path) == os.path.realpath(other.path)
|
||||
if self.path and other.path
|
||||
else self.path == other.path,
|
||||
(
|
||||
os.path.realpath(self.path) == os.path.realpath(other.path)
|
||||
if self.path and other.path
|
||||
else self.path == other.path
|
||||
),
|
||||
self.metadata == other.metadata,
|
||||
]
|
||||
return all(conds)
|
||||
|
||||
@@ -49,6 +49,7 @@ class PackagePacker:
|
||||
"__*",
|
||||
".DS_Store",
|
||||
".vscode",
|
||||
"**/.vscode/",
|
||||
".cache",
|
||||
"**/.cache",
|
||||
"**/__pycache__",
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from tarfile import open as tarfile_open
|
||||
from time import mktime
|
||||
from zipfile import ZipFile
|
||||
@@ -82,19 +83,23 @@ class TARArchiver(BaseArchiver):
|
||||
).startswith(base)
|
||||
|
||||
def extract_item(self, item, dest_dir):
|
||||
if sys.version_info >= (3, 12):
|
||||
self._afo.extract(item, dest_dir, filter="data")
|
||||
return self.after_extract(item, dest_dir)
|
||||
|
||||
# apply custom security logic
|
||||
dest_dir = self.resolve_path(dest_dir)
|
||||
bad_conds = [
|
||||
self.is_bad_path(item.name, dest_dir),
|
||||
self.is_link(item) and self.is_bad_link(item, dest_dir),
|
||||
]
|
||||
if not any(bad_conds):
|
||||
super().extract_item(item, dest_dir)
|
||||
else:
|
||||
click.secho(
|
||||
if any(bad_conds):
|
||||
return click.secho(
|
||||
"Blocked insecure item `%s` from TAR archive" % item.name,
|
||||
fg="red",
|
||||
err=True,
|
||||
)
|
||||
return super().extract_item(item, dest_dir)
|
||||
|
||||
|
||||
class ZIPArchiver(BaseArchiver):
|
||||
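A sketch of the Python 3.12+ behaviour relied on above: tarfile's built-in "data" extraction filter rejects absolute paths, parent traversal and dangerous links, so custom path checks are only needed on older interpreters. The in-memory archive and file names below exist only for the demo.

```
import io
import sys
import tarfile
import tempfile

# build a tiny .tar.gz in memory
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w:gz") as tar:
    payload = b"hello"
    info = tarfile.TarInfo(name="pkg/readme.txt")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r:gz") as tar, tempfile.TemporaryDirectory() as dest:
    for member in tar:
        if sys.version_info >= (3, 12):
            tar.extract(member, dest, filter="data")  # stdlib does the security checks
        else:
            tar.extract(member, dest)  # custom bad-path/bad-link checks would go here
```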
@@ -147,6 +152,7 @@ class FileUnpacker:
|
||||
magic_map = {
|
||||
b"\x1f\x8b\x08": TARArchiver,
|
||||
b"\x42\x5a\x68": TARArchiver,
|
||||
b"\xfd\x37\x7a\x58\x5a\x00": TARArchiver,
|
||||
b"\x50\x4b\x03\x04": ZIPArchiver,
|
||||
}
|
||||
magic_len = max(len(k) for k in magic_map)
|
||||
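A small sketch of the magic-byte sniffing the unpacker relies on: read the first few bytes of a file and map them to an archive handler; the xz signature (`fd 37 7a 58 5a 00`) is the one added in this hunk. Handler names here are plain strings for illustration.

```
MAGIC_MAP = {
    b"\x1f\x8b\x08": "tar.gz",
    b"\x42\x5a\x68": "tar.bz2",
    b"\xfd\x37\x7a\x58\x5a\x00": "tar.xz",
    b"\x50\x4b\x03\x04": "zip",
}
MAGIC_LEN = max(len(k) for k in MAGIC_MAP)


def detect_archive_type(header: bytes):
    for magic, kind in MAGIC_MAP.items():
        if header.startswith(magic):
            return kind
    return None


print(detect_archive_type(b"\xfd\x37\x7a\x58\x5a\x00" + b"\x00" * 10))  # tar.xz
```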
|
||||
@@ -44,7 +44,7 @@ def cast_version_to_semver(value, force=True, raise_exception=False):
|
||||
|
||||
def pepver_to_semver(pepver):
|
||||
return cast_version_to_semver(
|
||||
re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
|
||||
re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)
|
||||
)
|
||||
|
||||
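A quick illustration of the regex shown above: a PEP 440 pre/post/dev suffix is rewritten into a semver-style pre-release tag, and passing `count=1` as a keyword avoids the positional-argument deprecation in newer Python versions.

```
import re


def pepver_to_semverish(pepver: str) -> str:
    # rewrite e.g. "6.1.8a2" -> "6.1.8-a.2"
    return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)


print(pepver_to_semverish("6.1.8a2"))     # 6.1.8-a.2
print(pepver_to_semverish("6.1.9.dev0"))  # 6.1.9-dev.0
```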
|
||||
|
||||
@@ -44,7 +44,7 @@ class PlatformRunMixin:
|
||||
value = json.loads(value)
|
||||
return value
|
||||
|
||||
def run( # pylint: disable=too-many-arguments
|
||||
def run( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self, variables, targets, silent, verbose, jobs
|
||||
):
|
||||
assert isinstance(variables, dict)
|
||||
@@ -116,9 +116,9 @@ class PlatformRunMixin:
|
||||
args,
|
||||
stdout=proc.BuildAsyncPipe(
|
||||
line_callback=self._on_stdout_line,
|
||||
data_callback=lambda data: None
|
||||
if self.silent
|
||||
else _write_and_flush(sys.stdout, data),
|
||||
data_callback=lambda data: (
|
||||
None if self.silent else _write_and_flush(sys.stdout, data)
|
||||
),
|
||||
),
|
||||
stderr=proc.BuildAsyncPipe(
|
||||
line_callback=self._on_stderr_line,
|
||||
|
||||
@@ -33,7 +33,7 @@ class PlatformFactory:
|
||||
|
||||
@staticmethod
|
||||
def load_platform_module(name, path):
|
||||
# backward compatibiility with the legacy dev-platforms
|
||||
# backward compatibility with the legacy dev-platforms
|
||||
sys.modules["platformio.managers.platform"] = base
|
||||
try:
|
||||
return load_python_module("platformio.platform.%s" % name, path)
|
||||
|
||||
@@ -82,9 +82,11 @@ def lint_configuration(json_output=False):
|
||||
(
|
||||
click.style(error["type"], fg="red"),
|
||||
error["message"],
|
||||
error.get("source", "") + (f":{error.get('lineno')}")
|
||||
if "lineno" in error
|
||||
else "",
|
||||
(
|
||||
error.get("source", "") + (f":{error.get('lineno')}")
|
||||
if "lineno" in error
|
||||
else ""
|
||||
),
|
||||
)
|
||||
for error in errors
|
||||
],
|
||||
|
||||
@@ -68,7 +68,7 @@ def validate_boards(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("--no-install-dependencies", is_flag=True)
|
||||
@click.option("--env-prefix", default="")
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
def project_init_cmd(
|
||||
def project_init_cmd( # pylint: disable=too-many-positional-arguments
|
||||
project_dir,
|
||||
boards,
|
||||
ide,
|
||||
@@ -79,6 +79,7 @@ def project_init_cmd(
|
||||
env_prefix,
|
||||
silent,
|
||||
):
|
||||
project_dir = os.path.abspath(project_dir)
|
||||
is_new_project = not is_platformio_project(project_dir)
|
||||
if is_new_project:
|
||||
if not silent:
|
||||
@@ -200,9 +201,7 @@ new version when next recompiled. The header file eliminates the labor of
|
||||
finding and changing all the copies as well as the risk that a failure to
|
||||
find one copy will result in inconsistencies within a program.
|
||||
|
||||
In C, the usual convention is to give header files names that end with `.h'.
|
||||
It is most portable to use only letters, digits, dashes, and underscores in
|
||||
header file names, and at most one dot.
|
||||
In C, the convention is to give header files names that end with `.h'.
|
||||
|
||||
Read more about using header files in official GCC documentation:
|
||||
|
||||
@@ -221,12 +220,12 @@ def init_lib_readme(lib_dir):
|
||||
fp.write(
|
||||
"""
|
||||
This directory is intended for project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link into executable file.
|
||||
PlatformIO will compile them to static libraries and link into the executable file.
|
||||
|
||||
The source code of each library should be placed in a an own separate directory
|
||||
("lib/your_library_name/[here are source files]").
|
||||
The source code of each library should be placed in a separate directory
|
||||
("lib/your_library_name/[Code]").
|
||||
|
||||
For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
For example, see the structure of the following example libraries `Foo` and `Bar`:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
@@ -236,7 +235,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| | |- library.json (optional. for custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
@@ -248,7 +247,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
|--src
|
||||
|- main.c
|
||||
|
||||
and a contents of `src/main.c`:
|
||||
Example contents of `src/main.c` using Foo and Bar:
|
||||
```
|
||||
#include <Foo.h>
|
||||
#include <Bar.h>
|
||||
@@ -260,8 +259,8 @@ int main (void)
|
||||
|
||||
```
|
||||
|
||||
PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries scanning project source files.
|
||||
The PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries by scanning project source files.
|
||||
|
||||
More information about PlatformIO Library Dependency Finder
|
||||
- https://docs.platformio.org/page/librarymanager/ldf.html
|
||||
|
||||
@@ -347,7 +347,7 @@ class ProjectConfigBase:
|
||||
if section is None:
|
||||
if option in self.BUILTIN_VARS:
|
||||
return self.BUILTIN_VARS[option]()
|
||||
# SCons varaibles
|
||||
# SCons variables
|
||||
return f"${{{option}}}"
|
||||
|
||||
# handle system environment variables
|
||||
|
||||
@@ -131,45 +131,47 @@ def compute_project_checksum(config):
|
||||
return checksum.hexdigest()
|
||||
|
||||
|
||||
def load_build_metadata(project_dir, env_or_envs, cache=False, debug=False):
|
||||
def load_build_metadata(project_dir, env_or_envs, cache=False, build_type=None):
|
||||
assert env_or_envs
|
||||
env_names = env_or_envs
|
||||
if not isinstance(env_names, list):
|
||||
env_names = [env_names]
|
||||
|
||||
with fs.cd(project_dir):
|
||||
result = _get_cached_build_metadata(project_dir, env_names) if cache else {}
|
||||
result = _get_cached_build_metadata(env_names) if cache else {}
|
||||
# incompatible build-type data
|
||||
for name in list(result.keys()):
|
||||
build_type = result[name].get("build_type", "")
|
||||
outdated_conds = [
|
||||
not build_type,
|
||||
debug and "debug" not in build_type,
|
||||
not debug and "debug" in build_type,
|
||||
]
|
||||
if any(outdated_conds):
|
||||
del result[name]
|
||||
for env_name in list(result.keys()):
|
||||
if build_type is None:
|
||||
build_type = ProjectConfig.get_instance().get(
|
||||
f"env:{env_name}", "build_type"
|
||||
)
|
||||
if result[env_name].get("build_type", "") != build_type:
|
||||
del result[env_name]
|
||||
missed_env_names = set(env_names) - set(result.keys())
|
||||
if missed_env_names:
|
||||
result.update(_load_build_metadata(project_dir, missed_env_names, debug))
|
||||
result.update(
|
||||
_load_build_metadata(project_dir, missed_env_names, build_type)
|
||||
)
|
||||
|
||||
if not isinstance(env_or_envs, list) and env_or_envs in result:
|
||||
return result[env_or_envs]
|
||||
return result or None
|
||||
|
||||
|
||||
# Backward compatibiility with dev-platforms
|
||||
# Backward compatibility with dev-platforms
|
||||
load_project_ide_data = load_build_metadata
|
||||
|
||||
|
||||
def _load_build_metadata(project_dir, env_names, debug=False):
|
||||
def _load_build_metadata(project_dir, env_names, build_type=None):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio import app
|
||||
from platformio.run.cli import cli as cmd_run
|
||||
|
||||
args = ["--project-dir", project_dir, "--target", "__idedata"]
|
||||
if debug:
|
||||
if build_type == "debug":
|
||||
args.extend(["--target", "__debug"])
|
||||
# if build_type == "test":
|
||||
# args.extend(["--target", "__test"])
|
||||
for name in env_names:
|
||||
args.extend(["-e", name])
|
||||
app.set_session_var("pause_telemetry", True)
|
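A simplified sketch of the cache-invalidation rule in the hunk above: cached per-environment metadata is only reused when its recorded `build_type` matches the one requested now; stale entries are dropped and rebuilt. The data structures here are illustrative, not the real idedata payload.

```
def filter_cached_metadata(cached: dict, requested_build_type: str) -> dict:
    """Keep only cache entries built with the requested build type."""
    return {
        env_name: data
        for env_name, data in cached.items()
        if data.get("build_type", "") == requested_build_type
    }


cached = {
    "uno": {"build_type": "release", "includes": ["src"]},
    "debugkit": {"build_type": "debug", "includes": ["src"]},
}
print(filter_cached_metadata(cached, "debug"))  # only "debugkit" survives
```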
||||
@@ -181,16 +183,16 @@ def _load_build_metadata(project_dir, env_names, debug=False):
|
||||
raise result.exception
|
||||
if '"includes":' not in result.output:
|
||||
raise exception.UserSideException(result.output)
|
||||
return _get_cached_build_metadata(project_dir, env_names)
|
||||
return _get_cached_build_metadata(env_names)
|
||||
|
||||
|
||||
def _get_cached_build_metadata(project_dir, env_names):
|
||||
build_dir = ProjectConfig.get_instance(
|
||||
os.path.join(project_dir, "platformio.ini")
|
||||
).get("platformio", "build_dir")
|
||||
def _get_cached_build_metadata(env_names):
|
||||
build_dir = ProjectConfig.get_instance().get("platformio", "build_dir")
|
||||
result = {}
|
||||
for name in env_names:
|
||||
if not os.path.isfile(os.path.join(build_dir, name, "idedata.json")):
|
||||
for env_name in env_names:
|
||||
if not os.path.isfile(os.path.join(build_dir, env_name, "idedata.json")):
|
||||
continue
|
||||
result[name] = fs.load_json(os.path.join(build_dir, name, "idedata.json"))
|
||||
result[env_name] = fs.load_json(
|
||||
os.path.join(build_dir, env_name, "idedata.json")
|
||||
)
|
||||
return result
|
||||
|
||||
@@ -91,9 +91,11 @@ class ProjectGenerator:
|
||||
"default_debug_env_name": get_default_debug_env(self.config),
|
||||
"env_name": self.env_name,
|
||||
"user_home_dir": os.path.abspath(fs.expanduser("~")),
|
||||
"platformio_path": sys.argv[0]
|
||||
if os.path.isfile(sys.argv[0])
|
||||
else where_is_program("platformio"),
|
||||
"platformio_path": (
|
||||
sys.argv[0]
|
||||
if os.path.isfile(sys.argv[0])
|
||||
else where_is_program("platformio")
|
||||
),
|
||||
"env_path": os.getenv("PATH"),
|
||||
"env_pathsep": os.pathsep,
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
# common.symbolFiles=<Symbol Files loaded by debugger>
|
||||
# (This value is overwritten by a launcher specific symbolFiles value if the latter exists)
|
||||
#
|
||||
# In runDir, symbolFiles and env fields you can use these macroses:
|
||||
# In runDir, symbolFiles and env fields you can use these macros:
|
||||
# ${PROJECT_DIR} - project directory absolute path
|
||||
# ${OUTPUT_PATH} - linker output path (relative to project directory path)
|
||||
# ${OUTPUT_BASENAME}- linker output filename
|
||||
|
||||
@@ -23,7 +23,7 @@ from platformio import fs
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class ConfigOption: # pylint: disable=too-many-instance-attributes
|
||||
class ConfigOption: # pylint: disable=too-many-instance-attributes,too-many-positional-arguments
|
||||
def __init__(
|
||||
self,
|
||||
scope,
|
||||
@@ -549,7 +549,7 @@ ProjectOptions = OrderedDict(
|
||||
ConfigEnvOption(
|
||||
group="monitor",
|
||||
name="monitor_encoding",
|
||||
description="Custom encoding (e.g. hexlify, Latin1, UTF-8)",
|
||||
description="Custom encoding (e.g. hexlify, Latin-1, UTF-8)",
|
||||
default="UTF-8",
|
||||
),
|
||||
# Library
|
||||
|
||||
@@ -23,7 +23,7 @@ from platformio.project.helpers import get_project_watch_lib_dirs, load_build_me
|
||||
from platformio.project.options import get_config_options_schema
|
||||
from platformio.test.result import TestCase, TestCaseSource, TestStatus
|
||||
from platformio.test.runners.base import TestRunnerBase
|
||||
from platformio.test.runners.doctest import DoctestTestCaseParser
|
||||
from platformio.test.runners.doctest import DoctestTestRunner
|
||||
from platformio.test.runners.googletest import GoogletestTestRunner
|
||||
from platformio.test.runners.unity import UnityTestRunner
|
||||
from platformio.util import get_systype
|
||||
|
||||
@@ -41,9 +41,11 @@ def access_list_cmd(owner, urn_type, json_output): # pylint: disable=unused-arg
|
||||
table_data.append(
|
||||
(
|
||||
"Access:",
|
||||
click.style("Private", fg="red")
|
||||
if resource.get("private", False)
|
||||
else "Public",
|
||||
(
|
||||
click.style("Private", fg="red")
|
||||
if resource.get("private", False)
|
||||
else "Public"
|
||||
),
|
||||
)
|
||||
)
|
||||
table_data.append(
|
||||
|
||||
@@ -42,7 +42,7 @@ class RegistryClient(HTTPClient):
|
||||
pass
|
||||
return False
|
||||
|
||||
def publish_package( # pylint: disable=redefined-builtin
|
||||
def publish_package( # pylint: disable=redefined-builtin, too-many-positional-arguments
|
||||
self, owner, type, archive_path, released_at=None, private=False, notify=True
|
||||
):
|
||||
with open(archive_path, "rb") as fp:
|
||||
@@ -64,7 +64,7 @@ class RegistryClient(HTTPClient):
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def unpublish_package( # pylint: disable=redefined-builtin
|
||||
def unpublish_package( # pylint: disable=redefined-builtin, too-many-positional-arguments
|
||||
self, owner, type, name, version=None, undo=False
|
||||
):
|
||||
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
||||
@@ -142,7 +142,9 @@ class RegistryClient(HTTPClient):
|
||||
x_with_authorization=self.allowed_private_packages(),
|
||||
)
|
||||
|
||||
def get_package(self, typex, owner, name, version=None, extra_path=None):
|
||||
def get_package(
|
||||
self, typex, owner, name, version=None, extra_path=None
|
||||
): # pylint: disable=too-many-positional-arguments
|
||||
try:
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
|
||||
@@ -54,9 +54,11 @@ class RegistryFileMirrorIterator:
|
||||
"head",
|
||||
self._url_parts.path,
|
||||
allow_redirects=False,
|
||||
params=dict(bypass=",".join(self._visited_mirrors))
|
||||
if self._visited_mirrors
|
||||
else None,
|
||||
params=(
|
||||
dict(bypass=",".join(self._visited_mirrors))
|
||||
if self._visited_mirrors
|
||||
else None
|
||||
),
|
||||
x_with_authorization=RegistryClient.allowed_private_packages(),
|
||||
)
|
||||
stop_conditions = [
|
||||
|
||||
@@ -110,7 +110,7 @@ def remote_update(agents, only_check, dry_run):
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_run(
|
||||
def remote_run( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
@@ -198,7 +198,7 @@ def remote_run(
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.pass_obj
|
||||
@click.pass_context
|
||||
def remote_test( # pylint: disable=redefined-builtin
|
||||
def remote_test( # pylint: disable=redefined-builtin,too-many-positional-arguments
|
||||
ctx,
|
||||
agents,
|
||||
environment,
|
||||
|
||||
@@ -123,9 +123,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
|
||||
index=i + 1,
|
||||
host=device[0] + ":" if len(result) > 1 else "",
|
||||
port=device[1]["port"],
|
||||
description=device[1]["description"]
|
||||
if device[1]["description"] != "n/a"
|
||||
else "",
|
||||
description=(
|
||||
device[1]["description"]
|
||||
if device[1]["description"] != "n/a"
|
||||
else ""
|
||||
),
|
||||
)
|
||||
)
|
||||
device_index = click.prompt(
|
||||
|
||||
@@ -239,7 +239,7 @@ class RunOrTestClient(AsyncClientBase):
|
||||
except (AttributeError, pb.DeadReferenceError):
|
||||
self.disconnect(exit_code=1)
|
||||
|
||||
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments
|
||||
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self, result, agent_id, ac_id, dbindex, fileobj
|
||||
):
|
||||
result = PROJECT_SYNC_STAGE.lookupByValue(result)
|
||||
|
||||
@@ -30,7 +30,7 @@ class SSLContextFactory(ssl.ClientContextFactory):
|
||||
ctx.load_verify_locations(certifi.where())
|
||||
return ctx
|
||||
|
||||
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments
|
||||
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments,too-many-positional-arguments
|
||||
self, connection, x509, errno, depth, status
|
||||
):
|
||||
cn = x509.get_subject().commonName
|
||||
|
||||
@@ -33,9 +33,11 @@ from platformio.test.runners.base import CTX_META_TEST_IS_RUNNING
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
|
||||
try:
|
||||
DEFAULT_JOB_NUMS = cpu_count()
|
||||
SYSTEM_CPU_COUNT = cpu_count()
|
||||
except NotImplementedError:
|
||||
DEFAULT_JOB_NUMS = 1
|
||||
SYSTEM_CPU_COUNT = 1
|
||||
|
||||
DEFAULT_JOB_NUMS = int(os.getenv("PLATFORMIO_RUN_JOBS", SYSTEM_CPU_COUNT))
|
||||
|
||||
|
||||
@click.command("run", short_help="Run project targets (build, upload, clean, etc.)")
|
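A minimal sketch of the default-jobs logic above: fall back to the CPU count (or 1 when it cannot be determined) and let a `PLATFORMIO_RUN_JOBS` environment variable override it.

```
import os
from multiprocessing import cpu_count

try:
    SYSTEM_CPU_COUNT = cpu_count()
except NotImplementedError:
    SYSTEM_CPU_COUNT = 1

DEFAULT_JOB_NUMS = int(os.getenv("PLATFORMIO_RUN_JOBS", SYSTEM_CPU_COUNT))
print(DEFAULT_JOB_NUMS)
```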
||||
@@ -76,7 +78,7 @@ except NotImplementedError:
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
environment,
|
||||
target,
|
||||
@@ -174,7 +176,7 @@ def cli(
|
||||
return True
|
||||
|
||||
|
||||
def process_env(
|
||||
def process_env( # pylint: disable=too-many-positional-arguments
|
||||
ctx,
|
||||
name,
|
||||
config,
|
||||
|
||||
@@ -22,7 +22,7 @@ from platformio.test.runners.base import CTX_META_TEST_RUNNING_NAME
|
||||
|
||||
|
||||
class EnvironmentProcessor:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
cmd_ctx,
|
||||
name,
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
@@ -79,6 +80,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
help="A program argument (multiple are allowed)",
|
||||
)
|
||||
@click.option("--list-tests", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--json-output-path", type=click.Path())
|
||||
@click.option("--junit-output-path", type=click.Path())
|
||||
@click.option(
|
||||
@@ -88,7 +90,7 @@ from platformio.test.runners.factory import TestRunnerFactory
|
||||
help="Increase verbosity level, maximum is 3 levels (-vvv), see docs for details",
|
||||
)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals,redefined-builtin
|
||||
ctx,
|
||||
environment,
|
||||
ignore,
|
||||
@@ -105,6 +107,7 @@ def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
monitor_dtr,
|
||||
program_args,
|
||||
list_tests,
|
||||
json_output,
|
||||
json_output_path,
|
||||
junit_output_path,
|
||||
verbose,
|
||||
@@ -156,6 +159,7 @@ def cli( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
|
||||
stdout_report.generate(verbose=verbose or list_tests)
|
||||
|
||||
for output_format, output_path in [
|
||||
("json", subprocess.STDOUT if json_output else None),
|
||||
("json", json_output_path),
|
||||
("junit", junit_output_path),
|
||||
]:
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
|
||||
@@ -24,6 +25,9 @@ from platformio.test.result import TestStatus
|
||||
|
||||
class JsonTestReport(TestReportBase):
|
||||
def generate(self, output_path, verbose=False):
|
||||
if output_path == subprocess.STDOUT:
|
||||
return click.echo("\n\n" + json.dumps(self.to_json()))
|
||||
|
||||
if os.path.isdir(output_path):
|
||||
output_path = os.path.join(
|
||||
output_path,
|
||||
@@ -40,6 +44,8 @@ class JsonTestReport(TestReportBase):
|
||||
if verbose:
|
||||
click.secho(f"Saved JSON report to the {output_path}", fg="green")
|
||||
|
||||
return True
|
||||
|
||||
def to_json(self):
|
||||
result = dict(
|
||||
version="1.0",
|
||||
@@ -62,11 +68,13 @@ class JsonTestReport(TestReportBase):
|
||||
test_dir=test_suite.test_dir,
|
||||
status=test_suite.status.name,
|
||||
duration=test_suite.duration,
|
||||
timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
|
||||
"%Y-%m-%dT%H:%M:%S"
|
||||
)
|
||||
if test_suite.timestamp
|
||||
else None,
|
||||
timestamp=(
|
||||
datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
|
||||
"%Y-%m-%dT%H:%M:%S"
|
||||
)
|
||||
if test_suite.timestamp
|
||||
else None
|
||||
),
|
||||
testcase_nums=len(test_suite.cases),
|
||||
error_nums=test_suite.get_status_nums(TestStatus.ERRORED),
|
||||
failure_nums=test_suite.get_status_nums(TestStatus.FAILED),
|
||||
|
||||
@@ -55,7 +55,7 @@ class TestCaseSource:
|
||||
|
||||
|
||||
class TestCase:
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
name,
|
||||
status,
|
||||
|
||||
@@ -26,7 +26,7 @@ CTX_META_TEST_RUNNING_NAME = __name__ + ".test_running_name"
|
||||
|
||||
|
||||
class TestRunnerOptions: # pylint: disable=too-many-instance-attributes
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
self,
|
||||
verbose=0,
|
||||
without_building=False,
|
||||
|
||||
@@ -101,7 +101,7 @@ class DoctestTestCaseParser:
|
||||
|
||||
|
||||
class DoctestTestRunner(TestRunnerBase):
|
||||
EXTRA_LIB_DEPS = ["doctest/doctest@^2.4.9"]
|
||||
EXTRA_LIB_DEPS = ["doctest/doctest@^2.4.11"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@@ -88,7 +88,7 @@ class GoogletestTestCaseParser:
|
||||
|
||||
|
||||
class GoogletestTestRunner(TestRunnerBase):
|
||||
EXTRA_LIB_DEPS = ["google/googletest@^1.12.1"]
|
||||
EXTRA_LIB_DEPS = ["google/googletest@^1.15.2"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@@ -26,7 +26,7 @@ from platformio.util import strip_ansi_codes
|
||||
|
||||
|
||||
class UnityTestRunner(TestRunnerBase):
|
||||
EXTRA_LIB_DEPS = ["throwtheswitch/Unity@^2.5.2"]
|
||||
EXTRA_LIB_DEPS = ["throwtheswitch/Unity@^2.6.0"]
|
||||
|
||||
# Examples:
|
||||
# test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33
|
||||
@@ -184,10 +184,6 @@ void unityOutputComplete(void) { unittest_uart_end(); }
|
||||
),
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Delete when Unity > 2.5.2 is released"""
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def get_unity_framework_config(self):
|
||||
if not self.platform.is_embedded():
|
||||
return self.UNITY_FRAMEWORK_CONFIG["native"]
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
import datetime
|
||||
import functools
|
||||
import math
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
@@ -64,16 +65,16 @@ class memoized:
|
||||
|
||||
|
||||
class throttle:
|
||||
def __init__(self, threshhold):
|
||||
self.threshhold = threshhold # milliseconds
|
||||
def __init__(self, threshold):
|
||||
self.threshold = threshold # milliseconds
|
||||
self.last = 0
|
||||
|
||||
def __call__(self, func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
diff = int(round((time.time() - self.last) * 1000))
|
||||
if diff < self.threshhold:
|
||||
time.sleep((self.threshhold - diff) * 0.001)
|
||||
if diff < self.threshold:
|
||||
time.sleep((self.threshold - diff) * 0.001)
|
||||
self.last = time.time()
|
||||
return func(*args, **kwargs)
|
||||
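A standalone version of the throttle decorator from the hunk above (with the typo fixed), plus a tiny usage example: calls closer together than `threshold` milliseconds are delayed so the wrapped function never runs more often than allowed.

```
import functools
import time


class throttle:
    def __init__(self, threshold):
        self.threshold = threshold  # milliseconds
        self.last = 0

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            diff = int(round((time.time() - self.last) * 1000))
            if diff < self.threshold:
                time.sleep((self.threshold - diff) * 0.001)
            self.last = time.time()
            return func(*args, **kwargs)

        return wrapper


@throttle(100)
def ping(i):
    print("ping", i, round(time.time(), 2))


for i in range(3):
    ping(i)  # at least ~100 ms apart
```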
|
||||
@@ -136,6 +137,11 @@ def singleton(cls):
|
||||
|
||||
|
||||
def get_systype():
|
||||
# allow manual override, eg. for
|
||||
# windows on arm64 systems with emulated x86
|
||||
if "PLATFORMIO_SYSTEM_TYPE" in os.environ:
|
||||
return os.environ.get("PLATFORMIO_SYSTEM_TYPE")
|
||||
|
||||
system = platform.system().lower()
|
||||
arch = platform.machine().lower()
|
||||
if system == "windows":
|
||||
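A minimal sketch of the override added above: an environment variable wins over auto-detection, which helps e.g. on Windows-on-ARM machines running an emulated x86 Python. The fallback format below is simplified compared to the real helper, which normalizes system and architecture names further.

```
import os
import platform


def get_systype():
    if "PLATFORMIO_SYSTEM_TYPE" in os.environ:
        return os.environ["PLATFORMIO_SYSTEM_TYPE"]
    return "%s_%s" % (platform.system().lower(), platform.machine().lower())


os.environ["PLATFORMIO_SYSTEM_TYPE"] = "windows_arm64"
print(get_systype())  # windows_arm64, regardless of the host
```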
|
||||
@@ -357,6 +357,8 @@ Packages
|
||||
- Description"""
|
||||
)
|
||||
for name, options in dict(sorted(packages.items())).items():
|
||||
if name == "toolchain-gccarmnoneeab": # aceinna typo fix
|
||||
name = name + "i"
|
||||
package = REGCLIENT.get_package(
|
||||
"tool", options.get("owner", "platformio"), name
|
||||
)
|
||||
@@ -411,6 +413,7 @@ Packages
|
||||
|
||||
|
||||
def generate_platform(pkg, rst_dir):
|
||||
owner = pkg.metadata.spec.owner
|
||||
name = pkg.metadata.name
|
||||
print("Processing platform: %s" % name)
|
||||
|
||||
@@ -426,9 +429,9 @@ def generate_platform(pkg, rst_dir):
|
||||
p = PlatformFactory.new(name)
|
||||
assert p.repository_url.endswith(".git")
|
||||
github_url = p.repository_url[:-4]
|
||||
registry_url = reg_package_url("platform", pkg.metadata.spec.owner, name)
|
||||
registry_url = reg_package_url("platform", owner, name)
|
||||
|
||||
lines.append(".. _platform_%s:" % p.name)
|
||||
lines.append(".. _platform_%s:" % name)
|
||||
lines.append("")
|
||||
|
||||
lines.append(p.title)
|
||||
@@ -437,7 +440,7 @@ def generate_platform(pkg, rst_dir):
|
||||
lines.append(":Registry:")
|
||||
lines.append(" `%s <%s>`__" % (registry_url, registry_url))
|
||||
lines.append(":Configuration:")
|
||||
lines.append(" :ref:`projectconf_env_platform` = ``%s``" % p.name)
|
||||
lines.append(" :ref:`projectconf_env_platform` = ``%s/%s``" % (owner, name))
|
||||
lines.append("")
|
||||
lines.append(p.description)
|
||||
lines.append(
|
||||
|
||||
setup.py
@@ -23,7 +23,7 @@ from platformio import (
|
||||
__url__,
|
||||
__version__,
|
||||
)
|
||||
from platformio.pipdeps import get_pip_dependencies
|
||||
from platformio.dependencies import get_pip_dependencies
|
||||
|
||||
setup(
|
||||
name=__title__,
|
||||
|
||||
@@ -18,7 +18,8 @@ import os
|
||||
|
||||
import pytest
|
||||
|
||||
from platformio import __core_packages__, fs
|
||||
from platformio import fs
|
||||
from platformio.dependencies import get_core_dependencies
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
@@ -30,7 +31,7 @@ PROJECT_CONFIG_TPL = """
|
||||
[env]
|
||||
platform = platformio/atmelavr@^3.4.0
|
||||
lib_deps =
|
||||
milesburton/DallasTemperature@^3.9.1
|
||||
milesburton/DallasTemperature@^4.0.4
|
||||
https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip
|
||||
|
||||
[env:baremetal]
|
||||
@@ -177,7 +178,7 @@ def test_baremetal_project(
|
||||
),
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.70300.191015"),
|
||||
]
|
||||
|
||||
@@ -210,11 +211,11 @@ def test_project(
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("framework-arduino-avr-attiny@1.5.2"),
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.70300.191015"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@^3.9.1",
|
||||
"milesburton/DallasTemperature@^4.0.4",
|
||||
"https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip",
|
||||
]
|
||||
|
||||
@@ -240,7 +241,7 @@ def test_private_lib_deps(
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"bblanchon/ArduinoJson": "^5",
|
||||
"milesburton/DallasTemperature": "^3.9.1"
|
||||
"milesburton/DallasTemperature": "^4.0.4"
|
||||
}
|
||||
}
|
||||
"""
|
||||
@@ -339,7 +340,7 @@ def test_remove_project_unused_libdeps(
|
||||
),
|
||||
]
|
||||
|
||||
# manually remove from cofiguration file
|
||||
# manually remove from configuration file
|
||||
config.set("env:baremetal", "lib_deps", ["bblanchon/ArduinoJson@^5"])
|
||||
config.save()
|
||||
result = clirunner.invoke(
|
||||
@@ -445,7 +446,7 @@ def test_custom_project_libraries(
|
||||
)
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec("ArduinoJson@5.13.4"),
|
||||
PackageSpec("Nanopb@0.4.8"),
|
||||
PackageSpec("Nanopb@0.4.91"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"bblanchon/ArduinoJson@^5",
|
||||
|
||||
@@ -24,7 +24,7 @@ PROJECT_OUTDATED_CONFIG_TPL = """
|
||||
platform = platformio/atmelavr@^2
|
||||
framework = arduino
|
||||
board = attiny88
|
||||
lib_deps = milesburton/DallasTemperature@~3.8.0
|
||||
lib_deps = milesburton/DallasTemperature@~3.9.0
|
||||
"""
|
||||
|
||||
PROJECT_UPDATED_CONFIG_TPL = """
|
||||
@@ -32,7 +32,7 @@ PROJECT_UPDATED_CONFIG_TPL = """
|
||||
platform = platformio/atmelavr@<4
|
||||
framework = arduino
|
||||
board = attiny88
|
||||
lib_deps = milesburton/DallasTemperature@^3.8.0
|
||||
lib_deps = milesburton/DallasTemperature@^3.9.0
|
||||
"""
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@ def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path):
|
||||
re.MULTILINE,
|
||||
)
|
||||
assert re.search(
|
||||
r"^DallasTemperature\s+3\.8\.1\s+3\.\d+\.\d+\s+3\.\d+\.\d+\s+Library\s+devkit",
|
||||
r"^DallasTemperature\s+3\.\d\.1\s+3\.\d+\.\d+\s+4\.\d+\.\d+\s+Library\s+devkit",
|
||||
result.output,
|
||||
re.MULTILINE,
|
||||
)
|
||||
|
||||
@@ -58,11 +58,14 @@ def test_global_packages(
|
||||
validate_cliresult(result)
|
||||
assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
|
||||
"ArduinoJson",
|
||||
"Async TCP",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"AsyncTCP_RP2040W",
|
||||
"Bounce2",
|
||||
"ESP Async WebServer",
|
||||
"ESPAsyncTCP",
|
||||
"ESPAsyncTCP-esphome",
|
||||
"Homie",
|
||||
]
|
||||
# uninstall all deps
|
||||
@@ -95,11 +98,14 @@ def test_global_packages(
|
||||
validate_cliresult(result)
|
||||
assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
|
||||
"ArduinoJson",
|
||||
"Async TCP",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"AsyncTCP_RP2040W",
|
||||
"Bounce2",
|
||||
"ESP Async WebServer",
|
||||
"ESPAsyncTCP",
|
||||
"ESPAsyncTCP-esphome",
|
||||
]
|
||||
# remove specific dependency
|
||||
result = clirunner.invoke(
|
||||
@@ -114,7 +120,9 @@ def test_global_packages(
|
||||
assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
|
||||
"ArduinoJson",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"Bounce2",
|
||||
"ESPAsyncTCP",
|
||||
]
|
||||
|
||||
# custom storage
|
||||
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from platformio import __core_packages__, fs
|
||||
from platformio import fs
|
||||
from platformio.dependencies import get_core_dependencies
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
@@ -26,12 +27,14 @@ from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
DALLASTEMPERATURE_LATEST_VERSION = "3.11.0"
|
||||
|
||||
PROJECT_OUTDATED_CONFIG_TPL = """
|
||||
[env:devkit]
|
||||
platform = platformio/atmelavr@^2
|
||||
framework = arduino
|
||||
board = attiny88
|
||||
lib_deps = milesburton/DallasTemperature@~3.8.0
|
||||
lib_deps = milesburton/DallasTemperature@^3.9.1
|
||||
"""
|
||||
|
||||
PROJECT_UPDATED_CONFIG_TPL = """
|
||||
@@ -39,7 +42,7 @@ PROJECT_UPDATED_CONFIG_TPL = """
|
||||
platform = platformio/atmelavr@<4
|
||||
framework = arduino
|
||||
board = attiny88
|
||||
lib_deps = milesburton/DallasTemperature@^3.8.0
|
||||
lib_deps = milesburton/DallasTemperature@^3.9.1
|
||||
"""
|
||||
|
||||
|
||||
@@ -162,7 +165,7 @@ def test_project(
|
||||
os.path.join(config.get("platformio", "libdeps_dir"), "devkit")
|
||||
)
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec("DallasTemperature@3.8.1"),
|
||||
PackageSpec(f"DallasTemperature@{DALLASTEMPERATURE_LATEST_VERSION}"),
|
||||
PackageSpec(
|
||||
"OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
|
||||
),
|
||||
@@ -172,11 +175,11 @@ def test_project(
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("framework-arduino-avr-attiny@1.3.2"),
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.50400.190710"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@~3.8.0"
|
||||
"milesburton/DallasTemperature@^3.9.1"
|
||||
]
|
||||
|
||||
# update packages
|
||||
@@ -192,22 +195,19 @@ def test_project(
|
||||
assert pkgs[0].metadata.name == "atmelavr"
|
||||
assert pkgs[0].metadata.version.major == 3
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec(
|
||||
"DallasTemperature@%s"
|
||||
% get_pkg_latest_version("milesburton/DallasTemperature")
|
||||
),
|
||||
PackageSpec("DallasTemperature@3.11.0"),
|
||||
PackageSpec(
|
||||
"OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
|
||||
),
|
||||
]
|
||||
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
|
||||
PackageSpec("framework-arduino-avr-attiny@1.3.2"),
|
||||
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
|
||||
PackageSpec("tool-scons@%s" % get_core_dependencies()["tool-scons"][1:]),
|
||||
PackageSpec("toolchain-atmelavr@1.70300.191015"),
|
||||
PackageSpec("toolchain-atmelavr@1.50400.190710"),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@^3.8.0"
|
||||
"milesburton/DallasTemperature@^3.9.1"
|
||||
]
|
||||
|
||||
# update again
|
||||
@@ -227,7 +227,7 @@ def test_custom_project_libraries(
|
||||
project_dir = tmp_path / "project"
|
||||
project_dir.mkdir()
|
||||
(project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL)
|
||||
spec = "milesburton/DallasTemperature@~3.8.0"
|
||||
spec = "milesburton/DallasTemperature@^3.9.1"
|
||||
result = clirunner.invoke(
|
||||
package_install_cmd,
|
||||
["-d", str(project_dir), "-e", "devkit", "-l", spec],
|
||||
@@ -240,7 +240,7 @@ def test_custom_project_libraries(
|
||||
os.path.join(config.get("platformio", "libdeps_dir"), "devkit")
|
||||
)
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec("DallasTemperature@3.8.1"),
|
||||
PackageSpec(f"DallasTemperature@{DALLASTEMPERATURE_LATEST_VERSION}"),
|
||||
PackageSpec(
|
||||
"OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
|
||||
),
|
||||
@@ -248,15 +248,15 @@ def test_custom_project_libraries(
|
||||
# update package
|
||||
result = clirunner.invoke(
|
||||
package_update_cmd,
|
||||
["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"],
|
||||
["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.9.1"],
|
||||
)
|
||||
assert ProjectConfig().get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@^3.8.0"
|
||||
"milesburton/DallasTemperature@^3.9.1"
|
||||
]
|
||||
# try again
|
||||
result = clirunner.invoke(
|
||||
package_update_cmd,
|
||||
["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"],
|
||||
["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.9.1"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "Already up-to-date." in result.output
|
||||
@@ -273,16 +273,13 @@ def test_custom_project_libraries(
|
||||
os.path.join(config.get("platformio", "libdeps_dir"), "devkit")
|
||||
)
|
||||
assert pkgs_to_specs(lm.get_installed()) == [
|
||||
PackageSpec(
|
||||
"DallasTemperature@%s"
|
||||
% get_pkg_latest_version("milesburton/DallasTemperature")
|
||||
),
|
||||
PackageSpec("DallasTemperature@3.11.0"),
|
||||
PackageSpec(
|
||||
"OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
|
||||
),
|
||||
]
|
||||
assert config.get("env:devkit", "lib_deps") == [
|
||||
"milesburton/DallasTemperature@^3.8.0"
|
||||
"milesburton/DallasTemperature@^3.9.1"
|
||||
]
|
||||
|
||||
# unknown libraries
|
||||
|
||||
@@ -803,3 +803,49 @@ check_src_filters =
|
||||
assert errors + warnings + style == EXPECTED_DEFECTS
|
||||
assert "test.cpp" in result.output
|
||||
assert "main.cpp" not in result.output
|
||||
|
||||
|
||||
def test_check_sources_in_project_root(clirunner, validate_cliresult, tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp("project")
|
||||
|
||||
config = (
|
||||
"""
|
||||
[platformio]
|
||||
src_dir = ./
|
||||
"""
|
||||
+ DEFAULT_CONFIG
|
||||
)
|
||||
tmpdir.join("platformio.ini").write(config)
|
||||
tmpdir.join("main.cpp").write(TEST_CODE)
|
||||
tmpdir.mkdir("spi").join("uart.cpp").write(TEST_CODE)
|
||||
|
||||
result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
|
||||
validate_cliresult(result)
|
||||
|
||||
errors, warnings, style = count_defects(result.output)
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert errors + warnings + style == EXPECTED_DEFECTS * 2
|
||||
|
||||
|
||||
def test_check_sources_in_external_dir(clirunner, validate_cliresult, tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp("project")
|
||||
external_src_dir = tmpdir_factory.mktemp("external_src_dir")
|
||||
|
||||
config = (
|
||||
f"""
|
||||
[platformio]
|
||||
src_dir = {external_src_dir}
|
||||
"""
|
||||
+ DEFAULT_CONFIG
|
||||
)
|
||||
tmpdir.join("platformio.ini").write(config)
|
||||
external_src_dir.join("main.cpp").write(TEST_CODE)
|
||||
|
||||
result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
|
||||
validate_cliresult(result)
|
||||
|
||||
errors, warnings, style = count_defects(result.output)
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert errors + warnings + style == EXPECTED_DEFECTS
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from platformio import fs
|
||||
from platformio.commands.boards import cli as cmd_boards
|
||||
from platformio.project.commands.init import project_init_cmd
|
||||
from platformio.project.config import ProjectConfig
|
||||
@@ -36,27 +37,28 @@ def test_init_default(clirunner, validate_cliresult):
|
||||
validate_pioproject(os.getcwd())
|
||||
|
||||
|
||||
def test_init_ext_folder(clirunner, validate_cliresult):
|
||||
with clirunner.isolated_filesystem():
|
||||
ext_folder_name = "ext_folder"
|
||||
os.makedirs(ext_folder_name)
|
||||
result = clirunner.invoke(project_init_cmd, ["-d", ext_folder_name])
|
||||
validate_cliresult(result)
|
||||
validate_pioproject(os.path.join(os.getcwd(), ext_folder_name))
|
||||
|
||||
|
||||
def test_init_duplicated_boards(clirunner, validate_cliresult, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
for _ in range(2):
|
||||
result = clirunner.invoke(
|
||||
project_init_cmd,
|
||||
["-b", "uno", "-b", "uno", "--no-install-dependencies"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
validate_pioproject(str(tmpdir))
|
||||
config = ProjectConfig(os.path.join(os.getcwd(), "platformio.ini"))
|
||||
config.validate()
|
||||
assert set(config.sections()) == set(["env:uno"])
|
||||
project_dir = str(tmpdir.join("ext_folder"))
|
||||
os.makedirs(project_dir)
|
||||
|
||||
with fs.cd(os.path.dirname(project_dir)):
|
||||
result = clirunner.invoke(
|
||||
project_init_cmd,
|
||||
[
|
||||
"-d",
|
||||
os.path.basename(project_dir),
|
||||
"-b",
|
||||
"uno",
|
||||
"-b",
|
||||
"uno",
|
||||
"--no-install-dependencies",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
validate_pioproject(project_dir)
|
||||
config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
|
||||
config.validate()
|
||||
assert set(config.sections()) == set(["env:uno"])
|
||||
|
||||
|
||||
def test_init_ide_without_board(clirunner, tmpdir):
|
||||
|
||||
@@ -42,7 +42,7 @@ board = devkit
|
||||
framework = foo
|
||||
lib_deps =
|
||||
CustomLib
|
||||
ArduinoJson @ 5.10.1
|
||||
ArduinoJson @ 6.18.5
|
||||
"""
|
||||
)
|
||||
result = clirunner.invoke(
|
||||
@@ -163,7 +163,7 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory
|
||||
storage_dir = tmpdir_factory.mktemp("test-updates")
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-d", str(storage_dir), "install", "ArduinoJson @ 5.10.1", "Blynk @ ~0.5.0"],
|
||||
["-d", str(storage_dir), "install", "ArduinoJson @ 6.18.5", "Blynk @ ~1.2"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
result = clirunner.invoke(
|
||||
@@ -173,17 +173,17 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory
    outdated = json.loads(result.stdout)
    assert len(outdated) == 2
    # ArduinoJson
    assert outdated[0]["version"] == "5.10.1"
    assert outdated[0]["version"] == "6.18.5"
    assert outdated[0]["versionWanted"] is None
    assert semantic_version.Version(
        outdated[0]["versionLatest"]
    ) > semantic_version.Version("6.16.0")
    ) > semantic_version.Version("6.18.5")
    # Blynk
    assert outdated[1]["version"] == "0.5.4"
    assert outdated[1]["version"] == "1.2.0"
    assert outdated[1]["versionWanted"] is None
    assert semantic_version.Version(
        outdated[1]["versionLatest"]
    ) > semantic_version.Version("0.6.0")
    ) > semantic_version.Version("1.2.0")

    # check with spec
    result = clirunner.invoke(
@@ -194,19 +194,19 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory
            "update",
            "--dry-run",
            "--json-output",
            "ArduinoJson @ ^5",
            "ArduinoJson @ ^6",
        ],
    )
    validate_cliresult(result)
    outdated = json.loads(result.stdout)
    assert outdated[0]["version"] == "5.10.1"
    assert outdated[0]["versionWanted"] == "5.13.4"
    assert outdated[0]["version"] == "6.18.5"
    assert outdated[0]["versionWanted"] == "6.21.5"
    assert semantic_version.Version(
        outdated[0]["versionLatest"]
    ) > semantic_version.Version("6.16.0")
    # update with spec
    result = clirunner.invoke(
        cmd_lib, ["-d", str(storage_dir), "update", "--silent", "ArduinoJson @ ^5.10.1"]
        cmd_lib, ["-d", str(storage_dir), "update", "--silent", "ArduinoJson @ ^6.18.5"]
    )
    validate_cliresult(result)
    result = clirunner.invoke(
@@ -215,12 +215,12 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory
    validate_cliresult(result)
    items = json.loads(result.stdout)
    assert len(items) == 2
    assert items[0]["version"] == "5.13.4"
    assert items[1]["version"] == "0.5.4"
    assert items[0]["version"] == "6.21.5"
    assert items[1]["version"] == "1.2.0"

    # Check incompatible
    result = clirunner.invoke(
        cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "ArduinoJson @ ^5"]
        cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "ArduinoJson @ ^6"]
    )
    with pytest.raises(
        AssertionError,
@@ -228,7 +228,7 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory
    ):
        validate_cliresult(result)
    result = clirunner.invoke(
        cmd_lib, ["-d", str(storage_dir), "update", "ArduinoJson @ ^5"]
        cmd_lib, ["-d", str(storage_dir), "update", "ArduinoJson @ ^6"]
    )
    validate_cliresult(result)
    assert "ArduinoJson@5.13.4 is already up-to-date" in result.stdout
    assert "ArduinoJson@6.21.5 is already up-to-date" in result.stdout

@@ -23,6 +23,7 @@ from platformio.package.exception import UnknownPackageError
from platformio.util import strip_ansi_codes

PlatformioCLI.leftover_args = ["--json-output"]  # hook for click
ARDUINO_JSON_VERSION = "6.21.5"


def test_search(clirunner, validate_cliresult):
@@ -44,10 +45,10 @@ def test_global_install_registry(clirunner, validate_cliresult, isolated_pio_cor
            "-g",
            "install",
            "64",
            "ArduinoJson@~5.10.0",
            "547@2.2.4",
            "ArduinoJson@~6",
            "547@2.7.3",
            "AsyncMqttClient@<=0.8.2",
            "Adafruit PN532@1.2.0",
            "Adafruit PN532@1.3.2",
        ],
    )
    validate_cliresult(result)
@@ -60,7 +61,7 @@ def test_global_install_registry(clirunner, validate_cliresult, isolated_pio_cor
    items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
    items2 = [
        "ArduinoJson",
        "ArduinoJson@5.10.1",
        f"ArduinoJson@{ARDUINO_JSON_VERSION}",
        "NeoPixelBus",
        "AsyncMqttClient",
        "ESPAsyncTCP",
@@ -79,7 +80,7 @@ def test_global_install_archive(clirunner, validate_cliresult, isolated_pio_core
            "install",
            "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
            "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
            "SomeLib=https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.8.1/DallasTemperature-3.8.1.tar.gz",
            "SomeLib=https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.11.0/DallasTemperature-3.11.0.tar.gz",
            "https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
        ],
    )
@@ -142,7 +143,7 @@ def test_install_duplicates( # pylint: disable=unused-argument
        [
            "-g",
            "install",
            "https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.8.1/DallasTemperature-3.8.1.tar.gz",
            "https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.11.0/DallasTemperature-3.11.0.tar.gz",
        ],
    )
    validate_cliresult(result)
@@ -176,11 +177,11 @@ def test_global_lib_list(clirunner, validate_cliresult):
        n in result.output
        for n in (
            "required: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
            "ArduinoJson @ 5.10.1",
            f"ArduinoJson @ {ARDUINO_JSON_VERSION}",
            "required: git+https://github.com/gioblu/PJON.git#3.0",
            "PJON @ 3.0.0+sha.1fb26f",
        )
    )
    ), result.output

    result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
    assert all(
@@ -188,7 +189,7 @@ def test_global_lib_list(clirunner, validate_cliresult):
        for n in (
            "__pkg_dir",
            '"__src_url": "git+https://github.com/gioblu/PJON.git#6.2"',
            '"version": "5.10.1"',
            f'"version": "{ARDUINO_JSON_VERSION}"',
        )
    )
    items1 = [i["name"] for i in json.loads(result.output)]
@@ -218,13 +219,13 @@ def test_global_lib_list(clirunner, validate_cliresult):
    ]
    versions2 = [
        "ArduinoJson@5.8.2",
        "ArduinoJson@5.10.1",
        f"ArduinoJson@{ARDUINO_JSON_VERSION}",
        "AsyncMqttClient@0.8.2",
        "NeoPixelBus@2.2.4",
        "NeoPixelBus@2.7.3",
        "PJON@6.2.0+sha.07fe9aa",
        "PJON@3.0.0+sha.1fb26fd",
        "PubSubClient@2.6.0+sha.bef5814",
        "Adafruit PN532@1.2.0",
        "Adafruit PN532@1.3.2",
    ]
    assert set(versions1) >= set(versions2)

@@ -234,7 +235,7 @@ def test_global_lib_update_check(clirunner, validate_cliresult):
    validate_cliresult(result)
    output = json.loads(result.output)
    assert set(
        ["Adafruit PN532", "AsyncMqttClient", "ESPAsyncTCP", "NeoPixelBus"]
        ["Adafruit PN532", "AsyncMqttClient", "AsyncTCP", "ESPAsyncTCP", "NeoPixelBus"]
    ) == set(lib["name"] for lib in output)


@@ -249,7 +250,7 @@ def test_global_lib_update(clirunner, validate_cliresult):
    assert "__pkg_dir" in oudated[0]
    result = clirunner.invoke(cmd_lib, ["-g", "update", oudated[0]["__pkg_dir"]])
    validate_cliresult(result)
    assert "Removing NeoPixelBus @ 2.2.4" in strip_ansi_codes(result.output)
    assert "Removing NeoPixelBus @ 2.7.3" in strip_ansi_codes(result.output)

    # update all libraries
    result = clirunner.invoke(

@@ -63,7 +63,7 @@ def test_install_unknown_from_registry(clirunner):
def test_install_core_3_dev_platform(clirunner, validate_cliresult, isolated_pio_core):
    result = clirunner.invoke(
        cli_platform.platform_install,
        ["atmelavr@1.2.0", "--skip-default-package"],
        ["atmelavr@2.2.0", "--skip-default-package"],
    )
    assert result.exit_code == 0

@@ -71,11 +71,11 @@ def test_install_core_3_dev_platform(clirunner, validate_cliresult, isolated_pio
def test_install_known_version(clirunner, validate_cliresult, isolated_pio_core):
    result = clirunner.invoke(
        cli_platform.platform_install,
        ["atmelavr@2.0.0", "--skip-default-package", "--with-package", "tool-avrdude"],
        ["atmelavr@4.2.0", "--skip-default-package", "--with-package", "tool-avrdude"],
    )
    validate_cliresult(result)
    output = strip_ansi_codes(result.output)
    assert "atmelavr @ 2.0.0" in output
    assert "atmelavr@4.2.0" in output
    assert not os.path.isdir(str(isolated_pio_core.join("packages")))


@@ -128,14 +128,14 @@ def test_update_raw(clirunner, validate_cliresult, isolated_pio_core):
    result = clirunner.invoke(cli_platform.platform_update, ["atmelavr"])
    validate_cliresult(result)
    output = strip_ansi_codes(result.output)
    assert "Removing atmelavr @ 2.0.0" in output
    assert "Removing atmelavr @ 4.2.0" in output
    assert "Platform Manager: Installing platformio/atmelavr @" in output
    assert len(isolated_pio_core.join("packages").listdir()) == 2


def test_uninstall(clirunner, validate_cliresult, isolated_pio_core):
    result = clirunner.invoke(
        cli_platform.platform_uninstall, ["atmelavr@1.2.0", "atmelavr", "espressif8266"]
        cli_platform.platform_uninstall, ["atmelavr@2.2.0", "atmelavr", "espressif8266"]
    )
    validate_cliresult(result)
    assert not isolated_pio_core.join("platforms").listdir()

@@ -246,67 +246,67 @@ int main(int argc, char *argv[]) {
    )


@pytest.mark.skipif(
    sys.platform != "darwin", reason="runs only on macOS (issue with SimAVR)"
)
def test_custom_testing_command(clirunner, validate_cliresult, tmp_path: Path):
    project_dir = tmp_path / "project"
    project_dir.mkdir()
    (project_dir / "platformio.ini").write_text(
        """
[env:uno]
platform = atmelavr
framework = arduino
board = uno
# @pytest.mark.skipif(
#     sys.platform != "darwin", reason="runs only on macOS (issue with SimAVR)"
# )
# def test_custom_testing_command(clirunner, validate_cliresult, tmp_path: Path):
#     project_dir = tmp_path / "project"
#     project_dir.mkdir()
#     (project_dir / "platformio.ini").write_text(
#         """
# [env:uno]
# platform = atmelavr
# framework = arduino
# board = uno

platform_packages =
    platformio/tool-simavr @ ^1
test_speed = 9600
test_testing_command =
    ${platformio.packages_dir}/tool-simavr/bin/simavr
    -m
    atmega328p
    -f
    16000000L
    ${platformio.build_dir}/${this.__env__}/firmware.elf
"""
    )
    test_dir = project_dir / "test" / "test_dummy"
    test_dir.mkdir(parents=True)
    (test_dir / "test_main.cpp").write_text(
        """
#include <Arduino.h>
#include <unity.h>
# platform_packages =
#     platformio/tool-simavr @ ^1
# test_speed = 9600
# test_testing_command =
#     ${platformio.packages_dir}/tool-simavr/bin/simavr
#     -m
#     atmega328p
#     -f
#     16000000L
#     ${platformio.build_dir}/${this.__env__}/firmware.elf
# """
#     )
#     test_dir = project_dir / "test" / "test_dummy"
#     test_dir.mkdir(parents=True)
#     (test_dir / "test_main.cpp").write_text(
#         """
# #include <Arduino.h>
# #include <unity.h>

void setUp(void) {
    // set stuff up here
}
# void setUp(void) {
#     // set stuff up here
# }

void tearDown(void) {
    // clean stuff up here
}
# void tearDown(void) {
#     // clean stuff up here
# }

void dummy_test(void) {
    TEST_ASSERT_EQUAL(1, 1);
}
# void dummy_test(void) {
#     TEST_ASSERT_EQUAL(1, 1);
# }

void setup() {
    UNITY_BEGIN();
    RUN_TEST(dummy_test);
    UNITY_END();
}
# void setup() {
#     UNITY_BEGIN();
#     RUN_TEST(dummy_test);
#     UNITY_END();
# }

void loop() {
    delay(1000);
}
"""
    )
    result = clirunner.invoke(
        pio_test_cmd,
        ["-d", str(project_dir), "--without-uploading"],
    )
    validate_cliresult(result)
    assert "dummy_test" in result.output
# void loop() {
#     delay(1000);
# }
# """
#     )
#     result = clirunner.invoke(
#         pio_test_cmd,
#         ["-d", str(project_dir), "--without-uploading"],
#     )
#     validate_cliresult(result)
#     assert "dummy_test" in result.output


def test_unity_setup_teardown(clirunner, validate_cliresult, tmpdir):

@@ -219,7 +219,7 @@ def test_install_from_registry(isolated_pio_core, tmpdir_factory):
    # test conflicted names
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("conflicted-storage")))
    lm.set_log_level(logging.ERROR)
    lm.install("z3t0/IRremote@2.6.1")
    lm.install("z3t0/IRremote")
    lm.install("mbed-yuhki50/IRremote")
    assert len(lm.get_installed()) == 2

@@ -335,7 +335,7 @@ def test_symlink(tmp_path: Path):
    # uninstall
    lm.uninstall("External")
    assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()]
    # ensure original package was not rmeoved
    # ensure original package was not removed
    assert external_pkg_dir.is_dir()

    # install again, remove from a disk
@@ -554,14 +554,14 @@ def test_uninstall(isolated_pio_core, tmpdir_factory):
    assert not lm.get_installed()

    # test uninstall dependencies
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4")
    assert lm.install("AsyncMqttClient-esphome")
    assert len(lm.get_installed()) == 3
    assert lm.uninstall("AsyncMqttClient-esphome", skip_dependencies=True)
    assert len(lm.get_installed()) == 2

    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4")
    assert lm.install("AsyncMqttClient-esphome")
    assert lm.uninstall("AsyncMqttClient-esphome")
    assert not lm.get_installed()

@@ -604,23 +604,23 @@ def test_update_with_metadata(isolated_pio_core, tmpdir_factory):
    assert str(outdated.current) == "1.8.7"
    assert outdated.latest > semantic_version.Version("1.10.0")

    pkg = lm.install("ArduinoJson @ 5.10.1")
    pkg = lm.install("ArduinoJson @ 6.19.4")
    # test latest
    outdated = lm.outdated(pkg)
    assert str(outdated.current) == "5.10.1"
    assert str(outdated.current) == "6.19.4"
    assert outdated.wanted is None
    assert outdated.latest > outdated.current
    assert outdated.latest > semantic_version.Version("5.99.99")

    # test wanted
    outdated = lm.outdated(pkg, PackageSpec("ArduinoJson@~5"))
    assert str(outdated.current) == "5.10.1"
    assert str(outdated.wanted) == "5.13.4"
    outdated = lm.outdated(pkg, PackageSpec("ArduinoJson@~6"))
    assert str(outdated.current) == "6.19.4"
    assert str(outdated.wanted) == "6.21.5"
    assert outdated.latest > semantic_version.Version("6.16.0")

    # update to the wanted 5.x
    new_pkg = lm.update("ArduinoJson@^5", PackageSpec("ArduinoJson@^5"))
    assert str(new_pkg.metadata.version) == "5.13.4"
    # update to the wanted 6.x
    new_pkg = lm.update("ArduinoJson@^6", PackageSpec("ArduinoJson@^6"))
    assert str(new_pkg.metadata.version) == "6.21.5"
    # check that old version is removed
    assert len(lm.get_installed()) == 2
Some files were not shown because too many files have changed in this diff.