Compare commits

..

200 Commits

Author SHA1 Message Date
Ivan Kravets
76b6de55d1 Added --json-output support for pkg list command 2024-03-29 20:46:32 +02:00
Ivan Kravets
d9a5b9def3 Raise exec exception by default 2024-03-29 20:44:16 +02:00
Ivan Kravets
3347e4b63f Merge branch 'develop' into feature/v7 2024-03-26 17:50:04 +02:00
Ivan Kravets
c475578db6 Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository // Resolve #4885 2024-03-26 13:44:26 +02:00
soiamsoNG
2bad42ecb1 Update 99-platformio-udev.rules (#4880)
Fix typo
2024-03-23 16:26:25 +02:00
Ivan Kravets
f3cfcd54a7 Update deps 2024-03-21 22:41:59 +02:00
Ivan Kravets
6ffd9124ba Merge branch 'develop' into feature/v7
# Conflicts:
#	platformio/commands/upgrade.py
2024-03-21 22:38:29 +02:00
Ivan Kravets
0acfc25d56 Bump version to 6.1.15a1 2024-03-21 21:30:22 +02:00
Ivan Kravets
e9433de50f Merge tag 'v6.1.14' into develop
Bump version to 6.1.14
2024-03-21 21:11:24 +02:00
Ivan Kravets
9d1593da0b Merge branch 'release/v6.1.14' 2024-03-21 21:11:24 +02:00
Ivan Kravets
fcba901611 Bump version to 6.1.14 2024-03-21 21:11:15 +02:00
Ivan Kravets
0e3249e8b1 Update deps 2024-03-21 21:08:41 +02:00
Ivan Kravets
0d647e164b Skip unnecessary import 2024-03-18 17:45:54 +02:00
Ivan Kravets
c01ef88265 Upgraded the build engine to the latest version of SCons (4.7.0) 2024-03-18 14:27:45 +02:00
Ivan Kravets
9fb9e586a0 Sync docs 2024-03-18 13:14:39 +02:00
Ivan Kravets
28bd200cd6 Move core/pip dependencies to the separate "dependencies" module 2024-03-18 13:14:22 +02:00
Ivan Kravets
25f7749e35 Merge branch 'develop' into feature/v7
# Conflicts:
#	docs
2024-03-16 18:00:14 +02:00
Ivan Kravets
56be27fb0b Bump version to 6.1.14rc1 2024-03-16 15:15:05 +02:00
Ivan Kravets
32991356f3 Enhanced Static Code Analysis to accommodate scenarios where custom src_dir or include_dir are located outside the project folder // Issue #4874 2024-03-16 14:07:01 +02:00
adrianstephens
dbe58b49bf paths fix for check tool (#4874)
* paths fix for check tool

* Minor changes

- Handle an edge case on Windows when sources and the project are located on different drives
- Cover edge cases with tests

---------

Co-authored-by: Valerii Koval <valeros@users.noreply.github.com>
2024-03-16 14:00:30 +02:00
Ivan Kravets
d36e39418e Corrected the validation of `symlink://` package specifications // Issue #4870 2024-03-16 13:45:21 +02:00
Ivan Kravets
c28740cfb1 Update tests 2024-03-16 13:43:44 +02:00
Will Miles
430acc87de Fix symlink package spec validation (#4870)
When validating symlink:// packages, use the specified symlink path,
the same as for file:// packages.  This fixes missing symlink packages
in 'pkg list' and reinstalling on every build.
2024-03-16 13:29:31 +02:00
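
A minimal sketch of the idea behind this fix (hypothetical helper names, not PlatformIO's actual API): a symlink:// spec is reduced to its underlying local path and validated the same way as a file:// spec, so the package is recognized by 'pkg list' and not reinstalled on every build.

    import os

    def resolve_local_spec(spec):
        # Return the local path for file:// or symlink:// specs, else None.
        for scheme in ("file://", "symlink://"):
            if spec.startswith(scheme):
                return os.path.abspath(spec[len(scheme):])
        return None

    def is_valid_local_spec(spec):
        # Both schemes point at an on-disk folder; validate that path directly.
        path = resolve_local_spec(spec)
        return path is not None and os.path.exists(path)

    # True only if /tmp/my_lib exists; both schemes behave identically.
    print(is_valid_local_spec("symlink:///tmp/my_lib"))
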
Ivan Kravets
c0d97287dd Add support for Python 3.12+ Tar extract filter 2024-03-16 12:32:15 +02:00
Ivan Kravets
0f3dbe623d Update tests 2024-03-16 12:30:51 +02:00
Ivan Kravets
6449115635 Update dependencies 2024-03-16 12:30:38 +02:00
Ivan Kravets
d085a02068 Sync docs 2024-02-22 18:27:22 +02:00
Ivan Kravets
76a11a75b7 Sync docs 2024-02-16 22:47:02 +02:00
Ivan Kravets
12e7979ec6 Skip empty params 2024-02-16 22:36:43 +02:00
Ivan Kravets
18413f54f6 Sync conflicted items 2024-02-16 21:19:39 +02:00
Ivan Kravets
d684233315 Merge branch 'develop' into feature/v7
# Conflicts:
#	docs
#	platformio/builder/tools/piobuild.py
#	platformio/debug/config/base.py
#	platformio/project/helpers.py
2024-02-16 17:22:49 +02:00
Ivan Kravets
93018930ab Bump version to 6.1.14b1 2024-02-16 17:08:28 +02:00
Ivan Kravets
621b24b665 Introduced the `--json-output` option to the `pio test` command // Resolve #4740 2024-02-16 17:08:03 +02:00
Ivan Kravets
7606dd4faf Minor improvements // Issue #4710 2024-02-16 17:06:50 +02:00
Ivan Kravets
aa06d21abe Resolved an issue with the hexlify filter in the device monitor command // Resolve #4732 2024-02-15 22:28:09 +02:00
Ivan Kravets
042f8dc668 Bump version to 6.1.14a2 2024-02-13 22:48:06 +02:00
Ivan Kravets
c4f76848a7 Validate only owner/name/version fields 2024-02-13 22:47:22 +02:00
Ivan Kravets
e1ff9a469d Resolved an issue where the LDF selected an incorrect library version // Resolve #4860 2024-02-13 19:32:57 +02:00
Ivan Kravets
2239616484 Sync docs 2024-02-13 19:01:20 +02:00
Jakob
55be7181b3 Add STM32 virtual COM port to udev.rules (#4773)
* add Espressif USB JTAG/serial debug unit to udev

* add STM32 virtual COM port
2024-02-10 22:21:49 +02:00
Ivan Kravets
f519a9d524 Update SPDX to 3.23 2024-02-09 11:34:12 +02:00
newmansnj
f4319f670c Update piomaxlen.py change from 8192 to 8191 MAX_LINE_LENGTH (#4856)
The maximum length of the string that you can use at the command prompt is 8191 characters.

This limitation applies to:

the command line
individual environment variables that are inherited by other processes, such as the PATH variable
all environment variable expansions
If you use Command Prompt to run batch files, this limitation also applies to batch file processing.

https://learn.microsoft.com/en-us/troubleshoot/windows-client/shell-experience/command-line-string-limitation#:~:text=The%20maximum%20length%20of%20the,such%20as%20the%20PATH%20variable
2024-02-09 08:20:40 +02:00
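
This 8191-character limit is why long tool invocations get spilled into a "response file" (the memory-usage tooling further down writes its arguments to a temporary file and passes it as @file). A rough sketch of that pattern, assuming a hypothetical run_tool() helper and a tool that accepts @file response files:

    import os
    import subprocess
    import sys
    import tempfile

    # Windows cmd.exe accepts at most 8191 characters; keep headroom for paths.
    MAX_LINE_LENGTH = (8191 if sys.platform == "win32" else 131072) - 512

    def run_tool(cmd, tool_args):
        if len(" ".join(cmd + tool_args)) <= MAX_LINE_LENGTH:
            return subprocess.run(cmd + tool_args, check=False)
        # Too long for one command line: write the args to a response file.
        with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as fp:
            fp.write("\n".join(tool_args))
            rsp_path = fp.name
        try:
            return subprocess.run(cmd + ["@" + rsp_path], check=False)
        finally:
            os.remove(rsp_path)
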
Levente Farkas
80fc335528 add LuatOS ESP32-C3 Development Board (#4852) 2024-02-07 13:42:49 +02:00
Ivan Kravets
353f440335 Addressed an issue where passing a relative path to the pio project init command resulted in an error // Resolve #4847 2024-02-07 13:36:21 +02:00
Ivan Kravets
3e9ca48588 Update deps 2024-02-07 13:35:50 +02:00
Song Yadong
255e91b51c fix a typo (#4845) 2024-01-29 08:54:18 +02:00
Ivan Kravets
ce91ef6e08 Revert ""memusage_dir" option" 2024-01-27 16:24:31 +02:00
Ivan Kravets
7ba086bdcb Merge branch 'develop' into feature/v7
# Conflicts:
#	platformio/__init__.py
#	platformio/commands/upgrade.py
#	platformio/http.py
#	platformio/project/options.py
#	platformio/registry/mirror.py
#	setup.py
2024-01-27 14:11:01 +02:00
Ivan Kravets
adf94843ea Apply formatting 2024-01-27 14:01:42 +02:00
Ivan Kravets
e3e08d9691 Resolved an issue related to the relative package path in the pio pkg publish command 2024-01-22 22:57:07 +02:00
Ivan Kravets
84c7ede0e1 Sync docs 2024-01-22 22:54:58 +02:00
Ivan Kravets
28c90652bc Update deps 2024-01-22 22:51:23 +02:00
Ivan Kravets
a75da327d0 Broadened version support for the pyelftools dependency // Resolve #4834 2024-01-17 12:45:33 +02:00
Ivan Kravets
adf4012b96 Update deps 2024-01-17 12:43:57 +02:00
Ivan Kravets
1fe806269d Start 6.1.14a1 2024-01-12 12:53:51 +02:00
Ivan Kravets
ffacd17387 Merge branch 'release/v6.1.13' 2024-01-12 12:52:01 +02:00
Ivan Kravets
4742ffc9d8 Merge tag 'v6.1.13' into develop
Bump version to 6.1.13
2024-01-12 12:52:01 +02:00
Ivan Kravets
700c705317 Bump version to 6.1.13 2024-01-12 12:51:49 +02:00
Ivan Kravets
17ba91977d Update deps 2024-01-12 12:51:28 +02:00
Ivan Kravets
f31f9fa616 Expanded support for SCons variables declared in the legacy format `${SCONS_VARNAME}` // Resolve #4828 2024-01-11 21:33:01 +02:00
Ivan Kravets
485f801c74 Bump version to 6.1.13a2 2024-01-11 19:23:59 +02:00
Ivan Kravets
adab425c6d Expanded support for SCons variables declared in the legacy format `${SCONS_VARNAME}` // Resolve #4828 2024-01-11 19:23:26 +02:00
Ivan Kravets
aabbbef944 Start 6.1.13 2024-01-10 16:39:38 +02:00
Ivan Kravets
14ce28e028 Merge tag 'v6.1.12' into develop
Bump version to 6.1.12
2024-01-10 16:17:11 +02:00
Ivan Kravets
ca1f633f9c Merge branch 'release/v6.1.12' 2024-01-10 16:17:10 +02:00
Ivan Kravets
a2f3e85760 Bump version to 6.1.12 2024-01-10 16:17:02 +02:00
Ivan Kravets
f422b5e05c Bump version to 6.1.12rc1 2024-01-09 21:01:09 +02:00
Ivan Kravets
ba58db3079 Introduced the capability to launch the debug server in a separate process // Resolve #4722 2024-01-09 21:00:42 +02:00
Ivan Kravets
4729d9f55d Bump version to 6.1.12b2 2024-01-06 17:48:47 +02:00
Ivan Kravets
41bd751ec2 Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy // Resolve #4822 2024-01-06 17:48:18 +02:00
Ivan Kravets
c74c9778a1 Enhance PIP dependency declarations // Resolve #4819 2024-01-06 16:24:01 +02:00
Ivan Kravets
f2d16e7631 Bump version to 6.1.12b1 2023-12-31 14:56:39 +02:00
Ivan Kravets
b181406a1f Resolve an issue where custom debug configurations were being inadvertently overwritten in VSCode's `launch.json` // Resolve #4810 2023-12-31 14:54:57 +02:00
Ivan Kravets
dc16f80ffc Minor improvements // Issue #4699 2023-12-30 18:27:40 +02:00
Ivan Kravets
125be4bfd4 urllib3 v2.0 only supports OpenSSL 1.1.1+ // Issue #4614 2023-12-30 14:44:35 +02:00
Ivan Kravets
14907579cd Docs: Sync dev-platforms 2023-12-27 15:23:01 +02:00
Ivan Kravets
b0a1f3ae16 Update deps 2023-12-22 19:55:15 +02:00
Ivan Kravets
195304bbea Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed // Resolve #4699 2023-12-09 15:27:06 +02:00
Ivan Kravets
e4c4f2ac50 Resolved an issue where native debugging failed on the host machine // Resolve #4745 2023-12-09 15:12:16 +02:00
Ivan Kravets
77e6d1b099 PyLint fix for Python 3.12 2023-12-09 13:51:51 +02:00
Ivan Kravets
cf4da42b25 Bump version to 6.1.12a6 2023-12-09 13:18:21 +02:00
Ivan Kravets
51bf17515e Revert back normalizing platformio.*_dir option 2023-12-09 13:18:03 +02:00
Ivan Kravets
1e2c37c190 Bump version to 6.1.12a5 2023-12-09 13:06:42 +02:00
Ivan Kravets
204a60dd52 Parse only $PROJECT_HASH legacy built-in variable 2023-12-09 13:06:20 +02:00
Ivan Kravets
0f554d2f31 PyLint fixes 2023-12-09 13:05:51 +02:00
Ivan Kravets
f382aae66b Do not normalize platformio.***_dir path when validating 2023-12-09 13:05:30 +02:00
Ivan Kravets
998da59f7c Add support for Python 3.12 2023-12-08 20:18:29 +02:00
Ivan Kravets
4cad98601d Bump version to 6.1.12a4 2023-12-08 19:48:36 +02:00
Ivan Kravets
34545d3f12 Switch to Python 3.11 for CI tasks 2023-12-08 19:45:31 +02:00
Ivan Kravets
127b422d25 Rectified an issue where ${platformio.name} erroneously represented None as the default project name // Resolve #4717 2023-12-08 19:38:50 +02:00
Ivan Kravets
8c61f0f6b6 Enhanced the handling of built-in variables during interpolation // Resolve #4695 2023-12-08 19:12:00 +02:00
Ivan Kravets
fb93c1937c Provide a map for nrf52 and rp2040 declared in library.properties // Resolve #4803 2023-12-07 14:09:11 +02:00
Ivan Kravets
827bd09c61 Bump version to 6.1.12a3 2023-12-01 20:28:50 +02:00
Ivan Kravets
984d63983d Upgrade the build engine to the latest version of SCons (4.6.0) // Resolve #4789 2023-12-01 20:28:25 +02:00
Ivan Kravets
11df021750 Resolve an issue where running pio project metadata resulted in duplicated include entries // Resolve #4723 2023-12-01 19:38:25 +02:00
Ivan Kravets
ac6d94860b Update deps 2023-12-01 15:55:36 +02:00
Ivan Kravets
b238c55e53 Introduced a warning during the verification of MCU maximum RAM usage // Resolve #4791 2023-11-28 18:15:16 +02:00
valeros
961ab6b35e Properly handle the check_src_filters option per environment
Resolves #4788
2023-11-17 13:12:40 +02:00
Ivan Kravets
e1f34c7ea0 Drastically enhanced the speed of project building when operating in verbose mode // Resolve #4783 2023-11-16 20:05:44 +02:00
Ryan Govostes
f70e6d50c6 Remove unused build output buffer (#4786)
* Remove unused build output buffer

* Update proc.py

* Update proc.py

---------

Co-authored-by: Ivan Kravets <me@ikravets.com>
2023-11-16 19:59:30 +02:00
Ivan Kravets
540465291a Resolved an issue where `get_systype()` inaccurately returned the architecture // Resolve #4777 2023-11-16 19:01:24 +02:00
D. Stuart Freeman
0b3c0144e6 fix get_systype() on raspbian (#4784)
fix get_systype on raspbian

fixes #4777
2023-11-16 18:57:16 +02:00
Ivan Kravets
7ab27ddf9d Resolved an issue where the `COMPILATIONDB_INCLUDE_TOOLCHAIN` setting was not correctly applying to private libraries // Resolve #4762 2023-11-16 18:54:45 +02:00
Pierre Baudin
e78bf51f68 Issue #4762 compiledb include toolchains with user defined libs (#4763)
Issue #4762 separated out the optional addition of toolchains to compiledb include paths and added a call to it in the library building step for user defined libraries, allowing the automatic inclusion of toolchains in private lib compiledb generation
2023-11-16 18:44:32 +02:00
Jakob
5f8c15b96a add Espressif USB JTAG/serial debug unit to udev (#4759) 2023-10-16 00:01:23 +03:00
Ivan Kravets
9c61ef544d Fix test 2023-10-16 00:01:05 +03:00
Ivan Kravets
5548197a74 Update "pyelftools" to 0.30 2023-10-09 11:41:22 +03:00
Ivan Kravets
2458309d55 Update SPDX to 3.22 2023-10-09 11:41:01 +03:00
valeros
7229e1cce4 Add missing Zephyr project files in tests for PIO Check 2023-09-30 12:09:36 +03:00
Ivan Kravets
3e95134721 Sync docs 2023-09-29 23:24:03 +03:00
Ivan Kravets
687189a142 Sync docs 2023-09-23 20:51:47 +03:00
Ivan Kravets
51b4cd88db Docs: Add image with PlatformIO CLI for VSCode 2023-09-22 20:02:48 +03:00
Ivan Kravets
fe52b79eb2 Docs: Fix example with the custom common options 2023-09-13 20:21:22 +03:00
Ivan Kravets
091c96eb07 Sync docs 2023-08-31 19:40:20 +03:00
Ivan Kravets
f2eead6ece Bump version to 6.1.12a1 2023-08-31 19:32:15 +03:00
Ivan Kravets
c2b3097618 Merge branch 'release/v6.1.11' 2023-08-31 19:31:46 +03:00
Ivan Kravets
2728c90441 Merge tag 'v6.1.11' into develop
Bump version to 6.1.11
2023-08-31 19:31:46 +03:00
Ivan Kravets
5cac6d8b88 Bump version to 6.1.11 2023-08-31 19:31:37 +03:00
Ivan Kravets
bd34c0f437 Bump version to 6.1.12a1 2023-08-31 17:55:04 +03:00
Ivan Kravets
f1c445be15 Bump version to 6.1.11 2023-08-31 17:54:11 +03:00
Ivan Kravets
b88c393b4e Ensure SCons is installed on "project init" 2023-08-31 15:53:29 +03:00
Ivan Kravets
897844ebc1 Docs: Sync dev-platforms 2023-08-18 14:39:03 +03:00
Ivan Kravets
69acd5c9b4 Merge branch 'develop' into feature/v7
# Conflicts:
#	docs
2023-08-16 17:45:29 +03:00
Ivan Kravets
00409fc096 Sync docs 2023-08-16 17:44:37 +03:00
Ivan Kravets
33f2cd5dd5 Rename memusage.read_report to load_report 2023-08-16 17:24:13 +03:00
Ivan Kravets
562fb22a70 Merge branch 'develop' into feature/v7
# Conflicts:
#	docs
2023-08-14 19:04:00 +03:00
Ivan Kravets
b75bdbd320 Bump version to 6.1.11a2 2023-08-14 19:03:19 +03:00
Ivan Kravets
a0f8def616 Resolved a regression issue with Memory Inspection in PIO Home 2023-08-14 19:02:58 +03:00
Ivan Kravets
c946613019 Improve docs for the Installer Script 2023-08-14 16:18:28 +03:00
valeros
2ee8214485 Use CC location for resolving toolchain path in VSCode template
VSCode uses the "packagetoolchainBinDir" option to find the objdump
binary and if GDB is shipped separately from the toolchain package then
VSCode will fail to launch a debug session due to missing objdump
2023-08-14 15:57:00 +03:00
Ivan Kravets
7e89e551ae Sync docs 2023-08-14 14:04:09 +03:00
Ivan Kravets
6972c9c100 Sync docs 2023-08-14 13:05:30 +03:00
Ivan Kravets
5cfaea91d6 Docs: Update docs to use curl or wget on Unix when installing PlatformIO Core 2023-08-14 11:45:31 +03:00
Ivan Kravets
ce735c0ae5 Sync docs 2023-08-14 10:59:43 +03:00
Ivan Kravets
007dc7e96d Merge branch 'develop' into feature/v7
# Conflicts:
#	platformio/project/integration/tpls/clion/CMakeListsPrivate.txt.tpl
2023-08-11 18:21:06 +03:00
Ivan Kravets
aa0df36c8a Remove CLion from tests 2023-08-11 13:49:16 +03:00
Ivan Kravets
99224d7d4e Remove CLion from tests 2023-08-11 13:49:03 +03:00
Ivan Kravets
532759c0c6 Bump version to 6.1.11a1 2023-08-11 13:21:44 +03:00
Ivan Kravets
fb43d2508a Merge branch 'release/v6.1.10' 2023-08-11 13:18:54 +03:00
Ivan Kravets
07944a9d5b Merge tag 'v6.1.10' into develop
Bump version to 6.1.10
2023-08-11 13:18:54 +03:00
Ivan Kravets
8b6a4b8ce8 Bump version to 6.1.10 2023-08-11 13:18:41 +03:00
Ivan Kravets
6e75dc0d57 Remove unnecessary files 2023-08-11 12:46:19 +03:00
Ivan Kravets
a733f3c868 Update slogan 2023-08-11 12:07:16 +03:00
Ivan Kravets
65397fe059 Bump version to 6.1.10b1 2023-08-04 20:16:49 +03:00
Ivan Kravets
48a823d39e Do not check internet connection on the postponed telemetry // Resolve #4706 2023-08-04 20:15:14 +03:00
Ivan Kravets
1f7bda7136 Migrate from "requests" to the "httpx" 2023-07-31 19:13:05 +03:00
Ivan Kravets
6b2d04b810 Export http.fetch_http_content to "public" 2023-07-31 19:12:31 +03:00
Ivan Kravets
c9b3e4ed65 Update code in accordance to the Python 3.11 2023-07-31 19:10:05 +03:00
Ivan Kravets
527e7f16f6 Close API client on exit 2023-07-31 19:09:32 +03:00
Ivan Kravets
30fad62d05 Merge branch 'develop' into feature/v7 2023-07-29 16:05:02 +03:00
Ivan Kravets
f8b5266c1e Bump version to 6.1.10a4 2023-07-29 16:04:20 +03:00
Ivan Kravets
9170eee6e4 Resolved an issue with "ModuleNotFoundError: No module named 'chardet'" on macOS ARM // Resolve #4702 2023-07-29 16:03:14 +03:00
Ivan Kravets
89f4574680 Remove unused files 2023-07-29 16:02:18 +03:00
Ivan Kravets
831f7f52bc Automatically update PIO Core PyPi dependencies on "upgrade" operation 2023-07-29 16:01:57 +03:00
Ivan Kravets
ff6b6df9ce Merge branch 'develop' into feature/v7 2023-07-28 18:40:46 +03:00
Ivan Kravets
dccc14b507 Ensure that PIO Core System Info works on all supported Pythons 2023-07-28 18:40:14 +03:00
Ivan Kravets
3a21f48c9c Lock "marshmallow" dependency to the 3.19.0 for Python 3.7 2023-07-28 18:37:30 +03:00
Ivan Kravets
54ff3a8d4e Test all compatible Pythons 2023-07-28 18:24:47 +03:00
Ivan Kravets
fbb752b321 Merge branch 'develop' into feature/v7 2023-07-28 15:33:50 +03:00
Ivan Kravets
4474175e52 Docs: Sync renesas-ra dev-platform 2023-07-28 14:34:59 +03:00
Ivan Kravets
c3b8f2d3c0 Merge branch 'develop' into feature/v7 2023-07-27 15:04:19 +03:00
Ivan Kravets
a983075dac Update deps 2023-07-27 15:04:09 +03:00
Ivan Kravets
3268b516a9 Respect user privacy based on UA data 2023-07-27 15:03:57 +03:00
Ivan Kravets
451a3fc87b Implement read_report RPC 2023-07-27 14:53:17 +03:00
Ivan Kravets
c4126ea5b3 Implement memusage.history RPC 2023-07-27 14:43:03 +03:00
Ivan Kravets
1d44b3e9c8 Make methods "async" to avoid thread issue with fs.cd 2023-07-27 14:42:34 +03:00
Ivan Kravets
154244b7e3 Move "sections" data to the "memory" space 2023-07-27 14:41:54 +03:00
Ivan Kravets
33abe19831 Fix issue with changing working dir 2023-07-25 15:55:09 +03:00
Ivan Kravets
a3ad3103ef Implement memory usage profiling RPC 2023-07-25 12:31:25 +03:00
Ivan Kravets
65b31c69b0 Support "force_ansi" option for core.exec and allow to raise exception on cmd error 2023-07-25 12:25:57 +03:00
Ivan Kravets
d2fd0f242e Support "force_ansi" option for core.exec and allow to raise exception on cmd error 2023-07-25 12:25:42 +03:00
Ivan Kravets
e3557760df Raise generic Python exception 2023-07-25 12:22:41 +03:00
Ivan Kravets
6313042291 Remove unused code 2023-07-21 16:35:42 +03:00
Ivan Kravets
5f75e36efd Move "get_project_id" to the project helpers 2023-07-21 15:28:26 +03:00
Ivan Kravets
9deb7f4275 Run sync RPC methods in thread 2023-07-21 15:27:31 +03:00
Ivan Kravets
9affc023a2 Restore env.GetBuildType, community projects and dev-platforms depend on it 2023-07-19 15:46:10 +03:00
Ivan Kravets
fb2f850f1d Deploy docs only on "develop" and "master" branches 2023-07-19 15:44:41 +03:00
Ivan Kravets
45da8da093 Merge branch 'develop' into feature/v7 2023-07-19 15:13:07 +03:00
Ivan Kravets
b135a73945 Construct build directory based on the build type // Resolve #4373 2023-07-19 15:12:31 +03:00
valeros
5c9b373b65 Update Cppcheck to v2.11 2023-07-17 14:12:52 +03:00
Ivan Kravets
0da1a38df5 Replace "idedata" target with "pio project metadata" command 2023-07-15 15:31:01 +03:00
Ivan Kravets
0fe6bf262e Ensure that the `monitor` target is not executed if any of the preceding targets encounter failures 2023-07-15 15:15:21 +03:00
Ivan Kravets
390755c499 Implement app.get_project_id() 2023-07-15 14:56:56 +03:00
Ivan Kravets
deca77d1a3 Implement project.configuration endpoint 2023-07-15 14:22:27 +03:00
Ivan Kravets
bc2e51d51f Bump version to 6.1.10a3 2023-07-15 12:09:15 +03:00
Ivan Kravets
bce70d4945 Remove unnecessary dependencies 2023-07-15 12:08:41 +03:00
Ivan Kravets
940fa327f5 Lock "starlette==0.29.0" to Python 3.7 2023-07-13 17:14:59 +03:00
Ivan Kravets
db8f027f30 Refactor platform-related calls to PlatformFactory.from_env 2023-07-12 18:28:20 +03:00
valeros
39b61d50e6 Fix issues reported by Pylint 2023-07-12 10:39:50 +03:00
valeros
f85c3081fe Minor improvements for firmware memory footprint analysis
- Added section name to the section data so that a dev-platform can apply a regex to determine section location in memory
- Performance improvement by analyzing memory location of a firmware section only once
2023-07-11 22:12:54 +03:00
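
A rough sketch of the caching idea described here (hypothetical section patterns, not the actual implementation): the memory location of a firmware section is classified once per section name, so a dev-platform regex is not re-evaluated for every symbol in that section.

    import re
    from functools import lru_cache

    # Example patterns a dev-platform might supply to map sections to memory.
    SECTION_PATTERNS = {
        "flash": re.compile(r"\.(text|rodata)"),
        "ram": re.compile(r"\.(data|bss|noinit)"),
    }

    @lru_cache(maxsize=None)
    def section_location(section_name):
        # Evaluated once per unique section name thanks to the cache.
        for location, pattern in SECTION_PATTERNS.items():
            if pattern.match(section_name):
                return location
        return "unknown"
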
Ivan Kravets
2a1fd273ee Normalize configuration file path 2023-07-11 20:13:26 +03:00
Ivan Kravets
a423a4dde4 Rename "PackageMetaData" class to "PackageMetadata" 2023-07-11 20:12:20 +03:00
Ivan Kravets
abda3edad6 Use "UNIX_TIME" as the main variable 2023-07-11 15:20:57 +03:00
Ivan Kravets
be4d016f61 Resolved an issue with the "pio pkg exec" command on Windows while executing Python scripts from a package 2023-07-11 14:23:05 +02:00
Ivan Kravets
68e62c7137 Expose sizedata API 2023-07-10 17:41:21 +03:00
Ivan Kravets
bf8f1e9efb Remove redundant code 2023-07-07 22:48:09 +03:00
Ivan Kravets
a102fd2d48 Bump version to 6.1.10a2 2023-07-07 22:36:58 +03:00
Ivan Kravets
ff221b103a Resolved an issue that caused generated projects for PlatformIO IDE for VSCode to break when the `-iprefix` compiler flag was used 2023-07-07 22:36:43 +03:00
Ivan Kravets
646aa4f45b Bump version to 6.1.10a1 2023-07-06 14:44:56 +03:00
Ivan Kravets
325d4c16b8 Merge tag 'v6.1.9' into develop
Bump version to 6.1.9
2023-07-06 14:34:07 +03:00
154 changed files with 2270 additions and 2020 deletions

View File

@@ -8,12 +8,12 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-20.04, windows-latest, macos-latest]
python-version: ["3.6", "3.9", "3.11"]
python-version: ["3.6", "3.7", "3.11", "3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
@@ -27,12 +27,17 @@ jobs:
python -m pip install --upgrade pip
pip install tox
- name: Core System Info
run: |
tox -e py
- name: Python Lint
if: ${{ matrix.python-version != '3.6' }}
run: |
tox -e lint
- name: Integration Tests
if: ${{ matrix.python-version == '3.11' }}
run: |
tox -e testcore

View File

@@ -12,14 +12,14 @@ jobs:
environment: production
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"
- name: Install dependencies
run: |
@@ -35,7 +35,8 @@ jobs:
tox -e testcore
- name: Build Python source tarball
run: python setup.py sdist bdist_wheel
# run: python setup.py sdist bdist_wheel
run: python setup.py sdist
- name: Publish package to PyPI
if: ${{ github.ref == 'refs/heads/master' }}

View File

@@ -7,13 +7,13 @@ jobs:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
@@ -49,12 +49,12 @@ jobs:
name: Deploy Docs
needs: build
runs-on: ubuntu-latest
if: ${{ github.event_name == 'push' && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master') }}
env:
DOCS_REPO: platformio/platformio-docs
DOCS_DIR: platformio-docs
LATEST_DOCS_DIR: latest-docs
RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v3
@@ -78,7 +78,7 @@ jobs:
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}

View File

@@ -15,14 +15,14 @@ jobs:
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "3.11"
- name: Install dependencies
run: |

View File

@@ -40,20 +40,20 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11
- name: Install PlatformIO
run: pip install -U .
- name: Check out ${{ matrix.project.repository }}
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
submodules: "recursive"
repository: ${{ matrix.project.repository }}

View File

@@ -7,13 +7,74 @@ Release Notes
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
.. _release_notes_6:
PlatformIO Core 6
-----------------
**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.
6.1.15 (2024-??-??)
~~~~~~~~~~~~~~~~~~~
* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
6.1.14 (2024-03-21)
~~~~~~~~~~~~~~~~~~~
* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)
6.1.13 (2024-01-12)
~~~~~~~~~~~~~~~~~~~
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
6.1.12 (2024-01-10)
~~~~~~~~~~~~~~~~~~~
* Added support for Python 3.12
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
6.1.11 (2023-08-31)
~~~~~~~~~~~~~~~~~~~
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
6.1.10 (2023-08-11)
~~~~~~~~~~~~~~~~~~~
* Resolved an issue that caused generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to break when the ``-iprefix`` compiler flag was used
* Resolved an issue encountered while utilizing the `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`__ command on the Windows platform to execute Python scripts from a package
* Implemented a crucial improvement to the `pio run <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, guaranteeing that the ``monitor`` target is not executed if any of the preceding targets, such as ``upload``, encounter failures
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.11 with new checks, CLI commands and various analysis improvements
* Resolved a critical issue that arose on macOS ARM platforms due to the Python "requests" module, leading to a "ModuleNotFoundError: No module named 'chardet'" (`issue #4702 <https://github.com/platformio/platformio-core/issues/4702>`_)
6.1.9 (2023-07-06)
~~~~~~~~~~~~~~~~~~

View File

@@ -36,9 +36,11 @@ PlatformIO Core
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core
`PlatformIO <https://platformio.org>`_ is a professional collaborative platform for embedded development.
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.
**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.
* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger

docs

Submodule docs updated: f8dbf012e4...670721e923

View File

@@ -12,20 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (6, 1, 9)
VERSION = (6, 1, "15a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
"Your Gateway to Embedded Software Development Excellence. "
"Unlock the true potential of embedded software development "
"with PlatformIO's collaborative ecosystem, embracing "
"declarative principles, test-driven methodologies, and "
"modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"
@@ -42,15 +38,6 @@ __registry_mirror_hosts__ = [
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pioremote": "~1.0.0",
"tool-scons": "~4.40502.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.150005.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org

View File

@@ -66,15 +66,6 @@ def configure():
if IS_CYGWIN:
raise exception.CygwinEnvDetected()
# https://urllib3.readthedocs.org
# /en/latest/security.html#insecureplatformwarning
try:
import urllib3 # pylint: disable=import-outside-toplevel
urllib3.disable_warnings()
except (AttributeError, ImportError):
pass
# Handle IOError issue with VSCode's Terminal (Windows)
click_echo_origin = [click.echo, click.secho]

View File

@@ -17,7 +17,7 @@ import time
from platformio import __accounts_api__, app
from platformio.exception import PlatformioException, UserSideException
from platformio.http import HTTPClient, HTTPClientError
from platformio.http import HttpApiClient, HttpClientApiError
class AccountError(PlatformioException):
@@ -32,7 +32,7 @@ class AccountAlreadyAuthorized(AccountError, UserSideException):
MESSAGE = "You are already authorized with {0} account."
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
class AccountClient(HttpApiClient): # pylint:disable=too-many-public-methods
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
def __init__(self):
@@ -60,7 +60,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
def fetch_json_data(self, *args, **kwargs):
try:
return super().fetch_json_data(*args, **kwargs)
except HTTPClientError as exc:
except HttpClientApiError as exc:
raise AccountError(exc) from exc
def fetch_authentication_token(self):

View File

@@ -19,18 +19,18 @@ from platformio.account.client import AccountClient, AccountNotAuthorized
@click.command("destroy", short_help="Destroy account")
def account_destroy_cmd():
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s user account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% client.get_logged_username(),
abort=True,
)
client.destroy_account()
try:
client.logout()
except AccountNotAuthorized:
pass
with AccountClient() as client:
click.confirm(
"Are you sure you want to delete the %s user account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% client.get_logged_username(),
abort=True,
)
client.destroy_account()
try:
client.logout()
except AccountNotAuthorized:
pass
click.secho(
"User account has been destroyed.",
fg="green",

View File

@@ -20,8 +20,8 @@ from platformio.account.client import AccountClient
@click.command("forgot", short_help="Forgot password")
@click.option("--username", prompt="Username or email")
def account_forgot_cmd(username):
client = AccountClient()
client.forgot_password(username)
with AccountClient() as client:
client.forgot_password(username)
click.secho(
"If this account is registered, we will send the "
"further instructions to your email.",

View File

@@ -21,6 +21,6 @@ from platformio.account.client import AccountClient
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login_cmd(username, password):
client = AccountClient()
client.login(username, password)
with AccountClient() as client:
client.login(username, password)
click.secho("Successfully logged in!", fg="green")

View File

@@ -19,6 +19,6 @@ from platformio.account.client import AccountClient
@click.command("logout", short_help="Log out of PlatformIO Account")
def account_logout_cmd():
client = AccountClient()
client.logout()
with AccountClient() as client:
client.logout()
click.secho("Successfully logged out!", fg="green")

View File

@@ -21,6 +21,6 @@ from platformio.account.client import AccountClient
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password_cmd(old_password, new_password):
client = AccountClient()
client.change_password(old_password, new_password)
with AccountClient() as client:
client.change_password(old_password, new_password)
click.secho("Password successfully changed!", fg="green")

View File

@@ -43,8 +43,8 @@ from platformio.account.validate import (
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register_cmd(username, email, password, firstname, lastname):
client = AccountClient()
client.registration(username, email, password, firstname, lastname)
with AccountClient() as client:
client.registration(username, email, password, firstname, lastname)
click.secho(
"An account has been successfully created. "
"Please check your mail to activate your account and verify your email address.",

View File

@@ -25,8 +25,8 @@ from platformio.account.client import AccountClient
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show_cmd(offline, json_output):
client = AccountClient()
info = client.get_account_info(offline)
with AccountClient() as client:
info = client.get_account_info(offline)
if json_output:
click.echo(json.dumps(info))
return

View File

@@ -24,8 +24,8 @@ from platformio.account.client import AccountClient
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token_cmd(password, regenerate, json_output):
client = AccountClient()
auth_token = client.auth_token(password, regenerate)
with AccountClient() as client:
auth_token = client.auth_token(password, regenerate)
if json_output:
click.echo(json.dumps({"status": "success", "result": auth_token}))
return

View File

@@ -25,8 +25,8 @@ from platformio.account.validate import validate_email, validate_username
@click.option("--firstname")
@click.option("--lastname")
def account_update_cmd(current_password, **kwargs):
client = AccountClient()
profile = client.get_profile()
with AccountClient() as client:
profile = client.get_profile()
new_profile = profile.copy()
if not any(kwargs.values()):
for field in profile:

View File

@@ -25,8 +25,8 @@ from platformio.account.client import AccountClient
"username",
)
def org_add_cmd(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
with AccountClient() as client:
client.add_org_owner(orgname, username)
return click.secho(
"The new owner `%s` has been successfully added to the `%s` organization."
% (username, orgname),

View File

@@ -30,8 +30,8 @@ from platformio.account.validate import validate_email, validate_orgname
"--displayname",
)
def org_create_cmd(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
with AccountClient() as client:
client.create_org(orgname, email, displayname)
return click.secho(
"The organization `%s` has been successfully created." % orgname,
fg="green",

View File

@@ -20,14 +20,14 @@ from platformio.account.client import AccountClient
@click.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def org_destroy_cmd(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the `%s` organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
with AccountClient() as client:
click.confirm(
"Are you sure you want to delete the `%s` organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho(
"Organization `%s` has been destroyed." % orgname,
fg="green",

View File

@@ -23,8 +23,8 @@ from platformio.account.client import AccountClient
@click.command("list", short_help="List organizations and their members")
@click.option("--json-output", is_flag=True)
def org_list_cmd(json_output):
client = AccountClient()
orgs = client.list_orgs()
with AccountClient() as client:
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:

View File

@@ -25,8 +25,8 @@ from platformio.account.client import AccountClient
"username",
)
def org_remove_cmd(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
with AccountClient() as client:
client.remove_org_owner(orgname, username)
return click.secho(
"The `%s` owner has been successfully removed from the `%s` organization."
% (username, orgname),

View File

@@ -31,8 +31,8 @@ from platformio.account.validate import validate_email, validate_orgname
)
@click.option("--displayname")
def org_update_cmd(cur_orgname, **kwargs):
client = AccountClient()
org = client.get_org(cur_orgname)
with AccountClient() as client:
org = client.get_org(cur_orgname)
new_org = {
key: value if value is not None else org[key] for key, value in kwargs.items()
}

View File

@@ -29,8 +29,8 @@ from platformio.account.validate import validate_orgname_teamname
)
def team_add_cmd(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.add_team_member(orgname, teamname, username)
with AccountClient() as client:
client.add_team_member(orgname, teamname, username)
return click.secho(
"The new member %s has been successfully added to the %s team."
% (username, teamname),

View File

@@ -29,8 +29,8 @@ from platformio.account.validate import validate_orgname_teamname
)
def team_create_cmd(orgname_teamname, description):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.create_team(orgname, teamname, description)
with AccountClient() as client:
client.create_team(orgname, teamname, description)
return click.secho(
"The team %s has been successfully created." % teamname,
fg="green",

View File

@@ -32,8 +32,8 @@ def team_destroy_cmd(orgname_teamname):
),
abort=True,
)
client = AccountClient()
client.destroy_team(orgname, teamname)
with AccountClient() as client:
client.destroy_team(orgname, teamname)
return click.secho(
"The team %s has been successfully destroyed." % teamname,
fg="green",

View File

@@ -24,19 +24,22 @@ from platformio.account.client import AccountClient
@click.argument("orgname", required=False)
@click.option("--json-output", is_flag=True)
def team_list_cmd(orgname, json_output):
client = AccountClient()
data = {}
if not orgname:
for item in client.list_orgs():
teams = client.list_teams(item.get("orgname"))
data[item.get("orgname")] = teams
else:
teams = client.list_teams(orgname)
data[orgname] = teams
with AccountClient() as client:
data = {}
if not orgname:
for item in client.list_orgs():
teams = client.list_teams(item.get("orgname"))
data[item.get("orgname")] = teams
else:
teams = client.list_teams(orgname)
data[orgname] = teams
if json_output:
return click.echo(json.dumps(data[orgname] if orgname else data))
if not any(data.values()):
return click.secho("You do not have any teams.", fg="yellow")
for org_name, teams in data.items():
for team in teams:
click.echo()
@@ -48,11 +51,13 @@ def team_list_cmd(orgname, json_output):
table_data.append(
(
"Members:",
", ".join(
(member.get("username") for member in team.get("members"))
)
if team.get("members")
else "-",
(
", ".join(
(member.get("username") for member in team.get("members"))
)
if team.get("members")
else "-"
),
)
)
click.echo(tabulate(table_data, tablefmt="plain"))

View File

@@ -27,8 +27,8 @@ from platformio.account.validate import validate_orgname_teamname
@click.argument("username")
def team_remove_cmd(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.remove_team_member(orgname, teamname, username)
with AccountClient() as client:
client.remove_team_member(orgname, teamname, username)
return click.secho(
"The %s member has been successfully removed from the %s team."
% (username, teamname),

View File

@@ -34,8 +34,8 @@ from platformio.account.validate import validate_orgname_teamname, validate_team
)
def team_update_cmd(orgname_teamname, **kwargs):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
team = client.get_team(orgname, teamname)
with AccountClient() as client:
team = client.get_team(orgname, teamname)
new_team = {
key: value if value is not None else team[key] for key, value in kwargs.items()
}

View File

@@ -270,6 +270,8 @@ def get_user_agent():
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
data.append("Python/%s" % platform.python_version())
data.append("Platform/%s" % platform.platform())
if not get_setting("enable_telemetry"):
data.append("Telemetry/0")
return " ".join(data)

View File

@@ -36,6 +36,8 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
# QinHeng Electronics HL-340 USB-Serial adapter
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
# QinHeng Electronics CH343 USB-Serial adapter
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
# QinHeng Electronics CH9102 USB-Serial adapter
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
@@ -85,6 +87,8 @@ ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVI
# AIR32F103
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
# STM32 virtual COM port
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
#
# Debuggers
@@ -171,3 +175,6 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
# Atmel AVR Dragon
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
# Espressif USB JTAG/serial debug unit
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"

View File

@@ -15,7 +15,7 @@
import json
import os
import sys
from time import time
import time
import click
from SCons.Script import ARGUMENTS # pylint: disable=import-error
@@ -31,14 +31,13 @@ from SCons.Script import Variables # pylint: disable=import-error
from platformio import app, fs
from platformio.platform.base import PlatformBase
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_dir
from platformio.project.helpers import get_build_type, get_project_dir
AllowSubstExceptions(NameError)
# append CLI arguments to build environment
clivars = Variables(None)
clivars.AddVariables(
("PLATFORM_MANIFEST",),
("BUILD_SCRIPT",),
("PROJECT_CONFIG",),
("PIOENV",),
@@ -62,7 +61,7 @@ DEFAULT_ENV_OPTIONS = dict(
"piotarget",
"piolib",
"pioupload",
"piosize",
"piomemusage",
"pioino",
"piomisc",
"piointegration",
@@ -72,16 +71,7 @@ DEFAULT_ENV_OPTIONS = dict(
variables=clivars,
# Propagating External Environment
ENV=os.environ,
TIMESTAMP=int(time()),
UNIX_TIME="$TIMESTAMP", # deprecated
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
LIBPATH=["$BUILD_DIR"],
PROGNAME="program",
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PROG_PATH="$PROGPATH", # deprecated
UNIX_TIME=int(time.time()),
PYTHONEXE=get_pythonexe_path(),
)
@@ -128,13 +118,21 @@ env.Replace(
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
BUILD_TYPE=env.GetBuildType(),
BUILD_TYPE=get_build_type(config, env["PIOENV"], COMMAND_LINE_TARGETS),
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV", "$BUILD_TYPE"),
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
LIBPATH=["$BUILD_DIR"],
LIBSOURCE_DIRS=[
config.get("platformio", "lib_dir"),
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
config.get("platformio", "globallib_dir"),
],
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
PROGNAME="program",
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PROG_PATH="$PROGPATH", # deprecated
)
if int(ARGUMENTS.get("ISATTY", 0)):
@@ -185,7 +183,7 @@ env.SConscript(env.GetExtraScripts("post"), exports="env")
# Checking program size
if env.get("SIZETOOL") and not (
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
set(["nobuild", "__memusage"]) & set(COMMAND_LINE_TARGETS)
):
env.Depends("upload", "checkprogsize")
# Replace platform's "size" target with our
@@ -226,24 +224,27 @@ if env.IsIntegrationDump():
data = projenv.DumpIntegrationData(env)
# dump to file for the further reading by project.helpers.load_build_metadata
with open(
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
projenv.subst(os.path.join("$BUILD_DIR", "metadata.json")),
mode="w",
encoding="utf8",
) as fp:
json.dump(data, fp)
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
click.echo(
"Metadata has been saved to the following location: %s"
% projenv.subst(os.path.join("$BUILD_DIR", "metadata.json"))
)
env.Exit(0)
if "sizedata" in COMMAND_LINE_TARGETS:
if "__memusage" in COMMAND_LINE_TARGETS:
AlwaysBuild(
env.Alias(
"sizedata",
"__memusage",
DEFAULT_TARGETS,
env.VerboseAction(env.DumpSizeData, "Generating memory usage report..."),
env.VerboseAction(env.DumpMemoryUsage, "Generating memory usage report..."),
)
)
Default("sizedata")
Default("__memusage")
# issue #4604: process targets sequentially
for index, target in enumerate(

View File

@@ -54,7 +54,7 @@ def GetBuildType(env):
modes.append("debug")
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
modes.append("test")
return "+".join(modes or ["release"])
return ", ".join(modes or ["release"])
def BuildProgram(env):
@@ -126,6 +126,10 @@ def ProcessProgramDeps(env):
# remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
env.ProcessCompileDbToolchainOption()
def ProcessCompileDbToolchainOption(env):
if "compiledb" in COMMAND_LINE_TARGETS:
# Resolve absolute path of toolchain
for cmd in ("CC", "CXX", "AS"):
@@ -138,6 +142,7 @@ def ProcessProgramDeps(env):
)
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
for scope, includes in env.DumpIntegrationIncludes().items():
if scope in ("toolchain",):
env.Append(CPPPATH=includes)
@@ -376,6 +381,7 @@ def generate(env):
env.AddMethod(GetBuildType)
env.AddMethod(BuildProgram)
env.AddMethod(ProcessProgramDeps)
env.AddMethod(ProcessCompileDbToolchainOption)
env.AddMethod(ProcessProjectDeps)
env.AddMethod(ParseFlagsExtended)
env.AddMethod(ProcessFlags)

View File

@@ -12,11 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
import click
import SCons.Defaults # pylint: disable=import-error
import SCons.Subst # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
@@ -25,18 +23,13 @@ from platformio.proc import exec_command, where_is_program
def IsIntegrationDump(_):
return set(["__idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
return set(["__idedata", "__metadata"]) & set(COMMAND_LINE_TARGETS)
def DumpIntegrationIncludes(env):
result = dict(build=[], compatlib=[], toolchain=[])
result["build"].extend(
[
env.subst("$PROJECT_INCLUDE_DIR"),
env.subst("$PROJECT_SRC_DIR"),
]
)
# `env`(project) CPPPATH
result["build"].extend(
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
)
@@ -140,27 +133,23 @@ def dump_svd_path(env):
return None
def _subst_cmd(env, cmd):
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
def _split_flags_string(env, s):
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
return [str(arg) for arg in args]
def DumpIntegrationData(*args):
projenv, globalenv = args[0:2] # pylint: disable=unbalanced-tuple-unpacking
data = {
"build_type": globalenv.GetBuildType(),
"build_type": globalenv["BUILD_TYPE"],
"env_name": globalenv["PIOENV"],
"libsource_dirs": [
globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
],
"defines": dump_defines(projenv),
"includes": projenv.DumpIntegrationIncludes(),
"cc_flags": click.parser.split_arg_string(
_subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS")
),
"cxx_flags": click.parser.split_arg_string(
_subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS")
),
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
"cc_path": where_is_program(
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
),

View File

@@ -29,7 +29,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs
from platformio.builder.tools import piobuild
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
from platformio.http import HTTPClientError, InternetConnectionError
from platformio.http import HttpClientApiError, InternetConnectionError
from platformio.package.exception import (
MissingPackageManifestError,
UnknownPackageError,
@@ -39,7 +39,7 @@ from platformio.package.manifest.parser import (
ManifestParserError,
ManifestParserFactory,
)
from platformio.package.meta import PackageCompatibility, PackageItem
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
from platformio.project.options import ProjectOptions
@@ -309,10 +309,10 @@ class LibBuilderBase:
if not self.dependencies or self._deps_are_processed:
return
self._deps_are_processed = True
for item in self.dependencies:
for dependency in self.dependencies:
found = False
for lb in self.env.GetLibBuilders():
if item["name"] != lb.name:
if not lb.is_dependency_compatible(dependency):
continue
found = True
if lb not in self.depbuilders:
@@ -322,9 +322,28 @@ class LibBuilderBase:
if not found and self.verbose:
sys.stderr.write(
"Warning: Ignored `%s` dependency for `%s` "
"library\n" % (item["name"], self.name)
"library\n" % (dependency["name"], self.name)
)
def is_dependency_compatible(self, dependency):
pkg = PackageItem(self.path)
qualifiers = {"name": self.name, "version": self.version}
if pkg.metadata:
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
if pkg.metadata.spec and pkg.metadata.spec.owner:
qualifiers["owner"] = pkg.metadata.spec.owner
dep_qualifiers = {
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
}
if (
"version" in dep_qualifiers
and not PackageSpec(dep_qualifiers["version"]).requirements
):
del dep_qualifiers["version"]
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
PackageCompatibility(**qualifiers)
)
def get_search_files(self):
return [
os.path.join(self.src_dir, item)
@@ -477,6 +496,7 @@ class LibBuilderBase:
self.is_built = True
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
self.env.ProcessCompileDbToolchainOption()
if self.lib_ldf_mode == "off":
for lb in self.env.GetLibBuilders():
@@ -791,7 +811,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
include_dirs.append(os.path.join(self.path, "utility"))
for path in self.env.get("CPPPATH", []):
if path not in self.envorigin.get("CPPPATH", []):
if path not in include_dirs and path not in self.envorigin.get(
"CPPPATH", []
):
include_dirs.append(self.env.subst(path))
return include_dirs
@@ -983,7 +1005,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
lm.install(spec)
did_install = True
except (
HTTPClientError,
HttpClientApiError,
UnknownPackageError,
InternetConnectionError,
) as exc:
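The new is_dependency_compatible() matches a dependency declared in library.json against the installed package's metadata using only the owner/name/version qualifiers, and drops a "version" that carries no requirements (for example a branch name) so it is not treated as a constraint. Below is a hedged standalone sketch of that qualifier filtering, mirroring the logic above without PlatformIO's PackageCompatibility class; the ArduinoJson values are example data only.

# Rough sketch of the qualifier filtering in is_dependency_compatible();
# the requirement heuristic below only approximates PackageSpec(...).requirements.
import re

def dependency_qualifiers(dependency: dict) -> dict:
    qualifiers = {
        k: v for k, v in dependency.items() if k in ("owner", "name", "version")
    }
    version = str(qualifiers.get("version", ""))
    # Strings without version operators or digits (e.g. "master") carry no
    # requirement and are dropped, as in the diff above.
    if version and not re.search(r"[\^~<>=*]|\d+\.\d+", version):
        del qualifiers["version"]
    return qualifiers

print(dependency_qualifiers(
    {"name": "ArduinoJson", "owner": "bblanchon", "version": "^6.19.0", "platforms": "*"}
))
# -> {'name': 'ArduinoJson', 'owner': 'bblanchon', 'version': '^6.19.0'}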

View File

@@ -23,10 +23,10 @@ from SCons.Subst import quote_spaces # pylint: disable=import-error
from platformio.compat import IS_WINDOWS, hashlib_encode_data
# There are the next limits depending on a platform:
# - Windows = 8192
# - Windows = 8191
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")

View File

@@ -14,33 +14,33 @@
# pylint: disable=too-many-locals
import json
import os
import sys
from os import environ, makedirs, remove
from os.path import isdir, join, splitdrive
import time
from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile
from platformio.compat import IS_WINDOWS
from platformio.proc import exec_command
from platformio.project.memusage import save_report
def _run_tool(cmd, env, tool_args):
sysenv = environ.copy()
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(build_dir, "size-data-longcmd.txt")
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
tmp_file = os.path.join(build_dir, "size-data-longcmd.txt")
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write("\n".join(tool_args))
cmd.append("@" + tmp_file)
result = exec_command(cmd, env=sysenv)
remove(tmp_file)
os.remove(tmp_file)
return result
@@ -53,7 +53,7 @@ def _get_symbol_locations(env, elf_path, addrs):
locations = [line for line in result["out"].split("\n") if line]
assert len(addrs) == len(locations)
return dict(zip(addrs, [l.strip() for l in locations]))
return dict(zip(addrs, [loc.strip() for loc in locations]))
def _get_demangled_names(env, mangled_names):
@@ -73,31 +73,7 @@ def _get_demangled_names(env, mangled_names):
)
def _determine_section(sections, symbol_addr):
for section, info in sections.items():
if not _is_flash_section(info) and not _is_ram_section(info):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"
def _is_ram_section(section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)
def _is_flash_section(section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
def _collect_sections_info(elffile):
def _collect_sections_info(env, elffile):
sections = {}
for section in elffile.iter_sections():
if section.is_null() or section.name.startswith(".debug"):
@@ -107,13 +83,18 @@ def _collect_sections_info(elffile):
section_flags = describe_sh_flags(section["sh_flags"])
section_size = section.data_size
sections[section.name] = {
section_data = {
"name": section.name,
"size": section_size,
"start_addr": section["sh_addr"],
"type": section_type,
"flags": section_flags,
}
sections[section.name] = section_data
sections[section.name]["in_flash"] = env.memusageIsFlashSection(section_data)
sections[section.name]["in_ram"] = env.memusageIsRamSection(section_data)
return sections
@@ -125,7 +106,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
sys.stderr.write("Couldn't find symbol table. Is ELF file stripped?")
env.Exit(1)
sysenv = environ.copy()
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
symbol_addrs = []
@@ -136,7 +117,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
symbol_size = s["st_size"]
symbol_type = symbol_info["type"]
if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
if not env.memusageIsValidSymbol(s.name, symbol_type, symbol_addr):
continue
symbol = {
@@ -145,7 +126,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
"name": s.name,
"type": symbol_type,
"size": symbol_size,
"section": _determine_section(sections, symbol_addr),
"section": env.memusageDetermineSection(sections, symbol_addr),
}
if s.name.startswith("_Z"):
@@ -163,8 +144,8 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
if not location or "?" in location:
continue
if IS_WINDOWS:
drive, tail = splitdrive(location)
location = join(drive.upper(), tail)
drive, tail = os.path.splitdrive(location)
location = os.path.join(drive.upper(), tail)
symbol["file"] = location
symbol["line"] = 0
if ":" in location:
@@ -175,31 +156,57 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
return symbols
def _calculate_firmware_size(sections):
def memusageDetermineSection(_, sections, symbol_addr):
for section, info in sections.items():
if not info.get("in_flash", False) and not info.get("in_ram", False):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"
def memusageIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
def memusageIsRamSection(_, section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)
def memusageIsFlashSection(_, section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
def memusageCalculateFirmwareSize(_, sections):
flash_size = ram_size = 0
for section_info in sections.values():
if _is_flash_section(section_info):
if section_info.get("in_flash", False):
flash_size += section_info.get("size", 0)
if _is_ram_section(section_info):
if section_info.get("in_ram", False):
ram_size += section_info.get("size", 0)
return ram_size, flash_size
def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
data = {"device": {}, "memory": {}, "version": 1}
def DumpMemoryUsage(_, target, source, env): # pylint: disable=unused-argument
result = {"version": 1, "timestamp": int(time.time()), "device": {}, "memory": {}}
board = env.BoardConfig()
if board:
data["device"] = {
result["device"] = {
"mcu": board.get("build.mcu", ""),
"cpu": board.get("build.cpu", ""),
"frequency": board.get("build.f_cpu"),
"flash": int(board.get("upload.maximum_size", 0)),
"ram": int(board.get("upload.maximum_ram_size", 0)),
}
if data["device"]["frequency"] and data["device"]["frequency"].endswith("L"):
data["device"]["frequency"] = int(data["device"]["frequency"][0:-1])
if result["device"]["frequency"] and result["device"]["frequency"].endswith(
"L"
):
result["device"]["frequency"] = int(result["device"]["frequency"][0:-1])
elf_path = env.subst("$PIOMAINPROG")
@@ -210,39 +217,41 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
sys.stderr.write("Elf file doesn't contain DWARF information")
env.Exit(1)
sections = _collect_sections_info(elffile)
firmware_ram, firmware_flash = _calculate_firmware_size(sections)
data["memory"]["total"] = {
sections = _collect_sections_info(env, elffile)
firmware_ram, firmware_flash = env.memusageCalculateFirmwareSize(sections)
result["memory"]["total"] = {
"ram_size": firmware_ram,
"flash_size": firmware_flash,
"sections": sections,
}
result["memory"]["sections"] = sections
files = {}
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
file_path = symbol.get("file") or "unknown"
file_path = symbol.pop("file", "unknown")
if not files.get(file_path, {}):
files[file_path] = {"symbols": [], "ram_size": 0, "flash_size": 0}
symbol_size = symbol.get("size", 0)
section = sections.get(symbol.get("section", ""), {})
if _is_ram_section(section):
if not section:
continue
if section.get("in_ram", False):
files[file_path]["ram_size"] += symbol_size
if _is_flash_section(section):
if section.get("in_flash", False):
files[file_path]["flash_size"] += symbol_size
files[file_path]["symbols"].append(symbol)
data["memory"]["files"] = []
result["memory"]["files"] = []
for k, v in files.items():
file_data = {"path": k}
file_data.update(v)
data["memory"]["files"].append(file_data)
result["memory"]["files"].append(file_data)
with open(
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
) as fp:
fp.write(json.dumps(data))
print(
"Memory usage report has been saved to the following location: "
f"\"{save_report(os.getcwd(), env['PIOENV'], result)}\""
)
def exists(_):
@@ -250,5 +259,10 @@ def exists(_):
def generate(env):
env.AddMethod(DumpSizeData)
env.AddMethod(memusageIsRamSection)
env.AddMethod(memusageIsFlashSection)
env.AddMethod(memusageCalculateFirmwareSize)
env.AddMethod(memusageDetermineSection)
env.AddMethod(memusageIsValidSymbol)
env.AddMethod(DumpMemoryUsage)
return env
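memusageIsFlashSection() and memusageIsRamSection() classify ELF sections purely by section type and flags: flash is an allocated SHT_PROGBITS section, RAM is a writable, allocated SHT_PROGBITS/SHT_NOBITS section. A standalone pyelftools sketch of the same classification; the "firmware.elf" path is a placeholder.

# Standalone sketch of the section classification used above;
# "firmware.elf" is a placeholder path.
from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile

def classify(section):
    stype = section["sh_type"]
    flags = describe_sh_flags(section["sh_flags"])
    in_flash = stype == "SHT_PROGBITS" and "A" in flags
    in_ram = stype in ("SHT_NOBITS", "SHT_PROGBITS") and flags == "WA"
    return in_flash, in_ram

with open("firmware.elf", "rb") as fp:
    elf = ELFFile(fp)
    for section in elf.iter_sections():
        if section.is_null() or section.name.startswith(".debug"):
            continue
        in_flash, in_ram = classify(section)
        print(f"{section.name:<24} size={section.data_size:<8} flash={in_flash} ram={in_ram}")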

View File

@@ -33,9 +33,7 @@ from platformio.project.config import ProjectOptions
@util.memoized()
def _PioPlatform():
env = DefaultEnvironment()
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS)
return p
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
def PioPlatform(_):
@@ -77,9 +75,11 @@ def LoadPioPlatform(env):
continue
env.PrependENVPath(
"PATH",
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path,
(
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path
),
)
if (
not IS_WINDOWS

View File

@@ -218,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)
# raise error
# if data_max_size and data_size > data_max_size:
# sys.stderr.write(
# "Error: The data size (%d bytes) is greater "
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
# env.Exit(1)
if data_max_size and data_size > data_max_size:
sys.stderr.write(
"Warning! The data size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
)
if program_size > program_max_size:
sys.stderr.write(
"Error: The program size (%d bytes) is greater "

View File

@@ -103,12 +103,23 @@ def cli(
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
)
default_src_filters = [
"+<%s>" % os.path.basename(config.get("platformio", "src_dir")),
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
]
default_src_filters = []
for d in (
config.get("platformio", "src_dir"),
config.get("platformio", "include_dir"),
):
try:
default_src_filters.append("+<%s>" % os.path.relpath(d))
except ValueError as exc:
# On Windows if sources are located on a different logical drive
if not json_output and not silent:
click.echo(
"Error: Project cannot be analyzed! The project folder `%s`"
" is located on a different logical drive\n" % d
)
raise exception.ReturnErrorCode(1) from exc
src_filters = (
env_src_filters = (
src_filters
or pattern
or env_options.get(
@@ -120,11 +131,13 @@ def cli(
tool_options = dict(
verbose=verbose,
silent=silent,
src_filters=src_filters,
src_filters=env_src_filters,
flags=flags or env_options.get("check_flags"),
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
severity=(
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity")
),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
platform_packages=env_options.get("platform_packages"),
)
@@ -142,9 +155,11 @@ def cli(
result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check(
on_defect_callback=None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
on_defect_callback=(
None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
)
result["defects"] = ct.get_defects()

platformio/check/tools/base.py  (Normal file → Executable file, 0 changed lines)
View File

View File

@@ -19,7 +19,8 @@ import subprocess
import click
from platformio import VERSION, __version__, app, exception
from platformio.http import fetch_remote_content
from platformio.dependencies import get_pip_dependencies
from platformio.http import fetch_http_content
from platformio.package.manager.core import update_core_packages
from platformio.proc import get_pythonexe_path
@@ -33,9 +34,14 @@ DEVELOP_INIT_SCRIPT_URL = (
@click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version")
@click.option("--dev", is_flag=True, help="Use development branch")
@click.option("--only-dependencies", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def cli(dev, verbose):
def cli(dev, only_dependencies, verbose):
if only_dependencies:
return upgrade_pip_dependencies(verbose)
update_core_packages()
if not dev and __version__ == get_latest_version():
return click.secho(
"You're up-to-date!\nPlatformIO %s is currently the "
@@ -50,11 +56,21 @@ def cli(dev, verbose):
pkg_spec = DEVELOP_ZIP_URL if to_develop else "platformio"
try:
# PIO Core
subprocess.run(
[python_exe, "-m", "pip", "install", "--upgrade", pkg_spec],
check=True,
stdout=subprocess.PIPE if not verbose else None,
)
# PyPI dependencies
subprocess.run(
[python_exe, "-m", "platformio", "upgrade", "--only-dependencies"],
check=False,
stdout=subprocess.PIPE,
)
# Check version
output = subprocess.run(
[python_exe, "-m", "platformio", "--version"],
check=True,
@@ -87,9 +103,20 @@ def cli(dev, verbose):
return True
def get_pkg_spec(to_develop):
if to_develop:
return
def upgrade_pip_dependencies(verbose):
subprocess.run(
[
get_pythonexe_path(),
"-m",
"pip",
"install",
"--upgrade",
"pip",
*get_pip_dependencies(),
],
check=True,
stdout=subprocess.PIPE if not verbose else None,
)
def get_latest_version():
@@ -106,7 +133,7 @@ def get_latest_version():
def get_develop_latest_version():
version = None
content = fetch_remote_content(DEVELOP_INIT_SCRIPT_URL)
content = fetch_http_content(DEVELOP_INIT_SCRIPT_URL)
for line in content.split("\n"):
line = line.strip()
if not line.startswith("VERSION"):
@@ -123,5 +150,5 @@ def get_develop_latest_version():
def get_pypi_latest_version():
content = fetch_remote_content(PYPI_JSON_URL)
content = fetch_http_content(PYPI_JSON_URL)
return json.loads(content)["info"]["version"]

View File

@@ -17,6 +17,7 @@
import importlib.util
import inspect
import locale
import os
import shlex
import sys
@@ -41,10 +42,14 @@ else:
if sys.version_info >= (3, 9):
from asyncio import to_thread as aio_to_thread
else:
from starlette.concurrency import run_in_threadpool as aio_to_thread
try:
from starlette.concurrency import run_in_threadpool as aio_to_thread
except ImportError:
pass
PY2 = sys.version_info[0] == 2 # DO NOT REMOVE IT. ESP8266/ESP32 depend on it
PY36 = sys.version_info[0:2] == (3, 6)
IS_CYGWIN = sys.platform.startswith("cygwin")
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
IS_MACOS = sys.platform.startswith("darwin")
@@ -132,3 +137,12 @@ def path_to_unicode(path):
and custom device monitor filters
"""
return path
def is_proxy_set(socks=False):
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
value = os.getenv(var, os.getenv(var.lower()))
if not value or (socks and not value.startswith("socks5://")):
continue
return True
return False
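is_proxy_set() inspects the usual proxy environment variables (upper- and lower-case variants) and, with socks=True, only counts values using the socks5:// scheme; get_pip_dependencies() later uses it to decide whether httpx needs its [socks] extra. A small usage sketch with placeholder proxy URLs:

# Usage sketch for is_proxy_set(); the proxy URLs are placeholders.
import os
from platformio.compat import is_proxy_set

os.environ["HTTPS_PROXY"] = "socks5://127.0.0.1:1080"
print(is_proxy_set())             # True  - some proxy is configured
print(is_proxy_set(socks=True))   # True  - and it is a SOCKS5 proxy

os.environ["HTTPS_PROXY"] = "http://proxy.local:3128"
print(is_proxy_set(socks=True))   # False - an HTTP proxy does not count as SOCKS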

View File

@@ -55,7 +55,7 @@ from platformio.project.options import ProjectOptions
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(
ctx,
@@ -65,10 +65,13 @@ def cli(
load_mode,
verbose,
interface,
__unprocessed,
client_extra_args,
):
app.set_session_var("custom_project_conf", project_conf)
if not interface and client_extra_args:
raise click.UsageError("Please specify debugging interface")
# use env variables from Eclipse or CLion
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
@@ -92,7 +95,7 @@ def cli(
env_name,
load_mode,
verbose,
__unprocessed,
client_extra_args,
)
if helpers.is_gdbmi_mode():
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"
@@ -103,21 +106,19 @@ def cli(
else:
debug_config = _configure(*configure_args)
_run(project_dir, debug_config, __unprocessed)
_run(project_dir, debug_config, client_extra_args)
return None
def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed):
platform = PlatformFactory.new(
project_config.get(f"env:{env_name}", "platform"), autoinstall=True
)
def _configure(ctx, project_config, env_name, load_mode, verbose, client_extra_args):
platform = PlatformFactory.from_env(env_name, autoinstall=True)
debug_config = DebugConfigFactory.new(
platform,
project_config,
env_name,
)
if "--version" in __unprocessed:
if "--version" in client_extra_args:
raise ReturnErrorCode(
subprocess.run(
[debug_config.client_executable_path, "--version"], check=True
@@ -163,12 +164,12 @@ def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed)
return debug_config
def _run(project_dir, debug_config, __unprocessed):
def _run(project_dir, debug_config, client_extra_args):
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
asyncio.set_event_loop(loop)
client = GDBClientProcess(project_dir, debug_config)
coro = client.run(__unprocessed)
coro = client.run(client_extra_args)
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
loop.run_until_complete(coro)

View File

@@ -24,7 +24,9 @@ from platformio.project.options import ProjectOptions
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
def __init__(self, platform, project_config, env_name, port=None):
DEFAULT_PORT = None
def __init__(self, platform, project_config, env_name):
self.platform = platform
self.project_config = project_config
self.env_name = env_name
@@ -48,7 +50,6 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
self._load_cmds = None
self._port = None
self.port = port
self.server = self._configure_server()
try:
@@ -120,8 +121,10 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
@property
def port(self):
return (
self.env_options.get("debug_port", self.tool_settings.get("port"))
or self._port
self._port
or self.env_options.get("debug_port")
or self.tool_settings.get("port")
or self.DEFAULT_PORT
)
@port.setter
@@ -145,7 +148,9 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
)
def _load_build_data(self):
data = load_build_metadata(os.getcwd(), self.env_name, cache=True, debug=True)
data = load_build_metadata(
os.getcwd(), self.env_name, cache=True, force_targets=["__debug"]
)
if not data:
raise DebugInvalidOptionsError("Could not load a build configuration")
return data
@@ -191,9 +196,11 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
cwd=server_package_dir if server_package else None,
executable=result.get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
(
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
)
for a in result.get("arguments", [])
],
)
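The reworked port property resolves the debug port in a fixed order: an explicitly assigned port wins, then the project's debug_port option, then the debug tool's own settings, and finally the new per-tool DEFAULT_PORT class attribute (":3333", ":2331", ":2000", ":1234" in the configs below). A standalone sketch of that precedence; the option/setting dictionaries are illustrative stand-ins.

# Standalone sketch of the port resolution order introduced above;
# the option/setting dictionaries are illustrative stand-ins.
def resolve_port(explicit_port, env_options, tool_settings, default_port=":3333"):
    return (
        explicit_port
        or env_options.get("debug_port")
        or tool_settings.get("port")
        or default_port
    )

print(resolve_port(None, {}, {}))                                      # :3333 (DEFAULT_PORT)
print(resolve_port(None, {"debug_port": ":4444"}, {"port": ":5555"}))  # :4444 (project option)
print(resolve_port(":6666", {"debug_port": ":4444"}, {}))              # :6666 (explicit port wins)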

View File

@@ -27,17 +27,13 @@ class DebugConfigFactory:
@classmethod
def new(cls, platform, project_config, env_name):
board_config = platform.board_config(
project_config.get("env:" + env_name, "board")
)
tool_name = (
board_config.get_debug_tool_name(
project_config.get("env:" + env_name, "debug_tool")
)
if board_config
else None
)
board_id = project_config.get("env:" + env_name, "board")
config_cls = None
tool_name = None
if board_id:
tool_name = platform.board_config(
project_config.get("env:" + env_name, "board")
).get_debug_tool_name(project_config.get("env:" + env_name, "debug_tool"))
try:
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
config_cls = getattr(mod, cls.get_clsname(tool_name))

View File

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class GenericDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset halt
@@ -31,8 +32,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)

View File

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class JlinkDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2331"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset
@@ -36,11 +37,6 @@ $LOAD_CMDS
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2331"
super().__init__(*args, **kwargs)
@property
def server_ready_pattern(self):
return super().server_ready_pattern or ("Waiting for GDB connection")

View File

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class MspdebugDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2000"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
@@ -29,8 +30,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2000"
super().__init__(*args, **kwargs)

View File

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class QemuDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":1234"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor system_reset
@@ -30,8 +31,3 @@ $LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":1234"
super().__init__(*args, **kwargs)

View File

@@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class RenodeDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor machine Reset
@@ -33,11 +34,6 @@ $INIT_BREAK
monitor start
"""
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)
@property
def server_ready_pattern(self):
return super().server_ready_pattern or (

View File

@@ -62,7 +62,9 @@ class DebugServerProcess(DebugBaseProcess):
openocd_pipe_allowed = all(
[
not self.debug_config.env_options.get("debug_port"),
not self.debug_config.env_options.get(
"debug_port", self.debug_config.tool_settings.get("port")
),
"gdb" in self.debug_config.client_executable_path,
"openocd" in server_executable,
]

View File

@@ -0,0 +1,71 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import platform
from platformio.compat import PY36, is_proxy_set
def get_core_dependencies():
return {
"contrib-piohome": "~3.4.2",
"contrib-pioremote": "~1.0.0",
"tool-scons": "~4.40700.0",
"tool-cppcheck": "~1.21100.0",
"tool-clangtidy": "~1.150005.0",
"tool-pvs-studio": "~7.18.0",
}
def get_pip_dependencies():
core = [
"bottle == 0.12.*",
"click >=8.0.4, <9",
"colorama",
"httpx%s >=0.22.0, <0.28" % ("[socks]" if is_proxy_set(socks=True) else ""),
"marshmallow == 3.*",
"pyelftools >=0.27, <1",
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
"semantic_version == 2.10.*",
"tabulate == 0.*",
]
home = [
# PIO Home requirements
"ajsonrpc == 1.2.*",
"starlette >=0.19, <0.38",
"uvicorn %s" % ("== 0.16.0" if PY36 else ">=0.16, <0.30"),
"wsproto == 1.*",
]
extra = []
# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
if platform.system() == "Darwin" and "arm" in platform.machine().lower():
extra.append("chardet>=3.0.2,<6")
# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
try:
import ssl # pylint: disable=import-outside-toplevel
if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
1,
1,
1,
):
extra.append("urllib3<2")
except ImportError:
pass
return core + home + extra

View File

@@ -144,9 +144,9 @@ def list_mdns_services():
if service.properties:
try:
properties = {
k.decode("utf8"): v.decode("utf8")
if isinstance(v, bytes)
else v
k.decode("utf8"): (
v.decode("utf8") if isinstance(v, bytes) else v
)
for k, v in service.properties.items()
}
json.dumps(properties)

View File

@@ -58,7 +58,7 @@ from platformio.project.options import ProjectOptions
"--encoding",
help=(
"Set the encoding for the serial port "
"(e.g. hexlify, Latin1, UTF-8) [default=%s]"
"(e.g. hexlify, Latin-1, UTF-8) [default=%s]"
% ProjectOptions["env.monitor_encoding"].default
),
)
@@ -125,9 +125,11 @@ def device_monitor_cmd(**options):
options = apply_project_monitor_options(options, project_options)
register_filters(platform=platform, options=options)
options["port"] = SerialPortFinder(
board_config=platform.board_config(project_options.get("board"))
if platform and project_options.get("board")
else None,
board_config=(
platform.board_config(project_options.get("board"))
if platform and project_options.get("board")
else None
),
upload_protocol=project_options.get("upload_protocol"),
ensure_ready=True,
).find(initial_port=options["port"])

View File

@@ -25,11 +25,12 @@ from platformio.project.config import ProjectConfig
class DeviceMonitorFilterBase(miniterm.Transform):
def __init__(self, options=None):
"""Called by PlatformIO to pass context"""
miniterm.Transform.__init__(self)
super().__init__()
self.options = options or {}
self.project_dir = self.options.get("project_dir")
self.environment = self.options.get("environment")
self._running_terminal = None
self.config = ProjectConfig.get_instance()
if not self.environment:
@@ -47,6 +48,12 @@ class DeviceMonitorFilterBase(miniterm.Transform):
def NAME(self):
raise NotImplementedError("Please declare NAME attribute for the filter class")
def set_running_terminal(self, terminal):
self._running_terminal = terminal
def get_running_terminal(self):
return self._running_terminal
def register_filters(platform=None, options=None):
# project filters

View File

@@ -24,12 +24,18 @@ class Hexlify(DeviceMonitorFilterBase):
super().__init__(*args, **kwargs)
self._counter = 0
def set_running_terminal(self, terminal):
# force to Latin-1, issue #4732
if terminal.input_encoding == "UTF-8":
terminal.set_rx_encoding("Latin-1")
super().set_running_terminal(terminal)
def rx(self, text):
result = ""
for b in serial.iterbytes(text):
for c in serial.iterbytes(text):
if (self._counter % 16) == 0:
result += "\n{:04X} | ".format(self._counter)
asciicode = ord(b)
asciicode = ord(c)
if asciicode <= 255:
result += "{:02X} ".format(asciicode)
else:

View File

@@ -110,6 +110,12 @@ def new_terminal(options):
term.raw = options["raw"]
term.set_rx_encoding(options["encoding"])
term.set_tx_encoding(options["encoding"])
for ts in (term.tx_transformations, term.rx_transformations):
for t in ts:
try:
t.set_running_terminal(term)
except AttributeError:
pass
return term
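new_terminal() now passes the running miniterm instance to every TX/RX transformation that implements set_running_terminal(), which is exactly the hook the hexlify filter above uses to force the RX encoding to Latin-1. A hedged sketch of a custom monitor filter built on that hook; the demo_echo name is made up and the import path is assumed from the PlatformIO 6.x source layout.

# Hedged sketch of a custom device-monitor filter using the new
# set_running_terminal()/get_running_terminal() hooks. The import path is
# assumed from the PlatformIO 6.x layout and "demo_echo" is a made-up name.
from platformio.device.monitor.filters.base import DeviceMonitorFilterBase

class DemoEcho(DeviceMonitorFilterBase):
    NAME = "demo_echo"

    def set_running_terminal(self, terminal):
        # The terminal object is kept by the base class and is reachable
        # later from rx()/tx() via self.get_running_terminal().
        super().set_running_terminal(terminal)

    def rx(self, text):
        term = self.get_running_terminal()
        encoding = getattr(term, "input_encoding", "?") if term else "?"
        return f"[rx/{encoding}] {text}"

    def tx(self, text):
        return text

A filter like this, once discovered by register_filters(), would be enabled through the project's monitor_filters option.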

View File

@@ -29,15 +29,16 @@ from platformio.compat import IS_WINDOWS
class cd:
def __init__(self, new_path):
self.new_path = new_path
self.prev_path = os.getcwd()
def __init__(self, path):
self.path = path
self._old_cwd = []
def __enter__(self):
os.chdir(self.new_path)
self._old_cwd.append(os.getcwd())
os.chdir(self.path)
def __exit__(self, etype, value, traceback):
os.chdir(self.prev_path)
def __exit__(self, *excinfo):
os.chdir(self._old_cwd.pop())
def get_source_dir():
@@ -210,7 +211,7 @@ def change_filemtime(path, mtime):
def rmtree(path):
def _onerror(func, path, __):
def _onexc(func, path, _):
try:
st_mode = os.stat(path).st_mode
if st_mode & stat.S_IREAD:
@@ -223,4 +224,7 @@ def rmtree(path):
err=True,
)
return shutil.rmtree(path, onerror=_onerror)
# pylint: disable=unexpected-keyword-arg, deprecated-argument
if sys.version_info < (3, 12):
return shutil.rmtree(path, onerror=_onexc)
return shutil.rmtree(path, onexc=_onexc)
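Python 3.12 renamed shutil.rmtree's error hook from onerror (now deprecated, receives sys.exc_info()-style info) to onexc (receives the exception instance), which is why rmtree() above selects the keyword by interpreter version. A minimal version-guarded sketch of the same pattern; "build_tmp" is a placeholder directory.

# Minimal sketch of the onerror -> onexc switch handled above;
# "build_tmp" is a placeholder directory name.
import os
import shutil
import stat
import sys

def force_remove(func, path, _):
    # Clear the read-only bit and retry once, mirroring the intent of _onexc().
    os.chmod(path, stat.S_IWRITE)
    func(path)

def rmtree_compat(path):
    if sys.version_info < (3, 12):
        return shutil.rmtree(path, onerror=force_remove)  # hook gets (type, value, tb)
    return shutil.rmtree(path, onexc=force_remove)        # hook gets the exception

if os.path.isdir("build_tmp"):
    rmtree_compat("build_tmp")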

View File

@@ -12,19 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.account.client import AccountClient
from platformio.home.rpc.handlers.base import BaseRPCHandler
class AccountRPC(BaseRPCHandler):
NAMESPACE = "account"
@staticmethod
def call_client(method, *args, **kwargs):
try:
client = AccountClient()
with AccountClient() as client:
return getattr(client, method)(*args, **kwargs)
except Exception as exc: # pylint: disable=bare-except
raise JSONRPC20DispatchException(
code=5000, message="PIO Account Call Error", data=str(exc)
) from exc

View File

@@ -20,6 +20,7 @@ from platformio.project.helpers import is_platformio_project
class AppRPC(BaseRPCHandler):
NAMESPACE = "app"
IGNORE_STORAGE_KEYS = [
"cid",
"coreVersion",

View File

@@ -14,4 +14,6 @@
class BaseRPCHandler:
NAMESPACE = None
factory = None

View File

@@ -0,0 +1,123 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import functools
import os
from platformio import __main__, __version__, app, proc, util
from platformio.compat import (
IS_WINDOWS,
aio_create_task,
aio_get_running_loop,
get_locale_encoding,
shlex_join,
)
from platformio.exception import UserSideException
from platformio.home.rpc.handlers.base import BaseRPCHandler
class PIOCoreCallError(UserSideException):
MESSAGE = 'An error occured while executing PIO Core command: "{0}"\n\n{1}'
class PIOCoreProtocol(asyncio.SubprocessProtocol):
def __init__(self, exit_future, on_data_callback=None):
self.exit_future = exit_future
self.on_data_callback = on_data_callback
self.stdout = ""
self.stderr = ""
self._is_exited = False
self._encoding = get_locale_encoding()
def pipe_data_received(self, fd, data):
data = data.decode(self._encoding, "replace")
pipe = ["stdin", "stdout", "stderr"][fd]
if pipe == "stdout":
self.stdout += data
if pipe == "stderr":
self.stderr += data
if self.on_data_callback:
self.on_data_callback(pipe=pipe, data=data)
def connection_lost(self, exc):
self.process_exited()
def process_exited(self):
if self._is_exited:
return
self.exit_future.set_result(True)
self._is_exited = True
@util.memoized(expire="60s")
def get_core_fullpath():
return proc.where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
class CoreRPC(BaseRPCHandler):
NAMESPACE = "core"
@staticmethod
def version():
return __version__
async def exec(self, args, options=None, raise_exception=True):
options = options or {}
loop = aio_get_running_loop()
exit_future = loop.create_future()
data_callback = functools.partial(
self._on_exec_data_received, exec_options=options
)
if args[0] != "--caller" and app.get_session_var("caller_id"):
args = ["--caller", app.get_session_var("caller_id")] + args
kwargs = options.get("spawn", {})
if "force_ansi" in options:
environ = kwargs.get("env", os.environ.copy())
environ["PLATFORMIO_FORCE_ANSI"] = "true"
kwargs["env"] = environ
transport, protocol = await loop.subprocess_exec(
lambda: PIOCoreProtocol(exit_future, data_callback),
get_core_fullpath(),
*args,
stdin=None,
**kwargs,
)
await exit_future
transport.close()
return_code = transport.get_returncode()
if return_code != 0 and raise_exception:
raise PIOCoreCallError(
shlex_join(["pio"] + args), f"{protocol.stdout}\n{protocol.stderr}"
)
return {
"stdout": protocol.stdout,
"stderr": protocol.stderr,
"returncode": return_code,
}
def _on_exec_data_received(self, exec_options, pipe, data):
notification_method = exec_options.get(f"{pipe}NotificationMethod")
if not notification_method:
return
aio_create_task(
self.factory.notify_clients(
method=notification_method,
params=[data],
actor="frontend",
)
)

View File

@@ -22,6 +22,7 @@ from platformio.home.rpc.handlers.base import BaseRPCHandler
class IDERPC(BaseRPCHandler):
NAMESPACE = "ide"
COMMAND_TIMEOUT = 1.5 # in seconds
def __init__(self):

View File

@@ -0,0 +1,127 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.project import memusage
class MemUsageRPC(BaseRPCHandler):
NAMESPACE = "memusage"
async def profile(self, project_dir, env, options=None):
options = options or {}
report_dir = memusage.get_report_dir(project_dir, env)
if options.get("lazy"):
existing_reports = memusage.list_reports(report_dir)
if existing_reports:
return existing_reports[-1]
await self.factory.manager.dispatcher["core.exec"](
["run", "-d", project_dir, "-e", env, "-t", "__memusage"],
options=options.get("exec"),
)
return memusage.list_reports(report_dir)[-1]
@staticmethod
def load_report(path):
return memusage.read_report(path)
def summary(self, report_path):
max_top_items = 10
report_dir = os.path.dirname(report_path)
existing_reports = memusage.list_reports(report_dir)
current_report = memusage.read_report(report_path)
previous_report = None
try:
current_index = existing_reports.index(report_path)
if current_index > 0:
previous_report = memusage.read_report(
existing_reports[current_index - 1]
)
except ValueError:
pass
return dict(
timestamp=dict(
current=current_report["timestamp"],
previous=previous_report["timestamp"] if previous_report else None,
),
device=current_report["device"],
trend=dict(
current=current_report["memory"]["total"],
previous=(
previous_report["memory"]["total"] if previous_report else None
),
),
top=dict(
files=self._calculate_top_files(current_report["memory"]["files"])[
0:max_top_items
],
symbols=self._calculate_top_symbols(current_report["memory"]["files"])[
0:max_top_items
],
sections=sorted(
current_report["memory"]["sections"].values(),
key=lambda item: item["size"],
reverse=True,
)[0:max_top_items],
),
)
@staticmethod
def _calculate_top_files(items):
return [
{"path": item["path"], "ram": item["ram_size"], "flash": item["flash_size"]}
for item in sorted(
items,
key=lambda item: item["ram_size"] + item["flash_size"],
reverse=True,
)
]
@staticmethod
def _calculate_top_symbols(files):
symbols = functools.reduce(
lambda result, filex: result
+ [
{
"name": s["name"],
"type": s["type"],
"size": s["size"],
"file": filex["path"],
"line": s.get("line"),
}
for s in filex["symbols"]
],
files,
[],
)
return sorted(symbols, key=lambda item: item["size"], reverse=True)
async def history(self, project_dir, env, nums=10):
result = []
report_dir = memusage.get_report_dir(project_dir, env)
reports = memusage.list_reports(report_dir)[nums * -1 :]
for path in reports:
data = memusage.read_report(path)
result.append(
{
"timestamp": data["timestamp"],
"ram": data["memory"]["total"]["ram_size"],
"flash": data["memory"]["total"]["flash_size"],
}
)
return result

View File

@@ -22,6 +22,8 @@ from platformio.home.rpc.handlers.os import OSRPC
class MiscRPC(BaseRPCHandler):
NAMESPACE = "misc"
async def load_latest_tweets(self, data_url):
cache_key = ContentCache.key_from_args(data_url, "tweets")
cache_valid = "180d"

View File

@@ -15,32 +15,22 @@
import glob
import io
import os
import shutil
from functools import cmp_to_key
import click
from platformio import fs
from platformio.cache import ContentCache
from platformio.compat import aio_to_thread
from platformio.device.list.util import list_logical_devices
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.http import HTTPSession, ensure_internet_on
class HTTPAsyncSession(HTTPSession):
async def request( # pylint: disable=signature-differs,invalid-overridden-method
self, *args, **kwargs
):
func = super().request
return await aio_to_thread(func, *args, **kwargs)
class OSRPC(BaseRPCHandler):
_http_session = None
NAMESPACE = "os"
@classmethod
async def fetch_content(cls, url, data=None, headers=None, cache_valid=None):
def fetch_content(cls, url, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent": (
@@ -52,35 +42,33 @@ class OSRPC(BaseRPCHandler):
cache_key = ContentCache.key_from_args(url, data) if cache_valid else None
with ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
if result is not None:
return result
content = cc.get(cache_key)
if content is not None:
return content
# check internet before and resolve issue with 60 seconds timeout
ensure_internet_on(raise_exception=True)
if not cls._http_session:
cls._http_session = HTTPAsyncSession()
with HTTPSession() as session:
if data:
response = session.post(url, data=data, headers=headers)
else:
response = session.get(url, headers=headers)
if data:
r = await cls._http_session.post(url, data=data, headers=headers)
else:
r = await cls._http_session.get(url, headers=headers)
response.raise_for_status()
content = response.text
if cache_valid:
with ContentCache() as cc:
cc.set(cache_key, content, cache_valid)
return content
r.raise_for_status()
result = r.text
if cache_valid:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
return result
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
@classmethod
def request_content(cls, uri, data=None, headers=None, cache_valid=None):
if uri.startswith("http"):
return await self.fetch_content(uri, data, headers, cache_valid)
return cls.fetch_content(uri, data, headers, cache_valid)
local_path = uri[7:] if uri.startswith("file://") else uri
with io.open(local_path, encoding="utf-8") as fp:
return fp.read()
return None
@staticmethod
def open_url(url):
@@ -110,22 +98,10 @@ class OSRPC(BaseRPCHandler):
def is_dir(path):
return os.path.isdir(path)
@staticmethod
def make_dirs(path):
return os.makedirs(path)
@staticmethod
def get_file_mtime(path):
return os.path.getmtime(path)
@staticmethod
def rename(src, dst):
return os.rename(src, dst)
@staticmethod
def copy(src, dst):
return shutil.copytree(src, dst, symlinks=True)
@staticmethod
def glob(pathnames, root=None):
if not isinstance(pathnames, list):

View File

@@ -1,229 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import functools
import io
import json
import os
import sys
import threading
import click
from ajsonrpc.core import JSONRPC20DispatchException
from platformio import __main__, __version__, app, fs, proc, util
from platformio.compat import (
IS_WINDOWS,
aio_create_task,
aio_get_running_loop,
aio_to_thread,
get_locale_encoding,
is_bytes,
)
from platformio.exception import PlatformioException
from platformio.home.rpc.handlers.base import BaseRPCHandler
class PIOCoreProtocol(asyncio.SubprocessProtocol):
def __init__(self, exit_future, on_data_callback=None):
self.exit_future = exit_future
self.on_data_callback = on_data_callback
self.stdout = ""
self.stderr = ""
self._is_exited = False
self._encoding = get_locale_encoding()
def pipe_data_received(self, fd, data):
data = data.decode(self._encoding, "replace")
pipe = ["stdin", "stdout", "stderr"][fd]
if pipe == "stdout":
self.stdout += data
if pipe == "stderr":
self.stderr += data
if self.on_data_callback:
self.on_data_callback(pipe=pipe, data=data)
def connection_lost(self, exc):
self.process_exited()
def process_exited(self):
if self._is_exited:
return
self.exit_future.set_result(True)
self._is_exited = True
class MultiThreadingStdStream:
def __init__(self, parent_stream):
self._buffers = {threading.get_ident(): parent_stream}
def __getattr__(self, name):
thread_id = threading.get_ident()
self._ensure_thread_buffer(thread_id)
return getattr(self._buffers[thread_id], name)
def _ensure_thread_buffer(self, thread_id):
if thread_id not in self._buffers:
self._buffers[thread_id] = io.StringIO()
def write(self, value):
thread_id = threading.get_ident()
self._ensure_thread_buffer(thread_id)
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value
)
def get_value_and_reset(self):
result = ""
try:
result = self.getvalue()
self.seek(0)
self.truncate(0)
except AttributeError:
pass
return result
@util.memoized(expire="60s")
def get_core_fullpath():
return proc.where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
class PIOCoreRPC(BaseRPCHandler):
@staticmethod
def version():
return __version__
async def exec(self, args, options=None):
loop = aio_get_running_loop()
exit_future = loop.create_future()
data_callback = functools.partial(
self._on_exec_data_received, exec_options=options
)
if args[0] != "--caller" and app.get_session_var("caller_id"):
args = ["--caller", app.get_session_var("caller_id")] + args
transport, protocol = await loop.subprocess_exec(
lambda: PIOCoreProtocol(exit_future, data_callback),
get_core_fullpath(),
*args,
stdin=None,
**options.get("spawn", {}),
)
await exit_future
transport.close()
return {
"stdout": protocol.stdout,
"stderr": protocol.stderr,
"returncode": transport.get_returncode(),
}
def _on_exec_data_received(self, exec_options, pipe, data):
notification_method = exec_options.get(f"{pipe}NotificationMethod")
if not notification_method:
return
aio_create_task(
self.factory.notify_clients(
method=notification_method,
params=[data],
actor="frontend",
)
)
@staticmethod
def setup_multithreading_std_streams():
if isinstance(sys.stdout, MultiThreadingStdStream):
return
PIOCoreRPC.thread_stdout = MultiThreadingStdStream(sys.stdout)
PIOCoreRPC.thread_stderr = MultiThreadingStdStream(sys.stderr)
sys.stdout = PIOCoreRPC.thread_stdout
sys.stderr = PIOCoreRPC.thread_stderr
@staticmethod
async def call(args, options=None):
for i, arg in enumerate(args):
if not isinstance(arg, str):
args[i] = str(arg)
options = options or {}
to_json = "--json-output" in args
try:
if options.get("force_subprocess"):
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
result = await PIOCoreRPC._call_inline(args, options)
try:
return PIOCoreRPC._process_result(result, to_json)
except ValueError:
# fall-back to subprocess method
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
except Exception as exc: # pylint: disable=bare-except
raise JSONRPC20DispatchException(
code=5000, message="PIO Core Call Error", data=str(exc)
) from exc
@staticmethod
async def _call_subprocess(args, options):
result = await aio_to_thread(
proc.exec_command,
[get_core_fullpath()] + args,
cwd=options.get("cwd") or os.getcwd(),
)
return (result["out"], result["err"], result["returncode"])
@staticmethod
async def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
def _thread_safe_call(args, cwd):
with fs.cd(cwd):
exit_code = __main__.main(["-c"] + args)
return (
PIOCoreRPC.thread_stdout.get_value_and_reset(),
PIOCoreRPC.thread_stderr.get_value_and_reset(),
exit_code,
)
return await aio_to_thread(
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
)
@staticmethod
def _process_result(result, to_json=False):
out, err, code = result
if out and is_bytes(out):
out = out.decode(get_locale_encoding())
if err and is_bytes(err):
err = err.decode(get_locale_encoding())
text = ("%s\n\n%s" % (out, err)).strip()
if code != 0:
raise PlatformioException(text)
if not to_json:
return text
try:
return json.loads(out)
except ValueError as exc:
click.secho("%s => `%s`" % (exc, out), fg="red", err=True)
# if PIO Core prints unhandled warnings
for line in out.split("\n"):
line = line.strip()
if not line:
continue
try:
return json.loads(line)
except ValueError:
pass
raise exc

View File

@@ -14,8 +14,8 @@
import os.path
from platformio.compat import aio_to_thread
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.home.rpc.handlers.registry import RegistryRPC
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.meta import PackageSpec
@@ -23,15 +23,13 @@ from platformio.platform.factory import PlatformFactory
class PlatformRPC(BaseRPCHandler):
async def fetch_platforms(self, search_query=None, page=0, force_installed=False):
if force_installed:
return {
"items": await aio_to_thread(
self._load_installed_platforms, search_query
)
}
NAMESPACE = "platform"
search_result = await self.factory.manager.dispatcher["registry.call_client"](
def fetch_platforms(self, search_query=None, page=0, force_installed=False):
if force_installed:
return {"items": self._load_installed_platforms(search_query)}
search_result = RegistryRPC.call_client(
method="list_packages",
query=search_query,
qualifiers={
@@ -88,17 +86,17 @@ class PlatformRPC(BaseRPCHandler):
)
return items
async def fetch_boards(self, platform_spec):
def fetch_boards(self, platform_spec):
spec = PackageSpec(platform_spec)
if spec.owner:
return await self.factory.manager.dispatcher["registry.call_client"](
return RegistryRPC.call_client(
method="get_package",
typex="platform",
owner=spec.owner,
name=spec.name,
extra_path="/boards",
)
return await aio_to_thread(self._load_installed_boards, spec)
return self._load_installed_boards(spec)
@staticmethod
def _load_installed_boards(platform_spec):
@@ -108,17 +106,17 @@ class PlatformRPC(BaseRPCHandler):
key=lambda item: item["name"],
)
async def fetch_examples(self, platform_spec):
def fetch_examples(self, platform_spec):
spec = PackageSpec(platform_spec)
if spec.owner:
return await self.factory.manager.dispatcher["registry.call_client"](
return RegistryRPC.call_client(
method="get_package",
typex="platform",
owner=spec.owner,
name=spec.name,
extra_path="/examples",
)
return await aio_to_thread(self._load_installed_examples, spec)
return self._load_installed_examples(spec)
@staticmethod
def _load_installed_examples(platform_spec):

View File

@@ -13,279 +13,50 @@
# limitations under the License.
import os
import shutil
import time
from pathlib import Path
import semantic_version
from ajsonrpc.core import JSONRPC20DispatchException
from platformio import app, exception, fs
from platformio.home.rpc.handlers.app import AppRPC
from platformio import app, fs
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.package.manager.platform import PlatformPackageManager
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectError
from platformio.project.helpers import get_project_dir, is_platformio_project
from platformio.project.helpers import get_project_dir
from platformio.project.integration.generator import ProjectGenerator
from platformio.project.options import get_config_options_schema
class ProjectRPC(BaseRPCHandler):
NAMESPACE = "project"
@staticmethod
def config_call(init_kwargs, method, *args):
async def config_call(init_kwargs, method, *args):
assert isinstance(init_kwargs, dict)
assert "path" in init_kwargs
project_dir = get_project_dir()
if os.path.isfile(init_kwargs["path"]):
if os.path.isdir(init_kwargs["path"]):
project_dir = init_kwargs["path"]
init_kwargs["path"] = os.path.join(init_kwargs["path"], "platformio.ini")
elif os.path.isfile(init_kwargs["path"]):
project_dir = os.path.dirname(init_kwargs["path"])
else:
project_dir = get_project_dir()
with fs.cd(project_dir):
return getattr(ProjectConfig(**init_kwargs), method)(*args)
@staticmethod
def config_load(path):
return ProjectConfig(
path, parse_extra=False, expand_interpolations=False
).as_tuple()
@staticmethod
def config_dump(path, data):
config = ProjectConfig(path, parse_extra=False, expand_interpolations=False)
config.update(data, clear=True)
return config.save()
@staticmethod
def config_update_description(path, text):
config = ProjectConfig(path, parse_extra=False, expand_interpolations=False)
if not config.has_section("platformio"):
config.add_section("platformio")
if text:
config.set("platformio", "description", text)
else:
if config.has_option("platformio", "description"):
config.remove_option("platformio", "description")
if not config.options("platformio"):
config.remove_section("platformio")
return config.save()
@staticmethod
def get_config_schema():
return get_config_options_schema()
@staticmethod
def get_projects():
def _get_project_data():
data = {"boards": [], "envLibdepsDirs": [], "libExtraDirs": []}
config = ProjectConfig()
data["envs"] = config.envs()
data["description"] = config.get("platformio", "description")
data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
libdeps_dir = config.get("platformio", "libdeps_dir")
for section in config.sections():
if not section.startswith("env:"):
continue
data["envLibdepsDirs"].append(os.path.join(libdeps_dir, section[4:]))
if config.has_option(section, "board"):
data["boards"].append(config.get(section, "board"))
data["libExtraDirs"].extend(config.get(section, "lib_extra_dirs", []))
# skip non existing folders and resolve full path
for key in ("envLibdepsDirs", "libExtraDirs"):
data[key] = [
fs.expanduser(d) if d.startswith("~") else os.path.abspath(d)
for d in data[key]
if os.path.isdir(d)
]
return data
def _path_to_name(path):
return (os.path.sep).join(path.split(os.path.sep)[-2:])
result = []
pm = PlatformPackageManager()
for project_dir in AppRPC.load_state()["storage"]["recentProjects"]:
if not os.path.isdir(project_dir):
continue
data = {}
boards = []
try:
with fs.cd(project_dir):
data = _get_project_data()
except ProjectError:
continue
for board_id in data.get("boards", []):
name = board_id
try:
name = pm.board_config(board_id)["name"]
except exception.PlatformioException:
pass
boards.append({"id": board_id, "name": name})
result.append(
{
"path": project_dir,
"name": _path_to_name(project_dir),
"modified": int(os.path.getmtime(project_dir)),
"boards": boards,
"description": data.get("description"),
"envs": data.get("envs", []),
"envLibStorages": [
{"name": os.path.basename(d), "path": d}
for d in data.get("envLibdepsDirs", [])
],
"extraLibStorages": [
{"name": _path_to_name(d), "path": d}
for d in data.get("libExtraDirs", [])
],
}
)
return result
@staticmethod
def get_project_examples():
result = []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
examples_dir = os.path.join(pkg.path, "examples")
if not os.path.isdir(examples_dir):
continue
items = []
for project_dir, _, __ in os.walk(examples_dir):
project_description = None
try:
config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
config.validate(silent=True)
project_description = config.get("platformio", "description")
except ProjectError:
continue
path_tokens = project_dir.split(os.path.sep)
items.append(
{
"name": "/".join(
path_tokens[path_tokens.index("examples") + 1 :]
),
"path": project_dir,
"description": project_description,
}
)
manifest = pm.load_manifest(pkg)
result.append(
{
"platform": {
"title": manifest["title"],
"version": manifest["version"],
},
"items": sorted(items, key=lambda item: item["name"]),
}
)
return sorted(result, key=lambda data: data["platform"]["title"])
async def init(self, board, framework, project_dir):
assert project_dir
if not os.path.isdir(project_dir):
os.makedirs(project_dir)
args = ["init", "--board", board, "--sample-code"]
if framework:
args.extend(["--project-option", "framework = %s" % framework])
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
return project_dir
@staticmethod
async def import_arduino(board, use_arduino_libs, arduino_project_dir):
board = str(board)
# don't import PIO Project
if is_platformio_project(arduino_project_dir):
return arduino_project_dir
is_arduino_project = any(
os.path.isfile(
os.path.join(
arduino_project_dir,
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
)
)
for ext in ("ino", "pde")
)
if not is_arduino_project:
raise JSONRPC20DispatchException(
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
)
state = AppRPC.load_state()
project_dir = os.path.join(
state["storage"]["projectsDir"], time.strftime("%y%m%d-%H%M%S-") + board
)
if not os.path.isdir(project_dir):
os.makedirs(project_dir)
args = ["init", "--board", board]
args.extend(["--project-option", "framework = arduino"])
if use_arduino_libs:
args.extend(
["--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries"]
)
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get("platformio", "src_dir")
if os.path.isdir(src_dir):
fs.rmtree(src_dir)
shutil.copytree(arduino_project_dir, src_dir, symlinks=True)
return project_dir
@staticmethod
async def import_pio(project_dir):
if not project_dir or not is_platformio_project(project_dir):
raise JSONRPC20DispatchException(
code=4001, message="Not an PlatformIO project: %s" % project_dir
)
new_project_dir = os.path.join(
AppRPC.load_state()["storage"]["projectsDir"],
time.strftime("%y%m%d-%H%M%S-") + os.path.basename(project_dir),
)
shutil.copytree(project_dir, new_project_dir, symlinks=True)
args = ["init"]
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])
await PIOCoreRPC.call(
args, options={"cwd": new_project_dir, "force_subprocess": True}
)
return new_project_dir
async def init_v2(self, configuration, options=None):
async def init(self, configuration, options=None):
project_dir = os.path.join(configuration["location"], configuration["name"])
if not os.path.isdir(project_dir):
os.makedirs(project_dir)
envclone = os.environ.copy()
envclone["PLATFORMIO_FORCE_ANSI"] = "true"
options = options or {}
options["spawn"] = {"env": envclone, "cwd": project_dir}
args = ["project", "init"]
args = ["project", "init", "-d", project_dir]
ide = app.get_session_var("caller_id")
if ide in ProjectGenerator.get_supported_ides():
args.extend(["--ide", ide])
exec_options = options.get("exec", {})
if configuration.get("example"):
await self.factory.notify_clients(
method=options.get("stdoutNotificationMethod"),
method=exec_options.get("stdoutNotificationMethod"),
params=["Copying example files...\n"],
actor="frontend",
)
@@ -293,7 +64,9 @@ class ProjectRPC(BaseRPCHandler):
else:
args.extend(self._pre_init_empty(configuration))
return await self.factory.manager.dispatcher["core.exec"](args, options=options)
return await self.factory.manager.dispatcher["core.exec"](
args, options=exec_options, raise_exception=False
)
@staticmethod
def _pre_init_empty(configuration):
@@ -335,3 +108,51 @@ class ProjectRPC(BaseRPCHandler):
encoding="utf-8",
)
return []
@staticmethod
async def configuration(project_dir, env):
with fs.cd(project_dir):
config = ProjectConfig.get_instance()
config.validate(envs=[env])
platform = PlatformFactory.from_env(env, autoinstall=True)
platform_pkg = PlatformPackageManager().get_package(platform.get_dir())
board_id = config.get(f"env:{env}", "board", None)
# frameworks
frameworks = []
for name in config.get(f"env:{env}", "framework", []):
if name not in platform.frameworks:
continue
f_pkg_name = platform.frameworks[name].get("package")
if not f_pkg_name:
continue
f_pkg = platform.get_package(f_pkg_name)
if not f_pkg:
continue
f_manifest = platform.pm.load_manifest(f_pkg)
frameworks.append(
dict(
name=name,
title=f_manifest.get("title"),
version=str(f_pkg.metadata.version),
)
)
return dict(
platform=dict(
ownername=(
platform_pkg.metadata.spec.owner
if platform_pkg.metadata.spec
else None
),
name=platform.name,
title=platform.title,
version=str(platform_pkg.metadata.version),
),
board=(
platform.board_config(board_id).get_brief_data()
if board_id
else None
),
frameworks=frameworks or None,
)

View File

@@ -12,20 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.compat import aio_to_thread
from platformio.home.rpc.handlers.base import BaseRPCHandler
from platformio.registry.client import RegistryClient
class RegistryRPC(BaseRPCHandler):
NAMESPACE = "registry"
@staticmethod
async def call_client(method, *args, **kwargs):
try:
client = RegistryClient()
return await aio_to_thread(getattr(client, method), *args, **kwargs)
except Exception as exc: # pylint: disable=bare-except
raise JSONRPC20DispatchException(
code=5000, message="Registry Call Error", data=str(exc)
) from exc
def call_client(method, *args, **kwargs):
with RegistryClient() as client:
return getattr(client, method)(*args, **kwargs)

View File

@@ -12,22 +12,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import inspect
from urllib.parse import parse_qs
import ajsonrpc.utils
import ajsonrpc.manager
import click
from ajsonrpc.core import JSONRPC20Error, JSONRPC20Request
from ajsonrpc.dispatcher import Dispatcher
from ajsonrpc.manager import AsyncJSONRPCResponseManager, JSONRPC20Response
from starlette.endpoints import WebSocketEndpoint
from platformio.compat import aio_create_task, aio_get_running_loop
from platformio.compat import aio_create_task, aio_get_running_loop, aio_to_thread
from platformio.http import InternetConnectionError
from platformio.proc import force_exit
# Remove this line when PR is merged
# https://github.com/pavlov99/ajsonrpc/pull/22
ajsonrpc.utils.is_invalid_params = lambda: False
ajsonrpc.manager.is_invalid_params = lambda *args, **kwargs: False
class JSONRPCServerFactoryBase:
@@ -44,9 +46,18 @@ class JSONRPCServerFactoryBase:
def __call__(self, *args, **kwargs):
raise NotImplementedError
def add_object_handler(self, handler, namespace):
handler.factory = self
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
def add_object_handler(self, obj):
obj.factory = self
namespace = obj.NAMESPACE or obj.__class__.__name__
for name in dir(obj):
method = getattr(obj, name)
if name.startswith("_") or not (
inspect.ismethod(method) or inspect.isfunction(method)
):
continue
if not inspect.iscoroutinefunction(method):
method = functools.partial(aio_to_thread, method)
self.manager.dispatcher.add_function(method, name=f"{namespace}.{name}")
def on_client_connect(self, connection, actor=None):
self._clients[connection] = {"actor": actor}

View File

@@ -28,10 +28,11 @@ from platformio.compat import aio_get_running_loop
from platformio.exception import PlatformioException
from platformio.home.rpc.handlers.account import AccountRPC
from platformio.home.rpc.handlers.app import AppRPC
from platformio.home.rpc.handlers.core import CoreRPC
from platformio.home.rpc.handlers.ide import IDERPC
from platformio.home.rpc.handlers.memusage import MemUsageRPC
from platformio.home.rpc.handlers.misc import MiscRPC
from platformio.home.rpc.handlers.os import OSRPC
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.home.rpc.handlers.platform import PlatformRPC
from platformio.home.rpc.handlers.project import ProjectRPC
from platformio.home.rpc.handlers.registry import RegistryRPC
@@ -67,15 +68,16 @@ def run_server(host, port, no_open, shutdown_timeout, home_url):
raise PlatformioException("Invalid path to PIO Home Contrib")
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
ws_rpc_factory.add_object_handler(AccountRPC(), namespace="account")
ws_rpc_factory.add_object_handler(AppRPC(), namespace="app")
ws_rpc_factory.add_object_handler(IDERPC(), namespace="ide")
ws_rpc_factory.add_object_handler(MiscRPC(), namespace="misc")
ws_rpc_factory.add_object_handler(OSRPC(), namespace="os")
ws_rpc_factory.add_object_handler(PIOCoreRPC(), namespace="core")
ws_rpc_factory.add_object_handler(ProjectRPC(), namespace="project")
ws_rpc_factory.add_object_handler(PlatformRPC(), namespace="platform")
ws_rpc_factory.add_object_handler(RegistryRPC(), namespace="registry")
ws_rpc_factory.add_object_handler(AccountRPC())
ws_rpc_factory.add_object_handler(AppRPC())
ws_rpc_factory.add_object_handler(IDERPC())
ws_rpc_factory.add_object_handler(MemUsageRPC())
ws_rpc_factory.add_object_handler(MiscRPC())
ws_rpc_factory.add_object_handler(OSRPC())
ws_rpc_factory.add_object_handler(CoreRPC())
ws_rpc_factory.add_object_handler(ProjectRPC())
ws_rpc_factory.add_object_handler(PlatformRPC())
ws_rpc_factory.add_object_handler(RegistryRPC())
path = urlparse(home_url).path
routes = [

View File

@@ -12,22 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import itertools
import json
import os
import socket
from urllib.parse import urljoin
import time
import requests.adapters
from urllib3.util.retry import Retry
import httpx
from platformio import __check_internet_hosts__, app, util
from platformio.cache import ContentCache, cleanup_content_cache
from platformio.compat import is_proxy_set
from platformio.exception import PlatformioException, UserSideException
__default_requests_timeout__ = (10, None) # (connect, read)
RETRIES_BACKOFF_FACTOR = 2 # 0s, 2s, 4s, 8s, etc.
RETRIES_METHOD_WHITELIST = ["GET"]
RETRIES_STATUS_FORCELIST = [429, 500, 502, 503, 504]
class HTTPClientError(UserSideException):
class HttpClientApiError(UserSideException):
def __init__(self, message, response=None):
super().__init__()
self.message = message
@@ -40,84 +43,138 @@ class HTTPClientError(UserSideException):
class InternetConnectionError(UserSideException):
MESSAGE = (
"You are not connected to the Internet.\n"
"PlatformIO needs the Internet connection to"
" download dependent packages or to work with PlatformIO Account."
"PlatformIO needs the Internet connection to "
"download dependent packages or to work with PlatformIO Account."
)
class HTTPSession(requests.Session):
def __init__(self, *args, **kwargs):
self._x_base_url = kwargs.pop("x_base_url") if "x_base_url" in kwargs else None
super().__init__(*args, **kwargs)
self.headers.update({"User-Agent": app.get_user_agent()})
try:
self.verify = app.get_setting("enable_proxy_strict_ssl")
except PlatformioException:
self.verify = True
def exponential_backoff(factor):
yield 0
for n in itertools.count(2):
yield factor * (2 ** (n - 2))
def request( # pylint: disable=signature-differs,arguments-differ
self, method, url, *args, **kwargs
def apply_default_kwargs(kwargs=None):
kwargs = kwargs or {}
# enable redirects by default
kwargs["follow_redirects"] = kwargs.get("follow_redirects", True)
try:
kwargs["verify"] = kwargs.get(
"verify", app.get_setting("enable_proxy_strict_ssl")
)
except PlatformioException:
kwargs["verify"] = True
headers = kwargs.pop("headers", {})
if "User-Agent" not in headers:
headers.update({"User-Agent": app.get_user_agent()})
kwargs["headers"] = headers
retry = kwargs.pop("retry", None)
if retry:
kwargs["transport"] = HTTPRetryTransport(verify=kwargs["verify"], **retry)
return kwargs
class HTTPRetryTransport(httpx.HTTPTransport):
def __init__( # pylint: disable=too-many-arguments
self,
verify=True,
retries=1,
backoff_factor=None,
status_forcelist=None,
method_whitelist=None,
):
# print("HTTPSession::request", self._x_base_url, method, url, args, kwargs)
if "timeout" not in kwargs:
kwargs["timeout"] = __default_requests_timeout__
return super().request(
method,
url
if url.startswith("http") or not self._x_base_url
else urljoin(self._x_base_url, url),
*args,
**kwargs
super().__init__(verify=verify)
self._retries = retries
self._backoff_factor = backoff_factor or RETRIES_BACKOFF_FACTOR
self._status_forcelist = status_forcelist or RETRIES_STATUS_FORCELIST
self._method_whitelist = method_whitelist or RETRIES_METHOD_WHITELIST
def handle_request(self, request):
retries_left = self._retries
delays = exponential_backoff(factor=self._backoff_factor)
while retries_left > 0:
retries_left -= 1
try:
response = super().handle_request(request)
if response.status_code in self._status_forcelist:
if request.method.upper() not in self._method_whitelist:
return response
raise httpx.HTTPStatusError(
f"Server error '{response.status_code} {response.reason_phrase}' "
f"for url '{request.url}'\n",
request=request,
response=response,
)
return response
except httpx.HTTPError:
if retries_left == 0:
raise
time.sleep(next(delays) or 1)
raise httpx.RequestError(
f"Could not process '{request.url}' request", request=request
)
class HTTPSessionIterator:
def __init__(self, endpoints):
class HTTPSession(httpx.Client):
def __init__(self, *args, **kwargs):
super().__init__(*args, **apply_default_kwargs(kwargs))
class HttpEndpointPool:
def __init__(self, endpoints, session_retry=None):
if not isinstance(endpoints, list):
endpoints = [endpoints]
self.endpoints = endpoints
self.endpoints_iter = iter(endpoints)
# https://urllib3.readthedocs.io/en/stable/reference/urllib3.util.html
self.retry = Retry(
total=5,
backoff_factor=1, # [0, 2, 4, 8, 16] secs
# method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
status_forcelist=[413, 429, 500, 502, 503, 504],
)
self.session_retry = session_retry
def __iter__(self): # pylint: disable=non-iterator-returned
return self
def __next__(self):
base_url = next(self.endpoints_iter)
session = HTTPSession(x_base_url=base_url)
adapter = requests.adapters.HTTPAdapter(max_retries=self.retry)
session.mount(base_url, adapter)
return session
class HTTPClient:
def __init__(self, endpoints):
self._session_iter = HTTPSessionIterator(endpoints)
self._session = None
self._next_session()
def __del__(self):
if not self._session:
return
try:
self._session.close()
except: # pylint: disable=bare-except
pass
self._endpoints_iter = iter(endpoints)
self._session = None
def _next_session(self):
self.next()
def close(self):
if self._session:
self._session.close()
self._session = next(self._session_iter)
def next(self):
if self._session:
self._session.close()
self._session = HTTPSession(
base_url=next(self._endpoints_iter), retry=self.session_retry
)
def request(self, method, *args, **kwargs):
while True:
try:
return self._session.request(method, *args, **kwargs)
except httpx.HTTPError as exc:
try:
self.next()
except StopIteration as exc2:
raise exc from exc2
class HttpApiClient(contextlib.AbstractContextManager):
def __init__(self, endpoints):
self._endpoint = HttpEndpointPool(endpoints, session_retry=dict(retries=5))
def __exit__(self, *excinfo):
self.close()
def __del__(self):
self.close()
def close(self):
if getattr(self, "_endpoint"):
self._endpoint.close()
@util.throttle(500)
def send_request(self, method, path, **kwargs):
def send_request(self, method, *args, **kwargs):
# check the Internet connection first to avoid waiting out a 60-second timeout
ensure_internet_on(raise_exception=True)
@@ -131,23 +188,28 @@ class HTTPClient:
# pylint: disable=import-outside-toplevel
from platformio.account.client import AccountClient
headers["Authorization"] = (
"Bearer %s" % AccountClient().fetch_authentication_token()
)
with AccountClient() as client:
headers["Authorization"] = (
"Bearer %s" % client.fetch_authentication_token()
)
kwargs["headers"] = headers
while True:
try:
return getattr(self._session, method)(path, **kwargs)
except requests.exceptions.RequestException as exc:
try:
self._next_session()
except Exception as exc2:
raise HTTPClientError(str(exc2)) from exc
try:
return self._endpoint.request(method, *args, **kwargs)
except httpx.HTTPError as exc:
raise HttpClientApiError(str(exc)) from exc
def fetch_json_data(self, method, path, **kwargs):
if method not in ("get", "head", "options"):
cleanup_content_cache("http")
# remove empty params
if kwargs.get("params"):
kwargs["params"] = {
key: value
for key, value in kwargs.get("params").items()
if value is not None
}
cache_valid = kwargs.pop("x_cache_valid") if "x_cache_valid" in kwargs else None
if not cache_valid:
return self._parse_json_response(self.send_request(method, path, **kwargs))
@@ -177,7 +239,7 @@ class HTTPClient:
message = response.json()["message"]
except (KeyError, ValueError):
message = response.text
raise HTTPClientError(message, response)
raise HttpClientApiError(message, response)
#
@@ -191,10 +253,8 @@ def _internet_on():
socket.setdefaulttimeout(timeout)
for host in __check_internet_hosts__:
try:
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
if not os.getenv(var) and not os.getenv(var.lower()):
continue
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
if is_proxy_set():
httpx.get("http://%s" % host, follow_redirects=False, timeout=timeout)
return True
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
# to all possible addresses (IPv4 and IPv6) in turn until a connection succeeds:
@@ -213,9 +273,8 @@ def ensure_internet_on(raise_exception=False):
return result
def fetch_remote_content(*args, **kwargs):
with HTTPSession() as s:
r = s.get(*args, **kwargs)
r.raise_for_status()
r.close()
return r.text
def fetch_http_content(*args, **kwargs):
with HTTPSession() as session:
response = session.get(*args, **kwargs)
response.raise_for_status()
return response.text
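For reference, the retry-with-exponential-backoff behavior introduced in this module, sketched against plain httpx (a recent httpx release is assumed; the URL is a placeholder, and real code would go through platformio.http.HTTPSession rather than a bare Client):

import itertools
import time

import httpx

def exponential_backoff(factor):
    # yields 0, factor, 2*factor, 4*factor, ...
    yield 0
    for n in itertools.count(2):
        yield factor * (2 ** (n - 2))

class RetryTransport(httpx.HTTPTransport):
    def __init__(self, retries=3, statuses=(429, 500, 502, 503, 504)):
        super().__init__()
        self._retries = retries
        self._statuses = statuses

    def handle_request(self, request):
        delays = exponential_backoff(factor=2)
        for attempt in range(self._retries):
            try:
                response = super().handle_request(request)
                if request.method == "GET" and response.status_code in self._statuses:
                    raise httpx.HTTPStatusError(
                        "retryable status %d" % response.status_code,
                        request=request,
                        response=response,
                    )
                return response
            except httpx.HTTPError:
                if attempt == self._retries - 1:
                    raise
                time.sleep(next(delays) or 1)

with httpx.Client(transport=RetryTransport(), follow_redirects=True) as client:
    print(client.get("https://example.com/").status_code)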

View File

@@ -23,7 +23,11 @@ from platformio import __version__, app, exception, fs, telemetry
from platformio.cache import cleanup_content_cache
from platformio.cli import PlatformioCLI
from platformio.commands.upgrade import get_latest_version
from platformio.http import HTTPClientError, InternetConnectionError, ensure_internet_on
from platformio.http import (
HttpClientApiError,
InternetConnectionError,
ensure_internet_on,
)
from platformio.package.manager.core import update_core_packages
from platformio.package.version import pepver_to_semver
from platformio.system.prune import calculate_unnecessary_system_data
@@ -46,7 +50,7 @@ def on_cmd_end():
check_platformio_upgrade()
check_prune_system()
except (
HTTPClientError,
HttpClientApiError,
InternetConnectionError,
exception.GetLatestVersionError,
):

View File

@@ -20,7 +20,7 @@ import click
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.exception import ReturnErrorCode, UserSideException
from platformio.package.manager.tool import ToolPackageManager
from platformio.proc import get_pythonexe_path
from platformio.proc import get_pythonexe_path, where_is_program
@click.command("exec", short_help="Run command from package tool")
@@ -52,9 +52,13 @@ def package_exec_cmd(obj, package, call, args):
inject_pkg_to_environ(pkg)
os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()
# inject current python interpreter on Windows
if IS_WINDOWS and args and args[0].endswith(".py"):
if args[0].endswith(".py"):
args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
if not os.path.exists(args[1]):
args[1] = where_is_program(args[1])
result = None
try:
run_options = dict(shell=call is not None, env=os.environ)

View File

@@ -20,6 +20,7 @@ import click
from platformio import fs
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.core import get_core_package_dir
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
@@ -120,7 +121,7 @@ def install_project_env_dependencies(project_env, options=None):
# custom tools
if options.get("tools"):
installed_conds.append(_install_project_env_custom_tools(project_env, options))
# custom ibraries
# custom libraries
if options.get("libraries"):
installed_conds.append(
_install_project_env_custom_libraries(project_env, options)
@@ -152,6 +153,8 @@ def _install_project_env_platform(project_env, options):
skip_dependencies=options.get("skip_dependencies"),
force=options.get("force"),
)
# ensure SCons is installed
get_core_package_dir("tool-scons")
return not already_up_to_date
@@ -206,7 +209,7 @@ def _install_project_env_libraries(project_env, options):
config = ProjectConfig.get_instance()
compatibility_qualifiers = {}
if config.get(f"env:{project_env}", "platform"):
if config.get(f"env:{project_env}", "platform", None):
try:
p = PlatformFactory.new(config.get(f"env:{project_env}", "platform"))
compatibility_qualifiers["platforms"] = [p.name]
@@ -219,9 +222,11 @@ def _install_project_env_libraries(project_env, options):
env_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "libdeps_dir"), project_env),
compatibility=PackageCompatibility(**compatibility_qualifiers)
if compatibility_qualifiers
else None,
compatibility=(
PackageCompatibility(**compatibility_qualifiers)
if compatibility_qualifiers
else None
),
)
private_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "lib_dir"))

View File

@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from typing import List
import click
@@ -21,12 +21,13 @@ from platformio import fs
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.package.meta import PackageInfo, PackageItem, PackageSpec
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
@click.command("list", short_help="List installed packages")
@click.command("list", short_help="List project packages")
@click.option(
"-d",
"--project-dir",
@@ -47,79 +48,116 @@ from platformio.project.config import ProjectConfig
@click.option("--only-platforms", is_flag=True, help="List only platform packages")
@click.option("--only-tools", is_flag=True, help="List only tool packages")
@click.option("--only-libraries", is_flag=True, help="List only library packages")
@click.option("--json-output", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def package_list_cmd(**options):
if options.get("global"):
data = (
list_global_packages(options)
if options.get("global")
else list_project_packages(options)
)
if options.get("json_output"):
return click.echo(_dump_to_json(data, options))
def _print_items(typex, items):
click.secho(typex.capitalize(), bold=True)
print_dependency_tree(items, verbose=options.get("verbose"))
click.echo()
if options.get("global"):
for typex, items in data.items():
_print_items(typex, items)
else:
list_project_packages(options)
for env, env_data in data.items():
click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
for typex, items in env_data.items():
_print_items(typex, items)
return None
def humanize_package(pkg, spec=None, verbose=False):
if spec and not isinstance(spec, PackageSpec):
spec = PackageSpec(spec)
data = [
click.style(pkg.metadata.name, fg="cyan"),
click.style(f"@ {str(pkg.metadata.version)}", bold=True),
]
extra_data = ["required: %s" % (spec.humanize() if spec else "Any")]
if verbose:
extra_data.append(pkg.path)
data.append("(%s)" % ", ".join(extra_data))
return " ".join(data)
def _dump_to_json(data, options):
result = {}
if options.get("global"):
for typex, items in data.items():
result[typex] = [info.as_dict(with_manifest=True) for info in items]
else:
for env, env_data in data.items():
result[env] = {}
for typex, items in env_data.items():
result[env][typex] = [
info.as_dict(with_manifest=True) for info in items
]
return json.dumps(result)
def print_dependency_tree(pm, specs=None, filter_specs=None, level=0, verbose=False):
def build_package_info(pm, specs=None, filter_specs=None, resolve_dependencies=True):
filtered_pkgs = [
pm.get_package(spec) for spec in filter_specs or [] if pm.get_package(spec)
pm.get_package(spec) for spec in filter_specs if pm.get_package(spec)
]
candidates = {}
candidates = []
if specs:
for spec in specs:
pkg = pm.get_package(spec)
if not pkg:
continue
candidates[pkg.path] = (pkg, spec)
candidates.append(
PackageInfo(
spec if isinstance(spec, PackageSpec) else PackageSpec(spec),
pm.get_package(spec),
)
)
else:
candidates = {pkg.path: (pkg, pkg.metadata.spec) for pkg in pm.get_installed()}
candidates = [PackageInfo(pkg.metadata.spec, pkg) for pkg in pm.get_installed()]
if not candidates:
return
candidates = sorted(candidates.values(), key=lambda item: item[0].metadata.name)
return []
for index, (pkg, spec) in enumerate(candidates):
if filtered_pkgs and not _pkg_tree_contains(pm, pkg, filtered_pkgs):
continue
printed_pkgs = pm.memcache_get("__printed_pkgs", [])
if printed_pkgs and pkg.path in printed_pkgs:
continue
printed_pkgs.append(pkg.path)
pm.memcache_set("__printed_pkgs", printed_pkgs)
candidates = sorted(
candidates,
key=lambda info: info.item.metadata.name if info.item else info.spec.humanize(),
)
click.echo(
"%s%s %s"
% (
"" * level,
"├──" if index < len(candidates) - 1 else "└──",
humanize_package(
pkg,
spec=spec,
verbose=verbose,
result = []
for info in candidates:
if filter_specs and (
not info.item or not _pkg_tree_contains(pm, info.item, filtered_pkgs)
):
continue
if not info.item:
if not info.spec.external and not info.spec.owner: # built-in library?
continue
result.append(info)
continue
visited_pkgs = pm.memcache_get("__visited_pkgs", [])
if visited_pkgs and info.item.path in visited_pkgs:
continue
visited_pkgs.append(info.item.path)
pm.memcache_set("__visited_pkgs", visited_pkgs)
result.append(
PackageInfo(
info.spec,
info.item,
(
build_package_info(
pm,
specs=[
pm.dependency_to_spec(item)
for item in pm.get_pkg_dependencies(info.item)
],
filter_specs=filter_specs,
resolve_dependencies=True,
)
if resolve_dependencies and pm.get_pkg_dependencies(info.item)
else []
),
)
)
dependencies = pm.get_pkg_dependencies(pkg)
if dependencies:
print_dependency_tree(
pm,
specs=[pm.dependency_to_spec(item) for item in dependencies],
filter_specs=filter_specs,
level=level + 1,
verbose=verbose,
)
return result
def _pkg_tree_contains(pm, root: PackageItem, children: List[PackageItem]):
def _pkg_tree_contains(pm, root: PackageItem, children: list[PackageItem]):
if root in children:
return True
for dependency in pm.get_pkg_dependencies(root) or []:
@@ -138,6 +176,7 @@ def list_global_packages(options):
only_packages = any(
options.get(typex) or options.get(f"only_{typex}") for (typex, _) in data
)
result = {}
for typex, pm in data:
skip_conds = [
only_packages
@@ -147,82 +186,115 @@ def list_global_packages(options):
]
if any(skip_conds):
continue
click.secho(typex.capitalize(), bold=True)
print_dependency_tree(
pm, filter_specs=options.get(typex), verbose=options.get("verbose")
)
click.echo()
result[typex] = build_package_info(pm, filter_specs=options.get(typex))
return result
def list_project_packages(options):
environments = options["environments"]
only_packages = any(
only_filtered_packages = any(
options.get(typex) or options.get(f"only_{typex}")
for typex in ("platforms", "tools", "libraries")
)
only_platform_packages = any(
options.get(typex) or options.get(f"only_{typex}")
for typex in ("platforms", "tools")
)
only_platform_package = options.get("platforms") or options.get("only_platforms")
only_tool_packages = options.get("tools") or options.get("only_tools")
only_library_packages = options.get("libraries") or options.get("only_libraries")
result = {}
with fs.cd(options["project_dir"]):
config = ProjectConfig.get_instance()
config.validate(environments)
for env in config.envs():
if environments and env not in environments:
continue
click.echo("Resolving %s dependencies..." % click.style(env, fg="cyan"))
found = False
if not only_packages or only_platform_packages:
_found = print_project_env_platform_packages(env, options)
found = found or _found
if not only_packages or only_library_packages:
_found = print_project_env_library_packages(env, options)
found = found or _found
if not found:
click.echo("No packages")
if (not environments and len(config.envs()) > 1) or len(environments) > 1:
click.echo()
result[env] = {}
if not only_filtered_packages or only_platform_package:
result[env]["platforms"] = list_project_env_platform_package(
env, options
)
if not only_filtered_packages or only_tool_packages:
result[env]["tools"] = list_project_env_tool_packages(env, options)
if not only_filtered_packages or only_library_packages:
result[env]["libraries"] = list_project_env_library_packages(
env, options
)
return result
def print_project_env_platform_packages(project_env, options):
config = ProjectConfig.get_instance()
platform = config.get(f"env:{project_env}", "platform")
if not platform:
return None
pkg = PlatformPackageManager().get_package(platform)
if not pkg:
return None
click.echo(
"Platform %s"
% (humanize_package(pkg, platform, verbose=options.get("verbose")))
def list_project_env_platform_package(project_env, options):
pm = PlatformPackageManager()
return build_package_info(
pm,
specs=[PackageSpec(pm.config.get(f"env:{project_env}", "platform"))],
filter_specs=options.get("platforms"),
resolve_dependencies=False,
)
p = PlatformFactory.new(pkg)
if project_env:
p.configure_project_packages(project_env)
print_dependency_tree(
def list_project_env_tool_packages(project_env, options):
try:
p = PlatformFactory.from_env(project_env, targets=["upload"])
except UnknownPlatform:
return []
return build_package_info(
p.pm,
specs=[p.get_package_spec(name) for name in p.packages],
specs=[
p.get_package_spec(name)
for name, options in p.packages.items()
if not options.get("optional")
],
filter_specs=options.get("tools"),
)
click.echo()
return True
def print_project_env_library_packages(project_env, options):
def list_project_env_library_packages(project_env, options):
config = ProjectConfig.get_instance()
lib_deps = config.get(f"env:{project_env}", "lib_deps")
lm = LibraryPackageManager(
os.path.join(config.get("platformio", "libdeps_dir"), project_env)
)
if not lib_deps or not lm.get_installed():
return None
click.echo("Libraries")
print_dependency_tree(
return build_package_info(
lm,
lib_deps,
filter_specs=options.get("libraries"),
verbose=options.get("verbose"),
)
return True
def humanize_package(info, verbose=False):
data = (
[
click.style(info.item.metadata.name, fg="cyan"),
click.style(f"@ {str(info.item.metadata.version)}", bold=True),
]
if info.item
else ["Not installed"]
)
extra_data = ["required: %s" % (info.spec.humanize() if info.spec else "Any")]
if verbose and info.item:
extra_data.append(info.item.path)
data.append("(%s)" % ", ".join(extra_data))
return " ".join(data)
def print_dependency_tree(items, verbose=False, level=0):
for index, info in enumerate(items):
click.echo(
"%s%s %s"
% (
"" * level,
"├──" if index < len(items) - 1 else "└──",
humanize_package(
info,
verbose=verbose,
),
)
)
if info.dependencies:
print_dependency_tree(
info.dependencies,
verbose=verbose,
level=level + 1,
)
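The new --json-output mode serializes the same PackageInfo tree that the text view prints. A rough consumer sketch (assumes a PlatformIO Core build from this branch on PATH, a project in the current directory, and that only the JSON document is written to stdout; the payload layout follows _dump_to_json and PackageInfo.as_dict above):

import json
import subprocess

raw = subprocess.run(
    ["pio", "pkg", "list", "--json-output"],
    check=True,
    capture_output=True,
    text=True,
).stdout
data = json.loads(raw)  # {env: {"platforms"|"tools"|"libraries": [PackageInfo.as_dict(), ...]}}

for env, groups in data.items():
    for group, infos in groups.items():
        for info in infos:
            item = info.get("item")  # None when the package is not installed
            name = (
                item["metadata"]["name"]
                if item
                else info["spec"].get("name", "?")
            )
            print(env, group, name)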

View File

@@ -62,10 +62,9 @@ class OutdatedCandidate:
)
@click.option("-e", "--environment", "environments", multiple=True)
def package_outdated_cmd(project_dir, environments):
candidates = fetch_outdated_candidates(
project_dir, environments, with_progress=True
)
print_outdated_candidates(candidates)
with fs.cd(project_dir):
candidates = fetch_outdated_candidates(environments, with_progress=True)
print_outdated_candidates(candidates)
def print_outdated_candidates(candidates):
@@ -126,8 +125,10 @@ def get_candidate_update_color(outdated):
return None
def fetch_outdated_candidates(project_dir, environments, with_progress=False):
def fetch_outdated_candidates(environments, with_progress=False):
candidates = []
config = ProjectConfig.get_instance()
config.validate(environments)
def _add_candidate(data):
new_candidate = OutdatedCandidate(
@@ -139,20 +140,16 @@ def fetch_outdated_candidates(project_dir, environments, with_progress=False):
return
candidates.append(new_candidate)
with fs.cd(project_dir):
config = ProjectConfig.get_instance()
config.validate(environments)
# platforms
for item in find_platform_candidates(config, environments):
_add_candidate(item)
# platform package dependencies
for dep_item in find_platform_dependency_candidates(item["env"]):
_add_candidate(dep_item)
# platforms
for item in find_platform_candidates(config, environments):
_add_candidate(item)
# platform package dependencies
for dep_item in find_platform_dependency_candidates(item["env"]):
_add_candidate(dep_item)
# libraries
for item in find_library_candidates(config, environments):
_add_candidate(item)
# libraries
for item in find_library_candidates(config, environments):
_add_candidate(item)
result = []
if not with_progress:
@@ -172,7 +169,7 @@ def find_platform_candidates(config, environments):
result = []
pm = PlatformPackageManager()
for env in config.envs():
platform = config.get(f"env:{env}", "platform")
platform = config.get(f"env:{env}", "platform", None)
if not platform or (environments and env not in environments):
continue
spec = PackageSpec(platform)
@@ -183,14 +180,13 @@ def find_platform_candidates(config, environments):
return result
def find_platform_dependency_candidates(platform_candidate):
def find_platform_dependency_candidates(env):
result = []
p = PlatformFactory.new(platform_candidate["spec"])
p.configure_project_packages(platform_candidate["env"])
p = PlatformFactory.from_env(env)
for pkg in p.get_installed_packages():
result.append(
dict(
env=platform_candidate["env"],
env=env,
pm=p.pm,
pkg=pkg,
spec=p.get_package_spec(pkg.metadata.name),

View File

@@ -86,8 +86,10 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
package, owner, typex, released_at, private, notify, no_interactive, non_interactive
):
click.secho("Preparing a package...", fg="cyan")
package = os.path.abspath(package)
no_interactive = no_interactive or non_interactive
owner = owner or AccountClient().get_logged_username()
with AccountClient() as client:
owner = owner or client.get_logged_username()
do_not_pack = (
not os.path.isdir(package)
and isinstance(FileUnpacker.new_archiver(package), TARArchiver)
@@ -145,9 +147,10 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
fg="yellow",
)
click.echo("Publishing...")
response = RegistryClient().publish_package(
owner, typex, archive_path, released_at, private, notify
)
with RegistryClient() as client:
response = client.publish_package(
owner, typex, archive_path, released_at, private, notify
)
if not do_not_pack:
os.remove(archive_path)
click.secho(response.get("message"), fg="green")

View File

@@ -29,8 +29,8 @@ from platformio.registry.client import RegistryClient
type=click.Choice(["relevance", "popularity", "trending", "added", "updated"]),
)
def package_search_cmd(query, page, sort):
client = RegistryClient()
result = client.list_packages(query, page=page, sort=sort)
with RegistryClient() as client:
result = client.list_packages(query, page=page, sort=sort)
if not result["total"]:
click.secho("Nothing has been found by your request", fg="yellow")
click.echo(
@@ -65,10 +65,12 @@ def print_search_item(item):
click.echo(
"%s%s • Published on %s"
% (
item["type"].capitalize()
if item["tier"] == "community"
else click.style(
("%s %s" % (item["tier"], item["type"])).title(), bold=True
(
item["type"].capitalize()
if item["tier"] == "community"
else click.style(
("%s %s" % (item["tier"], item["type"])).title(), bold=True
)
),
item["version"]["name"],
util.parse_datetime(item["version"]["released_at"]).strftime("%c"),

View File

@@ -124,31 +124,31 @@ def package_show_cmd(spec, pkg_type):
def fetch_package_data(spec, pkg_type=None):
assert isinstance(spec, PackageSpec)
client = RegistryClient()
if pkg_type and spec.owner and spec.name:
with RegistryClient() as client:
if pkg_type and spec.owner and spec.name:
return client.get_package(
pkg_type, spec.owner, spec.name, version=spec.requirements
)
qualifiers = {}
if spec.id:
qualifiers["ids"] = str(spec.id)
if spec.name:
qualifiers["names"] = spec.name.lower()
if pkg_type:
qualifiers["types"] = pkg_type
if spec.owner:
qualifiers["owners"] = spec.owner.lower()
packages = client.list_packages(qualifiers=qualifiers)["items"]
if not packages:
return None
if len(packages) > 1:
PackageManagerRegistryMixin.print_multi_package_issue(
click.echo, packages, spec
)
return None
return client.get_package(
pkg_type, spec.owner, spec.name, version=spec.requirements
packages[0]["type"],
packages[0]["owner"]["username"],
packages[0]["name"],
version=spec.requirements,
)
qualifiers = {}
if spec.id:
qualifiers["ids"] = str(spec.id)
if spec.name:
qualifiers["names"] = spec.name.lower()
if pkg_type:
qualifiers["types"] = pkg_type
if spec.owner:
qualifiers["owners"] = spec.owner.lower()
packages = client.list_packages(qualifiers=qualifiers)["items"]
if not packages:
return None
if len(packages) > 1:
PackageManagerRegistryMixin.print_multi_package_issue(
click.echo, packages, spec
)
return None
return client.get_package(
packages[0]["type"],
packages[0]["owner"]["username"],
packages[0]["name"],
version=spec.requirements,
)

View File

@@ -36,11 +36,14 @@ from platformio.registry.client import RegistryClient
)
def package_unpublish_cmd(package, type, undo): # pylint: disable=redefined-builtin
spec = PackageSpec(package)
response = RegistryClient().unpublish_package(
owner=spec.owner or AccountClient().get_logged_username(),
type=type,
name=spec.name,
version=str(spec.requirements),
undo=undo,
)
click.secho(response.get("message"), fg="green")
with AccountClient() as client:
owner = spec.owner or client.get_logged_username()
with RegistryClient() as client:
response = client.unpublish_package(
owner=owner,
type=type,
name=spec.name,
version=str(spec.requirements),
undo=undo,
)
click.secho(response.get("message"), fg="green")

View File

@@ -12,48 +12,28 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import tempfile
import time
from email.utils import parsedate
from os.path import getsize, join
from time import mktime
from urllib.parse import urlparse
import click
import httpx
from platformio import fs
from platformio.compat import is_terminal
from platformio.http import HTTPSession
from platformio.http import apply_default_kwargs
from platformio.package.exception import PackageException
class FileDownloader:
def __init__(self, url, dest_dir=None):
self._http_session = HTTPSession()
self._http_response = None
# make connection
self._http_response = self._http_session.get(
url,
stream=True,
)
if self._http_response.status_code != 200:
raise PackageException(
"Got the unrecognized status code '{0}' when downloaded {1}".format(
self._http_response.status_code, url
)
)
def __init__(self, url, dst_dir=None):
self.url = url
self.dst_dir = dst_dir
disposition = self._http_response.headers.get("content-disposition")
if disposition and "filename=" in disposition:
self._fname = (
disposition[disposition.index("filename=") + 9 :]
.replace('"', "")
.replace("'", "")
)
else:
self._fname = [p for p in url.split("/") if p][-1]
self._fname = str(self._fname)
self._destination = self._fname
if dest_dir:
self.set_destination(join(dest_dir, self._fname))
self._destination = None
self._http_response = None
def set_destination(self, destination):
self._destination = destination
@@ -69,18 +49,34 @@ class FileDownloader:
return -1
return int(self._http_response.headers["content-length"])
def get_disposition_filname(self):
disposition = self._http_response.headers.get("content-disposition")
if disposition and "filename=" in disposition:
return (
disposition[disposition.index("filename=") + 9 :]
.replace('"', "")
.replace("'", "")
)
return [p for p in urlparse(self.url).path.split("/") if p][-1]
def start(self, with_progress=True, silent=False):
label = "Downloading"
file_size = self.get_size()
itercontent = self._http_response.iter_content(
chunk_size=io.DEFAULT_BUFFER_SIZE
)
try:
with httpx.stream("GET", self.url, **apply_default_kwargs()) as response:
if response.status_code != 200:
raise PackageException(
f"Got the unrecognized status code '{response.status_code}' "
"when downloading '{self.url}'"
)
self._http_response = response
total_size = self.get_size()
if not self._destination:
assert self.dst_dir
with open(self._destination, "wb") as fp:
if file_size == -1 or not with_progress or silent:
if total_size == -1 or not with_progress or silent:
if not silent:
click.echo(f"{label}...")
for chunk in itercontent:
for chunk in response.iter_bytes():
fp.write(chunk)
elif not is_terminal():
@@ -88,10 +84,10 @@ class FileDownloader:
print_percent_step = 10
printed_percents = 0
downloaded_size = 0
for chunk in itercontent:
for chunk in response.iter_bytes():
fp.write(chunk)
downloaded_size += len(chunk)
if (downloaded_size / file_size * 100) >= (
if (downloaded_size / total_size * 100) >= (
printed_percents + print_percent_step
):
printed_percents += print_percent_step
@@ -100,33 +96,39 @@ class FileDownloader:
else:
with click.progressbar(
length=file_size,
iterable=itercontent,
length=total_size,
iterable=response.iter_bytes(),
label=label,
update_min_steps=min(
256 * 1024, file_size / 100
256 * 1024, total_size / 100
), # every 256Kb or less
) as pb:
for chunk in pb:
pb.update(len(chunk))
fp.write(chunk)
finally:
self._http_response.close()
self._http_session.close()
if self.get_lmtime():
self._preserve_filemtime(self.get_lmtime())
last_modified = self.get_lmtime()
if last_modified:
self._preserve_filemtime(last_modified)
return True
def _set_tmp_destination(self):
dst_dir = self.dst_dir or tempfile.mkdtemp()
self.set_destination(os.path.join(dst_dir, self.get_disposition_filname()))
def _preserve_filemtime(self, lmdate):
lmtime = time.mktime(parsedate(lmdate))
fs.change_filemtime(self._destination, lmtime)
def verify(self, checksum=None):
_dlsize = getsize(self._destination)
if self.get_size() != -1 and _dlsize != self.get_size():
remote_size = self.get_size()
downloaded_size = os.path.getsize(self._destination)
if remote_size not in (-1, downloaded_size):
raise PackageException(
(
"The size ({0:d} bytes) of downloaded file '{1}' "
"is not equal to remote size ({2:d} bytes)"
).format(_dlsize, self._fname, self.get_size())
f"The size ({downloaded_size} bytes) of downloaded file "
f"'{self._destination}' is not equal to remote size "
f"({remote_size} bytes)"
)
if not checksum:
return True
@@ -142,7 +144,7 @@ class FileDownloader:
if not hash_algo:
raise PackageException(
"Could not determine checksum algorithm by %s" % checksum
f"Could not determine checksum algorithm by {checksum}"
)
dl_checksum = fs.calculate_file_hashsum(hash_algo, self._destination)
@@ -150,16 +152,7 @@ class FileDownloader:
raise PackageException(
"The checksum '{0}' of the downloaded file '{1}' "
"does not match to the remote '{2}'".format(
dl_checksum, self._fname, checksum
dl_checksum, self._destination, checksum
)
)
return True
def _preserve_filemtime(self, lmdate):
lmtime = mktime(parsedate(lmdate))
fs.change_filemtime(self._destination, lmtime)
def __del__(self):
self._http_session.close()
if self._http_response:
self._http_response.close()
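A condensed view of the httpx streaming download used by the new FileDownloader (standalone; the URL and file name are placeholders):

import io

import httpx

url = "https://example.com/archive.tar.gz"
with httpx.stream("GET", url, follow_redirects=True) as response:
    response.raise_for_status()
    total_size = int(response.headers.get("content-length", -1))
    downloaded = 0
    with open("archive.tar.gz", "wb") as fp:
        for chunk in response.iter_bytes(chunk_size=io.DEFAULT_BUFFER_SIZE):
            fp.write(chunk)
            downloaded += len(chunk)
print("downloaded %d of %d bytes" % (downloaded, total_size))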

View File

@@ -98,9 +98,13 @@ class PackageManagerInstallMixin:
else:
pkg = self.install_from_registry(
spec,
search_qualifiers=compatibility.to_search_qualifiers()
if compatibility
else None,
search_qualifiers=(
compatibility.to_search_qualifiers(
["platforms", "frameworks", "authors"]
)
if compatibility
else None
),
)
if not pkg or not pkg.metadata:

View File

@@ -15,6 +15,7 @@
import time
import click
import httpx
from platformio.package.exception import UnknownPackageError
from platformio.package.meta import PackageSpec
@@ -57,7 +58,7 @@ class PackageManagerRegistryMixin:
),
checksum or pkgfile["checksum"]["sha256"],
)
except Exception as exc: # pylint: disable=broad-except
except httpx.HTTPError as exc:
self.log.warning(
click.style("Warning! Package Mirror: %s" % exc, fg="yellow")
)

View File

@@ -35,7 +35,7 @@ from platformio.package.manager._update import PackageManagerUpdateMixin
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.meta import (
PackageItem,
PackageMetaData,
PackageMetadata,
PackageSpec,
PackageType,
)
@@ -199,7 +199,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in
def build_metadata(self, pkg_dir, spec, vcs_revision=None):
manifest = self.load_manifest(pkg_dir)
metadata = PackageMetaData(
metadata = PackageMetadata(
type=self.pkg_type,
name=manifest.get("name"),
version=manifest.get("version"),
@@ -280,11 +280,15 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in
# external "URL" mismatch
if spec.external:
# local folder mismatch
if os.path.abspath(spec.uri) == os.path.abspath(pkg.path) or (
# local/symlinked folder mismatch
check_conds = [
os.path.abspath(spec.uri) == os.path.abspath(pkg.path),
spec.uri.startswith("file://")
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:])
):
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:]),
spec.uri.startswith("symlink://")
and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[10:]),
]
if any(check_conds):
return True
if spec.uri != pkg.metadata.spec.uri:
return False

View File

@@ -14,7 +14,8 @@
import os
from platformio import __core_packages__, exception
from platformio import exception
from platformio.dependencies import get_core_dependencies
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageSpec
@@ -23,7 +24,7 @@ from platformio.package.meta import PackageSpec
def get_installed_core_packages():
result = []
pm = ToolPackageManager()
for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
pkg = pm.get_package(spec)
if pkg:
@@ -32,11 +33,11 @@ def get_installed_core_packages():
def get_core_package_dir(name, spec=None, auto_install=True):
if name not in __core_packages__:
if name not in get_core_dependencies():
raise exception.PlatformioException("Please upgrade PlatformIO Core")
pm = ToolPackageManager()
spec = spec or PackageSpec(
owner="platformio", name=name, requirements=__core_packages__[name]
owner="platformio", name=name, requirements=get_core_dependencies()[name]
)
pkg = pm.get_package(spec)
if pkg:
@@ -50,7 +51,7 @@ def get_core_package_dir(name, spec=None, auto_install=True):
def update_core_packages():
pm = ToolPackageManager()
for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
try:
pm.update(spec, spec)
@@ -65,7 +66,7 @@ def remove_unnecessary_core_packages(dry_run=False):
pm = ToolPackageManager()
best_pkg_versions = {}
for name, requirements in __core_packages__.items():
for name, requirements in get_core_dependencies().items():
spec = PackageSpec(owner="platformio", name=name, requirements=requirements)
pkg = pm.get_package(spec)
if not pkg:

View File

@@ -15,7 +15,7 @@
import os
from platformio import util
from platformio.http import HTTPClientError, InternetConnectionError
from platformio.http import HttpClientApiError, InternetConnectionError
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.base import BasePackageManager
from platformio.package.manager.core import get_installed_core_packages
@@ -128,7 +128,7 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an
key = "%s:%s" % (board["platform"], board["id"])
if key not in know_boards:
boards.append(board)
except (HTTPClientError, InternetConnectionError):
except (HttpClientApiError, InternetConnectionError):
pass
return sorted(boards, key=lambda b: b["name"])

View File

@@ -22,7 +22,7 @@ from urllib.parse import urlparse
from platformio import util
from platformio.compat import get_object_members, string_types
from platformio.http import fetch_remote_content
from platformio.http import fetch_http_content
from platformio.package.exception import ManifestParserError, UnknownManifestError
from platformio.project.helpers import is_platformio_project
@@ -103,7 +103,7 @@ class ManifestParserFactory:
@staticmethod
def new_from_url(remote_url):
content = fetch_remote_content(remote_url)
content = fetch_http_content(remote_url)
return ManifestParserFactory.new(
content,
ManifestFileType.from_uri(remote_url) or ManifestFileType.LIBRARY_JSON,
@@ -294,9 +294,11 @@ class BaseManifestParser:
if not matched_files:
continue
result[root] = dict(
name="Examples"
if root == examples_dir
else os.path.relpath(root, examples_dir),
name=(
"Examples"
if root == examples_dir
else os.path.relpath(root, examples_dir)
),
base=os.path.relpath(root, package_dir),
files=matched_files,
)
@@ -540,6 +542,8 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
"esp32": "espressif32",
"arc32": "intel_arc32",
"stm32": "ststm32",
"nrf52": "nordicnrf52",
"rp2040": "raspberrypi",
}
for arch in properties.get("architectures", "").split(","):
if "particle-" in arch:

View File

@@ -17,12 +17,12 @@
import json
import re
import httpx
import marshmallow
import requests
import semantic_version
from marshmallow import Schema, ValidationError, fields, validate, validates
from platformio.http import fetch_remote_content
from platformio.http import fetch_http_content
from platformio.package.exception import ManifestValidationError
from platformio.util import memoized
@@ -252,7 +252,7 @@ class ManifestSchema(BaseSchema):
def validate_license(self, value):
try:
spdx = self.load_spdx_licenses()
except requests.exceptions.RequestException as exc:
except httpx.HTTPError as exc:
raise ValidationError(
"Could not load SPDX licenses for validation"
) from exc
@@ -276,9 +276,9 @@ class ManifestSchema(BaseSchema):
@staticmethod
@memoized(expire="1h")
def load_spdx_licenses():
version = "3.21"
version = "3.23"
spdx_data_url = (
"https://raw.githubusercontent.com/spdx/license-list-data/"
f"v{version}/json/licenses.json"
)
return json.loads(fetch_remote_content(spdx_data_url))
return json.loads(fetch_http_content(spdx_data_url))

View File

@@ -23,7 +23,7 @@ import semantic_version
from platformio import fs
from platformio.compat import get_object_members, hashlib_encode_data, string_types
from platformio.package.manifest.parser import ManifestFileType
from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
from platformio.package.version import SemanticVersionError, cast_version_to_semver
from platformio.util import items_in_list
@@ -65,7 +65,14 @@ class PackageType:
class PackageCompatibility:
KNOWN_QUALIFIERS = ("platforms", "frameworks", "authors")
KNOWN_QUALIFIERS = (
"owner",
"name",
"version",
"platforms",
"frameworks",
"authors",
)
@classmethod
def from_dependency(cls, dependency):
@@ -89,19 +96,45 @@ class PackageCompatibility:
def __repr__(self):
return "PackageCompatibility <%s>" % self.qualifiers
def to_search_qualifiers(self):
return self.qualifiers
def to_search_qualifiers(self, fields=None):
result = {}
for name, value in self.qualifiers.items():
if not fields or name in fields:
result[name] = value
return result
def is_compatible(self, other):
assert isinstance(other, PackageCompatibility)
for key, value in self.qualifiers.items():
for key, current_value in self.qualifiers.items():
other_value = other.qualifiers.get(key)
if not value or not other_value:
if not current_value or not other_value:
continue
if not items_in_list(value, other_value):
if any(isinstance(v, list) for v in (current_value, other_value)):
if not items_in_list(current_value, other_value):
return False
continue
if key == "version":
if not self._compare_versions(current_value, other_value):
return False
continue
if current_value != other_value:
return False
return True
def _compare_versions(self, current, other):
if current == other:
return True
try:
version = (
other
if isinstance(other, semantic_version.Version)
else cast_version_to_semver(other)
)
return version in semantic_version.SimpleSpec(current)
except ValueError:
pass
return False
class PackageOutdatedResult:
UPDATE_INCREMENT_MAJOR = "major"
@@ -401,7 +434,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
return name
class PackageMetaData:
class PackageMetadata:
def __init__( # pylint: disable=redefined-builtin
self, type, name, version, spec=None
):
@@ -416,7 +449,7 @@ class PackageMetaData:
def __repr__(self):
return (
"PackageMetaData <type={type} name={name} version={version} "
"PackageMetadata <type={type} name={name} version={version} "
"spec={spec}".format(**self.as_dict())
)
@@ -466,7 +499,7 @@ class PackageMetaData:
data["spec"]["uri"] = data["spec"]["url"]
del data["spec"]["url"]
data["spec"] = PackageSpec(**data["spec"])
return PackageMetaData(**data)
return PackageMetadata(**data)
class PackageItem:
@@ -485,9 +518,11 @@ class PackageItem:
def __eq__(self, other):
conds = [
os.path.realpath(self.path) == os.path.realpath(other.path)
if self.path and other.path
else self.path == other.path,
(
os.path.realpath(self.path) == os.path.realpath(other.path)
if self.path and other.path
else self.path == other.path
),
self.metadata == other.metadata,
]
return all(conds)
@@ -515,7 +550,7 @@ class PackageItem:
for location in self.get_metafile_locations():
manifest_path = os.path.join(location, self.METAFILE_NAME)
if os.path.isfile(manifest_path):
return PackageMetaData.load(manifest_path)
return PackageMetadata.load(manifest_path)
return None
def dump_meta(self):
@@ -526,3 +561,29 @@ class PackageItem:
break
assert location
return self.metadata.dump(os.path.join(location, self.METAFILE_NAME))
def as_dict(self):
return {"path": self.path, "metadata": self.metadata.as_dict()}
class PackageInfo:
def __init__(self, spec: PackageSpec, item: PackageItem = None, dependencies=None):
assert isinstance(spec, PackageSpec)
self.spec = spec
self.item = item
self.dependencies = dependencies or []
def as_dict(self, with_manifest=False):
result = {
"spec": self.spec.as_dict(),
"item": self.item.as_dict() if self.item else None,
"dependencies": [d.as_dict() for d in self.dependencies],
}
if with_manifest:
result["manifest"] = (
ManifestParserFactory.new_from_dir(self.item.path).as_dict()
if self.item
else None
)
return result
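The new version qualifier in PackageCompatibility accepts either an exact version or a semantic-version requirement. A standalone illustration of that comparison (semantic_version.Version.coerce stands in here for platformio's cast_version_to_semver):

import semantic_version

def version_compatible(current, other):
    # mirrors PackageCompatibility._compare_versions above
    if current == other:
        return True
    try:
        version = (
            other
            if isinstance(other, semantic_version.Version)
            else semantic_version.Version.coerce(str(other))
        )
        return version in semantic_version.SimpleSpec(current)
    except ValueError:
        return False

print(version_compatible(">=6.5.0,<7.0.0", "6.6.2"))  # True
print(version_compatible(">=6.5.0,<7.0.0", "7.0.0"))  # False
print(version_compatible("6.6.2", "6.6.2"))           # True (exact match short-circuits)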

View File

@@ -13,6 +13,7 @@
# limitations under the License.
import os
import sys
from tarfile import open as tarfile_open
from time import mktime
from zipfile import ZipFile
@@ -82,19 +83,23 @@ class TARArchiver(BaseArchiver):
).startswith(base)
def extract_item(self, item, dest_dir):
if sys.version_info >= (3, 12):
self._afo.extract(item, dest_dir, filter="data")
return self.after_extract(item, dest_dir)
# apply custom security logic
dest_dir = self.resolve_path(dest_dir)
bad_conds = [
self.is_bad_path(item.name, dest_dir),
self.is_link(item) and self.is_bad_link(item, dest_dir),
]
if not any(bad_conds):
super().extract_item(item, dest_dir)
else:
click.secho(
if any(bad_conds):
return click.secho(
"Blocked insecure item `%s` from TAR archive" % item.name,
fg="red",
err=True,
)
return super().extract_item(item, dest_dir)
class ZIPArchiver(BaseArchiver):

View File

@@ -51,13 +51,10 @@ class PlatformRunMixin:
assert isinstance(targets, list)
self.ensure_engine_compatible()
self.configure_project_packages(variables["pioenv"], targets)
self.silent = silent
self.verbose = verbose or app.get_setting("force_verbose")
variables["platform_manifest"] = self.manifest_path
if "build_script" not in variables:
variables["build_script"] = self.get_build_script()
if not os.path.isfile(variables["build_script"]):
@@ -119,9 +116,9 @@ class PlatformRunMixin:
args,
stdout=proc.BuildAsyncPipe(
line_callback=self._on_stdout_line,
data_callback=lambda data: None
if self.silent
else _write_and_flush(sys.stdout, data),
data_callback=lambda data: (
None if self.silent else _write_and_flush(sys.stdout, data)
),
),
stderr=proc.BuildAsyncPipe(
line_callback=self._on_stderr_line,

View File

@@ -34,6 +34,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
def __init__(self, manifest_path):
self.manifest_path = manifest_path
self.project_env = None # set by factory.from_env(env)
self.silent = False
self.verbose = False
@@ -168,6 +169,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE
def board_config(self, id_):
assert id_
return self.get_boards(id_)
def get_package_type(self, name):

View File

@@ -16,11 +16,15 @@ import os
import re
import sys
import httpx
from platformio import fs
from platformio.compat import load_python_module
from platformio.package.meta import PackageItem
from platformio.platform import base
from platformio.platform.exception import UnknownPlatform
from platformio.project.config import ProjectConfig
from platformio.project.exception import UndefinedEnvPlatformError
class PlatformFactory:
@@ -29,13 +33,16 @@ class PlatformFactory:
name = re.sub(r"[^\da-z\_]+", "", name, flags=re.I)
return "%sPlatform" % name.lower().capitalize()
@staticmethod
def load_platform_module(name, path):
@classmethod
def load_platform_module(cls, name, path):
# backward compatibility with the legacy dev-platforms
sys.modules["platformio.managers.platform"] = base
try:
return load_python_module("platformio.platform.%s" % name, path)
except ImportError as exc:
if exc.name == "requests" and not sys.modules.get("requests"):
sys.modules["requests"] = httpx
return cls.load_platform_module(name, path)
raise UnknownPlatform(name) from exc
@classmethod
@@ -88,3 +95,14 @@ class PlatformFactory:
_instance = platform_cls(os.path.join(platform_dir, "platform.json"))
assert isinstance(_instance, base.PlatformBase)
return _instance
@classmethod
def from_env(cls, env, targets=None, autoinstall=False):
config = ProjectConfig.get_instance()
spec = config.get(f"env:{env}", "platform", None)
if not spec:
raise UndefinedEnvPlatformError(env)
p = cls.new(spec, autoinstall=autoinstall)
p.project_env = env
p.configure_project_packages(env, targets)
return p
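Hypothetical usage of the new PlatformFactory.from_env helper (requires a PlatformIO Core build from this branch; the project path and the "esp32dev" environment name are placeholders):

from platformio import fs
from platformio.platform.factory import PlatformFactory

with fs.cd("/path/to/project"):  # a project defining [env:esp32dev] in platformio.ini
    platform = PlatformFactory.from_env("esp32dev", autoinstall=True)
    print(platform.name, platform.project_env)
    for pkg in platform.get_installed_packages():
        print(" -", pkg.metadata.name, pkg.metadata.version)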

Some files were not shown because too many files have changed in this diff.