Compare commits

...

306 Commits

Author SHA1 Message Date
e964c7fa5c Merge branch 'release/v5.2.2' 2021-10-20 18:44:28 +03:00
2e2773fa6b Bump version to 5.2.2 2021-10-20 18:44:20 +03:00
a9c7a27d47 Fix CLion 2021.3 support (#4085)
New CMake behavior crashes CLion with apostrophe symbols in `add_definitions` clause
see https://youtrack.jetbrains.com/issue/CPP-26719
2021-10-20 18:08:22 +03:00
e41ecb19cf Resolve an issue with interrupting a running program 2021-10-20 16:21:48 +03:00
5b091b602f Fixed a “TypeError” issue when extending configuration option in “platformio.ini” with the multi-line default value // Resolve #4082 2021-10-20 15:35:01 +03:00
768681c4f2 Remove debugging code // Resolve #4083 2021-10-19 19:27:20 +03:00
2e4e5c1873 Temporary disable CI for Windows+Python 3.10 2021-10-19 19:26:13 +03:00
4a61806e60 Quote Python versions 2021-10-19 18:52:30 +03:00
883187f9ac Bump version to 5.2.2a1 2021-10-19 18:21:28 +03:00
2d9a5031e9 Test PlatformIO Core on Python 3.10 2021-10-19 18:21:21 +03:00
39c93f6512 Override debugging firmware loading mode using `--load-mode` option for `pio debug` command 2021-10-19 18:20:01 +03:00
a7905b373e Skip CI for macOS & Py 3.6 2021-10-11 16:00:09 +03:00
a7c82ff9b9 Merge branch 'release/v5.2.1' 2021-10-11 15:07:19 +03:00
5b4b4a4051 Merge tag 'v5.2.1' into develop
Bump version to 5.2.1
2021-10-11 15:07:19 +03:00
c348fec609 Bump version to 5.2.1 2021-10-11 15:07:04 +03:00
4af17356f3 Handle ".hpp" files when looking for a library root 2021-10-11 15:01:42 +03:00
384e5052bc Bump version to 5.2.1rc2 2021-10-10 14:09:59 +03:00
a5adae1491 Skip broken Click 8.0.2 release // Resolve #4078 2021-10-10 14:09:17 +03:00
fe62b810db Bump version to 5.2.1rc1 2021-10-08 19:03:12 +03:00
ee78496058 Clean a build environment and installed library dependencies using a new `cleanall` target // Resolve #4062 2021-10-08 19:02:45 +03:00
8afe4bae87 Typo fix 2021-10-08 15:31:26 +03:00
b04bb2b740 Fix Click's "DeprecationWarning: 'resultcallback' has been renamed to 'result_callback'" // Resolve #4075 2021-10-08 15:18:34 +03:00
3d46f0d72f Drop support for Click < 7.1.2 2021-10-08 15:18:19 +03:00
a65d973660 Extend library root signs with "include" and "src" dirs // Resolve #4073 2021-10-08 15:00:05 +03:00
df83d90c06 Handle upper-cased "Include" & "Src" folders 2021-10-08 14:58:41 +03:00
a1d55f2529 Ignore telemetry on "idedata" target 2021-10-08 14:40:23 +03:00
aa097f3fd6 Update Cppcheck to v2.6.0 // Resolve #3942 2021-10-07 16:43:06 +03:00
e0b72202fd Bump version to 5.2.1b4 2021-09-29 19:21:55 +03:00
e8769fff7d Improved handling of a library root based on "Conan" or "CMake" build systems // Resolve #3887 2021-09-29 19:21:31 +03:00
ed33652534 Handle "test" folder as a part of CLion project // Resolve #4005 2021-09-29 15:44:52 +03:00
d1c1f972a6 Propagate agent option to remote device monitor command (#4065)
Signed-off-by: Christophe PAVOT <christophe.pavot@wiifor.com>
2021-09-29 14:47:11 +03:00
6008275aae Properly handle in-progress C++ standards when invoking Cppcheck // Resolve #3944 (#4070) 2021-09-29 14:46:02 +03:00
edf8bb3945 Bump version to 5.2.1b3 2021-09-27 22:59:58 +03:00
dd7d133263 Dump "embedded_result.output" 2021-09-27 22:59:36 +03:00
b6f783674b Allowed to override a default library builder via a new `builder` field in a `build` group of `library.json` // Resolve #3957 2021-09-26 15:27:41 +03:00
eab70fae3b Properly handle "--keep-build-dir" option in platformio ci command (#4061)
This fixes #4011 and possible "FileExists" errors when running the "platformio ci"
command, by safely copying sources to the build folder
2021-09-23 23:26:42 +03:00
fed40ef104 Add debug information when a test fails on Win/Py3.8 2021-09-17 21:06:08 +03:00
6d087f5a38 Bump version to 5.2.1b2 2021-09-16 22:07:01 +03:00
0edcf33547 Use "ubuntu-18.04" for project examples (CI) 2021-09-16 22:06:45 +03:00
443417b0f4 PyLint fix 2021-09-16 21:56:09 +03:00
369e994b0d Check for "build.mcu" and "build.cpu" when looking for precompiled library // Issue #405 2021-09-16 21:51:53 +03:00
55469327c6 Bump version to 5.2.1b1 2021-09-16 21:16:21 +03:00
27f326673c Fixed a "KeyError: Invalid board option 'build.cpu'" when using a precompiled library with a board that does not have a CPU field in the manifest // Resolve #405 2021-09-16 21:13:54 +03:00
e6fd766fff Bump version to 5.2.1a1 2021-09-14 13:03:47 +03:00
7da3ccfacb Merge tag 'v5.2.0' into develop
Bump version to 5.2.0
2021-09-13 19:00:10 +03:00
624d6b3b0b Merge branch 'release/v5.2.0' 2021-09-13 19:00:09 +03:00
9528083a66 Bump version to 5.2.0 2021-09-13 18:59:53 +03:00
55408f6ccb Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files // Resolve #4019 2021-09-13 14:56:24 +03:00
dce5a39b10 Process "precompiled" and "ldflags" properties of the "library.properties" manifest // Resolve #3994 2021-09-13 14:48:48 +03:00
03a23876a7 Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files // Resolve #4019 2021-09-13 14:04:33 +03:00
775357dd94 Better error handling if git is not installed // Resolve #4013 2021-09-13 13:31:53 +03:00
d10cbb2823 Fix link to clang-tidy (#4049) 2021-09-13 12:36:56 +03:00
63a2465bac Update check tools to the latest available // Resolve #4041 2021-09-10 18:11:48 +03:00
d97ed52e91 Sync docs 2021-09-07 15:17:59 +03:00
e1dc12c14d Docs: Document "platformio-ide.pioHomeServerHttpHost" setting for VSCode 2021-09-02 12:47:17 +03:00
7c755d4e2d Sync docs 2021-08-31 16:23:24 +03:00
55b786d9f0 Use byte-mode for writing binary file 2021-08-28 13:21:46 +03:00
131f4be4ea Fix PyLint's "use-dict-literal" and "use-list-literal" 2021-08-28 13:14:40 +03:00
d819617d2b Specify encoding for "open()" functions 2021-08-28 13:10:07 +03:00
b9219a2b62 Update "zeroconf" deps to 0.36 2021-08-28 12:31:02 +03:00
554e378dd6 Sync docs 2021-08-28 12:30:38 +03:00
cc11402bc9 Sync docs 2021-08-14 15:41:44 +03:00
40220f92c1 Sync docs 2021-08-14 15:25:25 +03:00
8c4d9021c2 Update deps 2021-08-14 12:53:49 +03:00
efefb02d86 Sync docs 2021-08-14 12:53:30 +03:00
3ee281aaf9 Update SPDX License List to 3.14 2021-08-09 17:46:56 +03:00
097b6d5097 PyLint fixes 2021-08-05 18:13:22 +03:00
6cdaf05f98 Sync docs 2021-08-05 18:13:00 +03:00
3be0f58c30 Sync docs 2021-08-04 14:58:54 +03:00
f3489a3b01 Sync docs 2021-08-02 13:52:06 +03:00
173dbeb24a Bump version to 5.2.0b1 2021-08-02 13:11:23 +03:00
0607b86818 Upgraded build engine to the SCons 4.2 2021-08-02 13:10:37 +03:00
1282a65bcb Update Arduino udev rule to include latest Portenta board
Resolves #4014
2021-08-02 12:12:52 +03:00
45d3207dfe Docs: Sync dev-platforms 2021-07-31 18:48:08 +03:00
76b46f59e9 Fix lib test 2021-07-30 20:13:53 +03:00
19fa108f61 Docs: Add "Copy" button to CODE blocks 2021-07-30 17:32:22 +03:00
2372d06591 Sync docs 2021-07-26 19:26:33 +03:00
7015375892 Docs: Revert "html_favicon" path 2021-07-23 15:32:02 +03:00
e9bf2b361f Update deps and sync docs 2021-07-23 15:05:01 +03:00
51b790b767 Bump version to 5.2.0a9 2021-07-12 15:06:42 +03:00
ac84431361 Take into account package's "system" when checking for duplicates 2021-07-12 15:06:06 +03:00
7dc8463da9 Fix charmap error (#3998)
* Fix charmap error

Fix charmap error on Cyrillic characters in platformio.ini file #3493

* Update config.py

Co-authored-by: Ivan Kravets <me@ikravets.com>
2021-07-07 18:25:55 +03:00
71ae579bc0 PyLint fix 2021-07-05 16:06:02 +03:00
5036d25b60 Enable Python version auto-detection for Black formatter 2021-07-05 13:31:23 +03:00
ff6d169862 Fix PyLint for v2.9.3 2021-07-05 13:30:37 +03:00
dde8898aae Bump zeroconf to 0.32.* (#3991) 2021-07-05 12:57:30 +03:00
72cc23ef46 Fix PyLint warning with "No exception type(s) specified (bare-except)" 2021-06-29 18:25:20 +03:00
5390b4ed42 Add Github token for Slack notification 2021-06-29 18:24:47 +03:00
17c7d90d52 Sync docs 2021-06-29 18:11:08 +03:00
5c3b5be613 Fix TypeError: 'NoneType' object is not callable 2021-06-29 18:07:45 +03:00
5ab7769745 Bump version to 5.2.0a8 2021-06-24 16:43:00 +03:00
05374d1145 Match buffered data from debugging server 2021-06-24 16:42:45 +03:00
311e10f91e Ensure all patterns are replaced in debug init script 2021-06-24 16:00:13 +03:00
2b94791387 Bump version to 5.2.0a7 2021-06-22 14:28:40 +03:00
fbcae11cd0 Fix project generator 2021-06-22 14:28:04 +03:00
0d6eff2a9a Sync docs 2021-06-22 14:27:33 +03:00
6a9b7fdb6d Update SPDX License List to 3.13 2021-06-03 16:32:53 +03:00
e8f703648a Docs: Use Python 3 for CI integration 2021-06-01 18:24:17 +03:00
710f82de0f Up uvicorn to 0.14 & click to 8.0 2021-06-01 17:59:18 +03:00
bee35acfa6 Sync docs 2021-06-01 17:56:55 +03:00
90fdaf80e4 Sync docs 2021-05-31 18:25:54 +03:00
27feb1ddd7 Added support for Click 8.0; updated other deps 2021-05-19 19:43:41 +03:00
2be7e0f7e6 Docs: Promote PlatformIO Labs blog posts 2021-05-13 15:28:09 +03:00
186ab70bf9 Add udev rule for Raspberry Pi Pico boards 2021-05-10 11:38:05 +03:00
0fa9006e45 Sync docs: CircleCI updates 2021-05-03 22:34:43 +03:00
60c83bae93 Docs: Sync dev-platforms 2021-05-01 13:44:28 +03:00
553c398c8e Show package "system" info before publishing 2021-04-30 18:06:35 +03:00
1c90bb383f Sync docs 2021-04-29 19:46:17 +03:00
4281225b02 Sync docs 2021-04-29 19:24:44 +03:00
14dc9c6c43 Sync docs 2021-04-29 18:38:44 +03:00
c9e10b1a3e Fix issue with broken redirect 2021-04-29 14:43:27 +03:00
915c850760 Docs: Fix JS redirect URL 2021-04-29 12:47:57 +03:00
2c3f430203 Tidy up Docs CI 2021-04-28 20:59:01 +03:00
1a152ed7fa Add deploy step to CI configuration 2021-04-28 20:18:23 +03:00
5953480807 Docs: Fix broken link for RTD page 2021-04-28 20:16:01 +03:00
b5c1a195be Fix PyLint issues: consider-using-with 2021-04-28 19:59:37 +03:00
310cc086c6 Docs: Minor fixes to "redirect" page generator 2021-04-28 19:59:12 +03:00
61d6cd3c18 Apply black formatter 2021-04-28 19:58:50 +03:00
cccabf5330 Add missed "sphinx-notfound-page" package for docs 2021-04-28 13:19:49 +03:00
6f33460afd Remove debugging code 2021-04-28 13:17:22 +03:00
603d524aaf Refactor docs to be deployed as a static content 2021-04-28 13:10:19 +03:00
eb2cd001b6 Use private "_idedata" target when fetching data for debugging 2021-04-24 18:01:35 +03:00
b5b57790be Validate package manifest when packing archive or publishing a package 2021-04-23 22:02:07 +03:00
286f4ef961 Bump version to 5.2.0a6 2021-04-21 20:52:27 +03:00
ad28d1906c Improve a package publishing process 2021-04-21 20:51:54 +03:00
dfdccac67d Remove unnecessary "ensure_python3()" blocks 2021-04-20 20:28:49 +03:00
b8c2752237 Docs: Add information on how to avoid extra script running when IDE fetches metadata 2021-04-16 13:36:53 +03:00
834c7b0def Bump version to 5.2.0a5 2021-04-12 22:38:56 +03:00
5bfe70142e Switch to project directory before starting debugging process 2021-04-12 22:38:21 +03:00
b35c5a22bb Fix a broken support for custom configuration file for pio debug command // Resolve #3922 2021-04-11 22:21:01 +03:00
eecc825c90 PyLint 2021-04-11 22:20:09 +03:00
3823c22dad Update Release Notes 2021-04-07 21:30:06 +03:00
551bd3dbfe Explicitly specify PROGSUFFIX when compiling final binary (#3918)
Resolves #3906
2021-04-02 17:09:38 +03:00
7e9956963a Remove a note about using pio ci for uploading // Resolve #3903 2021-04-02 15:23:34 +03:00
80c24a1993 Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms // Resolve #3872 2021-04-02 15:19:18 +03:00
66091bae24 Disable GDB "startup-with-shell" only on Unix platform 2021-04-02 14:44:38 +03:00
73d4f10f4b Bump version to 5.2.0a4 2021-04-01 21:16:42 +03:00
ee7ea77fc3 Fixed an error "Unknown development platform" when running unit tests on a clean machine // Resolve #3901 2021-04-01 21:15:14 +03:00
32e1cbe2a3 Provide solution for issue #3417 2021-03-31 18:28:06 +03:00
3539724843 Update "zeroconf" dependency to 0.29 2021-03-31 17:33:26 +03:00
940b25f158 Sync docs & examples 2021-03-31 17:32:57 +03:00
37e601e5b5 Ensure that a serial port is ready before running unit tests on a remote target // Resolve #3742 2021-03-24 19:07:40 +02:00
0230374709 Document new VSCode settings: activateProjectOnTextEditorChange & autoOpenPlatformIOIniFile 2021-03-24 13:04:20 +02:00
86db237e5d Update Cppcheck and PVS-Studio packages // Resolve #3898 2021-03-23 21:17:32 +02:00
1542b1cebb Bump version to 5.2.0a3 2021-03-20 10:32:14 +02:00
990071af5c Fix issue with missed compat.path_to_unicode // Resolve #3894 2021-03-20 10:31:55 +02:00
f543e00307 Bump version to 5.2.0a2 2021-03-19 20:26:26 +02:00
34b4f8265a Debug unit tests created with PlatformIO Unit Testing solution // Resolve #948 2021-03-19 20:25:30 +02:00
a366d1af2a Use "target remote" for mspdebug 2021-03-19 18:26:09 +02:00
ebe5785a91 Allow overriding default debugging flags from dev-platform 2021-03-19 17:11:25 +02:00
887d46725b Debug native (desktop) application on a host machine // Resolve #980 2021-03-19 17:02:11 +02:00
a326b718f2 Handle legacy $LOAD_CMD "init_cmds" 2021-03-19 16:09:38 +02:00
c14b298cb9 Fixed an issue with silent hanging when a custom debug server is not found // Resolve #3756 2021-03-19 15:55:42 +02:00
9cca8f3f55 Split debugging client to base and GDB // Resolve #3757 2021-03-19 15:47:20 +02:00
f5cee56740 Fix issue when disabling "debug_init_break" did not work 2021-03-19 14:09:43 +02:00
972d183d85 Use a cached build configuration 2021-03-19 13:46:54 +02:00
eebdf04357 Load "idedata" configuration from a dumped file 2021-03-19 13:46:27 +02:00
9ede20a367 Disable checking for "__PLATFORMIO_BUILD_DEBUG__" that is not available in g2 mode 2021-03-19 13:10:29 +02:00
b0c3e22a52 Configure a custom pattern to determine when debugging server is started with a new debug_server_ready_pattern option 2021-03-19 12:30:16 +02:00
a78db17784 Drop support for Python 2 2021-03-19 00:21:44 +02:00
dbb9998f69 Refactor debugging configuration, add support for server_ready_pattern // Resolve #3401 2021-03-18 23:42:54 +02:00
2745dbd124 PyLint fix 2021-03-17 23:14:22 +02:00
c0357daf01 Remove Python 2 code 2021-03-17 21:08:06 +02:00
064fa6027d Bump version to 5.2.0a1 2021-03-17 20:07:26 +02:00
779e02a05e Use "connect_read_pipe" on Unix 2021-03-17 20:06:52 +02:00
e222d0356a Merge branch 'feature/debug-async' into develop 2021-03-17 18:25:47 +02:00
d2ae333bb8 Merge branch 'release/v5.1.1' 2021-03-17 18:17:46 +02:00
764c42a810 Merge tag 'v5.1.1' into develop
Bump version to 5.1.1
2021-03-17 18:17:46 +02:00
18b18f1c3d Bump version to 5.1.1 2021-03-17 18:17:40 +02:00
b54a8b40a4 Refactor Unified Debugger to native Python Asynchronous I/O stack // Resolve #3793 , Resolve #3595 2021-03-17 17:42:11 +02:00
edf724d20d Sync docs 2021-03-15 17:01:44 +02:00
622a190a61 Avoid "rustup" when building cryptography for contrib-pysite // Resolve #3865 2021-03-15 17:00:16 +02:00
5b4a78ba20 Bump version to 5.1.1b1 2021-03-11 14:49:20 +02:00
44b85f6e4b Switch Cppcheck to analyze project per file // Issue #3797
Cppcheck doesn't provide a proper report when one of the files in the check list is broken.
If we run the analysis on a per-file basis, then Cppcheck will be able to report at least the defects
from valid source files.
2021-03-11 13:49:27 +02:00
7f1f760645 Preserve user-specified debug configurations in VSCode integration (#3878)
* Preserve user-specified debug configurations in VSCode integration

Issue #3824

* Tidy up Python code
2021-03-10 14:54:52 +02:00
54d8c96c30 Update SPDX license list to 3.12 2021-03-09 22:01:58 +02:00
c6ab7827e7 Fixed incorrect size of unnecessary data // Resolve #3830 2021-03-09 19:26:22 +02:00
ae26079e2e Fixed an issue when code inspection fails with "Bad JSON" // Resolve #3790 2021-03-09 19:20:30 +02:00
3e993156f2 Suppress printing unnecessary info in silent mode // Resolve #3837 2021-03-08 12:16:53 +02:00
3b2fafd789 Add new test for check command and project with whitespace 2021-03-04 22:27:00 +02:00
72ebaddcb8 Handle possible whitespaces in project path for PVS-Studio (#3849) 2021-03-04 22:22:09 +02:00
5a9950cc19 Sync docs 2021-03-04 18:52:12 +02:00
cf29d7e400 Typo fix 2021-03-04 18:52:02 +02:00
244dba3614 JFrog shuts down Bintray 2021-03-03 21:31:42 +02:00
21886517e1 Bump version to 5.1.1a3 2021-03-01 17:59:58 +02:00
3996236729 Report detailed server error to PIO Home frontend 2021-03-01 17:59:40 +02:00
560cb3ac82 Sync docs 2021-02-27 19:57:40 +02:00
81c7e23ae9 Bump version to 5.1.1a2 2021-02-27 19:44:11 +02:00
0b8bd6d4fc Migrate to Async JSON-RPC package 2021-02-27 19:43:43 +02:00
7c271c8207 Better detection of native dev-platform for unit testing // Resolve #3851 2021-02-27 18:53:26 +02:00
58947d91a6 PyLint fixes 2021-02-27 17:13:30 +02:00
20096be990 Sync docs 2021-02-26 13:39:13 +02:00
7c8508b651 Fixed an issue with device monitor when the “send_on_enter” filter didn’t send EOL chars // Resolve #3787 2021-02-10 14:43:50 +02:00
b56d0fdd9b Sync docs & examples 2021-02-10 14:43:12 +02:00
d0cc06f766 Move isort settings to "tox.ini" 2021-02-06 16:56:44 +02:00
d8d2b215d1 Minor improvement 2021-02-03 23:11:47 +02:00
c478d383b4 Sync docs 2021-02-03 23:10:01 +02:00
e01cd1c037 Bump version to 5.1.1a1 2021-02-01 13:01:31 +02:00
e63019c469 Fixed a "The command line is too long" issue with a linking process on Windows // Resolve #3827 2021-02-01 12:52:00 +02:00
90a325a1b2 Merge branch 'release/v5.1.0' 2021-01-28 19:23:14 +02:00
698594525f Merge tag 'v5.1.0' into develop
Bump version to 5.1.0
2021-01-28 19:23:14 +02:00
fd540148f3 Bump version to 5.1.0 2021-01-28 19:23:06 +02:00
078a024931 Configure default debug_speed 2021-01-28 13:52:11 +02:00
f8193b2419 Bump version to 5.1.0rc3 2021-01-27 23:06:42 +02:00
808ba603c5 Fixed an issue when "pio device monitor --eol" and "send_on_enter" filter do not work properly // Resolve #3787 2021-01-27 23:06:18 +02:00
61d70fa688 Include Unity framework for IDE data only if there are tests in project 2021-01-27 22:40:19 +02:00
493a33e754 Drop support for Python 2 2021-01-27 22:25:42 +02:00
bd75c3e559 Bump version to 5.1.0rc2 2021-01-27 20:58:13 +02:00
cb9e72a879 Dump build flags using SCons.Subst.SUBST_CMD 2021-01-27 20:57:53 +02:00
9d2fd4982f Cleanup code 2021-01-27 20:40:25 +02:00
eed9a0e376 Merge branch 'feature/3792-maxleng-cmd' into develop 2021-01-27 20:30:39 +02:00
d77dbb2cca Use "TEMPFILEARGESCFUNC" for GCC workaround on Windows 2021-01-27 20:30:28 +02:00
7810946484 Use project build folder for tempfile workaround with command maxlen 2021-01-27 18:47:54 +02:00
e2906e3be5 Refactored a workaround for a maximum command line character limitation // Resolve #3792 2021-01-27 16:10:13 +02:00
0a8b66ee95 Configure a custom debug adapter speed using a new debug_speed option // Resolve #3799 2021-01-26 21:21:41 +02:00
8ff270c5f7 Skip non-existing package when checking for update // Resolve #3818 2021-01-26 17:05:37 +02:00
4012a86cac Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source // Resolve #3816 2021-01-26 16:15:11 +02:00
dd4fff3a79 Bump version to 5.1.0rc1 2021-01-25 23:50:41 +02:00
0ed99b7687 Added a new `--session-id option to pio home` // Resolve #3397 2021-01-25 23:44:26 +02:00
2c389ae11e Added new check_prune_system_threshold setting 2021-01-24 17:21:22 +02:00
15ff8f9d2a Bump version to 5.0.5b5 2021-01-24 15:58:07 +02:00
bd4d3b914b Revert "lib_compat_mode" changes // Resolve #3811 Resolve #3806 2021-01-24 15:49:56 +02:00
59b02120b6 New options for system prune command: remove unnecessary core and development platform packages // Resolve #923 2021-01-23 23:20:53 +02:00
92655c30c1 Disabled automatic removal of unnecessary development platform packages // Resolve #3708, Resolve #3770 2021-01-23 22:34:48 +02:00
484567f242 Project's "lib_compat_mode" has higher priority than "library.json" 2021-01-23 15:54:52 +02:00
ef6e70a38b Fixed an issue when unnecessary packages were removed in `update --dry-run` mode // Resolve #3809 2021-01-23 15:24:32 +02:00
e695e30a9b Fixed an issue with compiler driver for ".ccls" language server // Resolve #3808 2021-01-23 14:44:53 +02:00
65e67b64bd Remove unnecessary dependencies from contrib-pysite 2021-01-22 22:55:45 +02:00
ddbe339541 Update to iSort 5.0 2021-01-22 22:55:02 +02:00
b2c0e6a8c2 Sync docs 2021-01-22 22:46:09 +02:00
f9384ded27 Fixed an issue when “strict” compatibility mode was not used for a library with custom “platforms” field in library.json manifest // Resolve #3806 2021-01-22 22:45:36 +02:00
4488f25ce0 Bump version to 5.0.5b4 2021-01-20 23:26:22 +02:00
52b22b5784 Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" // Resolve #3804 , Resolve #3417 2021-01-20 20:45:23 +02:00
5a356140d6 Sync examples and docs 2021-01-20 20:44:43 +02:00
e79de0108c Upgraded build engine to the SCons 4.1 2021-01-20 16:15:05 +02:00
985f31877c Automatically install tool-unity when there are tests and "idedata" target is called 2021-01-20 15:14:45 +02:00
11a71b7fbb Bump version to 5.0.5b3 2021-01-20 14:37:19 +02:00
7f26c11c9d Fix an issue with "coroutine' object has no attribute 'addCallback'" 2021-01-20 14:36:45 +02:00
9b93fcd947 Do not install tool-unity even for non-test projects 2021-01-20 14:27:03 +02:00
733ca5174b Bump version to 5.0.5b2 2021-01-18 21:19:57 +02:00
bd897d780b Implement "__shutdown__" endpoint for PIO Home server 2021-01-18 21:19:15 +02:00
429065d2b9 Legacy support for PIO Home "__shutdown__" query request 2021-01-18 20:53:19 +02:00
b90734f1e2 List multicast DNS services only when PY3 2021-01-18 20:51:50 +02:00
db97a7d9d3 Bump version to 5.0.5b1 2021-01-18 18:21:27 +02:00
6ff67aeadf Significantly speedup PlatformIO Home loading time by migrating to native Python 3 Asynchronous I/O 2021-01-18 18:20:26 +02:00
dd7d282d17 Improved listing of multicast DNS services 2021-01-18 18:17:10 +02:00
4e637ae58a Drop Python 2 from PIO Core test 2021-01-18 18:15:15 +02:00
1ec2e55322 Add udev rule for Atmel AVR Dragon (#3786) 2021-01-04 13:46:09 +02:00
556eb3f8c1 Docs: Update "Wiring Connections" section for ST-Link debugging probe 2020-12-31 13:47:05 +02:00
76b49ebc95 Increase timeout to 60sec when starting debug server and "ready_pattern" is used 2020-12-30 14:38:18 +02:00
e82443a302 Bump version to 5.0.5a1 2020-12-30 14:29:41 +02:00
5de86a6416 Check for debug server's "ready_pattern" in "stderr" 2020-12-30 14:29:19 +02:00
3f3c8cabb8 Merge branch 'release/v5.0.4' 2020-12-30 13:23:11 +02:00
cd59aa9afb Merge tag 'v5.0.4' into develop
Bump version to 5.0.4
2020-12-30 13:23:11 +02:00
34e12e575b Bump version to 5.0.4 2020-12-30 13:23:04 +02:00
4c8c261ab4 Raise an exception when trying to pack a package from tar.gz on Windows // Resolve #3776 2020-12-28 20:12:53 +02:00
099bb3b9ff Sync dev-platforms: docs + examples 2020-12-28 13:51:34 +02:00
c623a6aacc Fixed an issue with package publishing on Windows when Unix permissions are not preserved // Resolve #3776 2020-12-28 13:08:12 +02:00
ce7356794d Test examples from the official dev-platforms 2020-12-26 21:43:41 +02:00
523494f9cf Ignore CI tests from tokisaki dev-platform 2020-12-26 20:18:15 +02:00
0edc867d45 Bump version to 5.0.4rc1 2020-12-26 16:10:44 +02:00
ce4c45a075 Show a warning message about deprecated support for Python 2 and Python 3.5 2020-12-26 16:10:07 +02:00
e29941e3eb Update release notes with check tools updates 2020-12-22 21:30:01 +02:00
86ce3595f6 Update check tools packages // Resolve #3758
Updated tools: Cppcheck v2.3, PVS-Studio v7.11
2020-12-22 00:44:09 +02:00
6e958b8415 Handle possible issues when check tool cannot be executed // Resolve #3753
Now, each tool individually decides under what conditions the check is considered failed.
2020-12-22 00:21:32 +02:00
d485703768 Use "Updating to X.Y.Z" instead of "Outdated" when doing a real package updating 2020-12-11 17:53:48 +02:00
109e2107d1 Sync docs 2020-12-11 16:14:08 +02:00
3469905365 Decode subprocess output only for byte-strings 2020-12-02 15:15:17 +02:00
75b3846f8f Sync docs & examples 2020-12-02 15:15:02 +02:00
a9ec38208c Bump version to 5.0.4b1 2020-11-30 20:24:45 +02:00
c38b9a4144 Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS // Resolve #3740 2020-11-30 20:23:30 +02:00
b6128aeaa1 Apply formatting 2020-11-22 22:32:03 +02:00
881782be05 Allow spaces and dots in example's name (package manifest) 2020-11-22 21:42:25 +02:00
0c05930501 Sync docs 2020-11-22 21:41:47 +02:00
b96f2a19b5 Bump version to 5.0.4a2 2020-11-14 20:10:45 +02:00
c1906714ee Give a constant "PlatformIO" name for the C/C++ configuration 2020-11-14 20:10:22 +02:00
32181d1bd2 Improved `.ccls` configuration file for Emacs, Vim, and Sublime Text integrations // Issue #3735 2020-11-14 19:55:24 +02:00
7dfb413d87 Typo fix 2020-11-12 21:42:53 +02:00
7934a96ad1 Added "Core" suffix when showing PlatformIO Core version using `pio --version` command 2020-11-12 20:42:27 +02:00
abddbf9c7d Bump version to 5.0.4a1 2020-11-12 18:56:55 +02:00
77e66241f7 Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension 2020-11-12 18:56:34 +02:00
4b3f2e19a4 Merge branch 'release/v5.0.3' 2020-11-12 17:57:30 +02:00
b29c6485a8 Merge tag 'v5.0.3' into develop
Bump version to 5.0.3
2020-11-12 17:57:30 +02:00
f4dba7a68c Bump version to 5.0.3 2020-11-12 17:56:12 +02:00
2817408db3 Fixed an issue when pio package pack ignores some folders // Resolve #3730 2020-11-12 16:06:54 +02:00
9ff3c758eb Fix tests 2020-11-12 15:35:37 +02:00
3dcc189740 Use custom Pre-Debug task only for multi-env project 2020-11-12 15:35:19 +02:00
4a12d1954e Fixed an issue when the package manager tries to install a built-in library from the registry // Resolve #3662 2020-11-12 15:27:34 +02:00
e4d645110a Merge branch 'develop' of https://github.com/platformio/platformio-core into develop
# Conflicts:
#	HISTORY.rst
2020-11-12 15:25:51 +02:00
01a32067d5 Print ignored environments and test suites only in verbose mode
Resolve #3726
2020-11-12 15:22:47 +02:00
fc5ce4739c Added an error selector for Sublime Text build runner // Resolve #3733 2020-11-12 15:05:01 +02:00
ae7b8f9ecf Fix tests 2020-11-11 20:52:23 +02:00
0f5d2d6821 Sync docs 2020-11-11 19:44:39 +02:00
48eca22a00 Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used 2020-11-11 14:19:58 +02:00
5e164493a8 Sync docs 2020-11-09 11:39:26 +02:00
ead99208f2 Increase example name length in manifest to 255 chars 2020-11-09 11:38:46 +02:00
4f5ad05792 Docs: Document "Introducing Strict SSL/TLS" in migration 2020-11-04 14:07:40 +02:00
bc52e72605 Bump version to 5.0.3a2 2020-11-03 15:11:52 +02:00
038674835a Workaround for a broken locale 2020-11-02 12:27:17 +02:00
00f21c17ca Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-11-01 21:06:47 +02:00
818a1508a0 Docs: Use native ProjectConfig in the advanced scripting examples 2020-11-01 21:06:23 +02:00
2d9480a6a7 Support for GitPod environment 2020-11-01 21:05:03 +02:00
0bec4e25c8 Add support for C++ language standard in QtCreator template
Resolve #3719
2020-11-01 19:03:14 +02:00
950a540df4 Bump version to 5.0.3a1 2020-10-31 19:07:45 +02:00
2e66c5f807 Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode 2020-10-31 19:07:04 +02:00
7033c2616b Docs: Add info how to access PlatformIO Core CLI in VSCode 2020-10-31 12:44:37 +02:00
153 changed files with 4280 additions and 2825 deletions

View File

@ -8,14 +8,19 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [2.7, 3.7, 3.8]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
exclude:
- os: macos-latest
python-version: "3.6"
- os: windows-latest
python-version: "3.10"
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
@ -42,3 +47,4 @@ jobs:
job_name: '*Core*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}

View File

@ -4,13 +4,14 @@ on: [push, pull_request]
jobs:
build:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies
@ -30,3 +31,79 @@ jobs:
job_name: '*Docs*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
- name: Preserve Docs
if: ${{ github.event_name == 'push' }}
run: |
tar -czvf docs.tar.gz -C docs/_build html rtdpage
- name: Save artifact
if: ${{ github.event_name == 'push' }}
uses: actions/upload-artifact@v2
with:
name: docs
path: ./docs.tar.gz
deploy:
name: Deploy Docs
needs: build
runs-on: ubuntu-latest
env:
DOCS_REPO: platformio/platformio-docs
DOCS_DIR: platformio-docs
LATEST_DOCS_DIR: latest-docs
RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
with:
name: docs
- name: Unpack artifact
run: |
mkdir ./${{ env.LATEST_DOCS_DIR }}
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
- name: Delete Artifact
uses: geekyeggo/delete-artifact@v1
with:
name: docs
- name: Select Docs type
id: get-destination-dir
run: |
if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
echo "::set-output name=dst_dir::stable"
else
echo "::set-output name=dst_dir::latest"
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v2
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}
ref: gh-pages
- name: Synchronize Docs
run: |
rm -rf ${{ env.DOCS_DIR }}/.git
rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
rm -rf ${{ env.DOCS_DIR }}/page
mkdir -p ${{ env.DOCS_DIR }}/page
cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
fi
- name: Validate Docs
run: |
if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
echo "Docs folder is empty. Aborting!"
exit 1
fi
- name: Deploy to Github Pages
uses: peaceiris/actions-gh-pages@v3
with:
personal_token: ${{ secrets.PERSONAL_TOKEN }}
external_repository: ${{ env.DOCS_REPO }}
publish_dir: ./${{ env.DOCS_DIR }}
commit_message: Sync Docs

View File

@ -7,15 +7,15 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-16.04, windows-latest, macos-latest]
python-version: [2.7, 3.7]
os: [ubuntu-18.04, windows-latest, macos-latest]
python-version: [3.7]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
@ -26,7 +26,8 @@ jobs:
- name: Run on Linux
if: startsWith(matrix.os, 'ubuntu')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51"
run: |
# ChipKIT issue: install 32-bit support for GCC PIC32
sudo apt-get install libc6-i386
@ -40,7 +41,8 @@ jobs:
- name: Run on macOS
if: startsWith(matrix.os, 'macos')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,microchippic32,gd32v,nuclei,lattice_ice40"
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,lattice_ice40,gd32v"
run: |
df -h
tox -e testexamples
@ -50,7 +52,8 @@ jobs:
env:
PLATFORMIO_CORE_DIR: C:/pio
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,riscv_gap"
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
run: |
tox -e testexamples
@ -62,3 +65,4 @@ jobs:
job_name: '*Examples*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}

View File

@ -1,3 +0,0 @@
[settings]
line_length=88
known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope

View File

@ -14,8 +14,9 @@ disable=
too-few-public-methods,
useless-object-inheritance,
useless-import-alias,
fixme,
bad-option-value,
consider-using-dict-items,
consider-using-f-string,
; PY2 Compat
super-with-arguments,

View File

@ -8,6 +8,143 @@ PlatformIO Core 5
**A professional collaborative platform for embedded development**
5.2.2 (2021-10-20)
~~~~~~~~~~~~~~~~~~
- Override debugging firmware loading mode using ``--load-mode`` option for `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ command
- Added support for CLion IDE 2021.3 (`pull #4085 <https://github.com/platformio/platformio-core/issues/4085>`_)
- Removed debugging "legacy Click" message from CLI (`issue #4083 <https://github.com/platformio/platformio-core/issues/4083>`_)
- Fixed a "TypeError: sequence item 1: expected str instance, list found" issue when extending configuration option in `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__ with the multi-line default value (`issue #4082 <https://github.com/platformio/platformio-core/issues/4082>`_)
5.2.1 (2021-10-11)
~~~~~~~~~~~~~~~~~~
- Clean a build environment and installed library dependencies using a new ``cleanall`` target (`issue #4062 <https://github.com/platformio/platformio-core/issues/4062>`_)
- Override a default library builder via a new ``builder`` field in a ``build`` group of `library.json <https://docs.platformio.org/page/librarymanager/config.html#build>`__ manifest (`issue #3957 <https://github.com/platformio/platformio-core/issues/3957>`_)
- Updated `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.6 with new checks, increased reliability of advanced addons (MISRA/CERT) and various improvements
- Handle the "test" folder as a part of CLion project (`issue #4005 <https://github.com/platformio/platformio-core/issues/4005>`_)
- Improved handling of a library root based on "Conan" or "CMake" build systems (`issue #3887 <https://github.com/platformio/platformio-core/issues/3887>`_)
- Fixed a "KeyError: Invalid board option 'build.cpu'" when using a precompiled library with a board that does not have a CPU field in the manifest (`issue #4056 <https://github.com/platformio/platformio-core/issues/4056>`_)
- Fixed a "FileExist" error when the `platformio ci <https://docs.platformio.org/en/latest/userguide/cmd_ci.html>`__ command is used in pair with the ``--keep-build-dir`` option (`issue #4011 <https://github.com/platformio/platformio-core/issues/4011>`_)
- Fixed an issue with draft values of C++ language standards that broke static analysis via Cppcheck (`issue #3944 <https://github.com/platformio/platformio-core/issues/3944>`_)
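A quick sketch of the ``cleanall`` target added in this release; ``myboard`` is a placeholder environment name::

    # Remove the build environment together with the installed library dependencies
    pio run -e myboard --target cleanall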
5.2.0 (2021-09-13)
~~~~~~~~~~~~~~~~~~
* **PlatformIO Debugging**
- Boosted `PlatformIO Debugging <https://docs.platformio.org/page/plus/debugging.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
- `Debug unit tests <https://docs.platformio.org/page/plus/debugging.html#debug-unit-tests>`__ created with `PlatformIO Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html>`__ solution (`issue #948 <https://github.com/platformio/platformio-core/issues/948>`_)
- Debug native (desktop) applications on a host machine (`issue #980 <https://github.com/platformio/platformio-core/issues/980>`_)
- Support debugging on Windows using Windows CMD/CLI (`pio debug <https://docs.platformio.org/page/core/userguide/cmd_debug.html>`__) (`issue #3793 <https://github.com/platformio/platformio-core/issues/3793>`_)
- Configure a custom pattern to determine when debugging server is started with a new `debug_server_ready_pattern <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-server-ready-pattern>`__ option
- Fixed an issue with silent hanging when a custom debug server is not found (`issue #3756 <https://github.com/platformio/platformio-core/issues/3756>`_)
* **Package Management**
- Improved a package publishing process:
* Show package details
* Check for conflicting names in the PlatformIO Trusted Registry
* Check for duplicates and used version
* Validate package manifest
- Added a new option ``--non-interactive`` to `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command
* **Build System**
- Process "precompiled" and "ldflags" properties of the "library.properties" manifest (`issue #3994 <https://github.com/platformio/platformio-core/issues/3994>`_)
- Upgraded build engine to the SCons 4.2 (`release notes <https://github.com/SCons/scons/blob/rel_4.2.0/CHANGES.txt>`__)
- Fixed an issue with broken binary file extension when a custom ``PROGNAME`` contains dot symbols (`issue #3906 <https://github.com/platformio/platformio-core/issues/3906>`_)
- Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files (`issue #4019 <https://github.com/platformio/platformio-core/issues/4019>`_)
* **Static Code Analysis**
- Updated analysis tools:
* `Clang-Tidy <https://docs.platformio.org/page/plus/check-tools/clang-tidy.html>`__ v12.0.1 with new modules and extended checks list
* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.5.0 with improved code analysis and MISRA improvements
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.14 with support for intermodular analysis, improved MISRA support and new diagnostics
* **Miscellaneous**
- Ensure that a serial port is ready before running unit tests on a remote target (`issue #3742 <https://github.com/platformio/platformio-core/issues/3742>`_)
- Fixed an error "Unknown development platform" when running unit tests on a clean machine (`issue #3901 <https://github.com/platformio/platformio-core/issues/3901>`_)
- Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms (`issue #3872 <https://github.com/platformio/platformio-core/issues/3872>`_)
5.1.1 (2021-03-17)
~~~~~~~~~~~~~~~~~~
* Fixed a "The command line is too long" issue with a linking process on Windows (`issue #3827 <https://github.com/platformio/platformio-core/issues/3827>`_)
* Fixed an issue with `device monitor <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html>`__ when the "send_on_enter" filter didn't send EOL chars (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
* Fixed an issue with silent mode when unwanted data is printed to stdout (`issue #3837 <https://github.com/platformio/platformio-core/issues/3837>`_)
* Fixed an issue when code inspection fails with "Bad JSON" (`issue #3790 <https://github.com/platformio/platformio-core/issues/3790>`_)
* Fixed an issue with overriding user-specified debugging configuration information in VSCode (`issue #3824 <https://github.com/platformio/platformio-core/issues/3824>`_)
5.1.0 (2021-01-28)
~~~~~~~~~~~~~~~~~~
* **PlatformIO Home**
- Boosted `PlatformIO Home <https://docs.platformio.org/page/home/index.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
- Added a new ``--session-id`` option to `pio home <https://docs.platformio.org/page/core/userguide/cmd_home.html>`__ command that helps to keep PlatformIO Home isolated from other instances and protect from 3rd party access (`issue #3397 <https://github.com/platformio/platformio-core/issues/3397>`_)
* **Build System**
- Upgraded build engine to the SCons 4.1 (`release notes <https://scons.org/scons-410-is-available.html>`_)
- Refactored a workaround for a maximum command line character limitation (`issue #3792 <https://github.com/platformio/platformio-core/issues/3792>`_)
- Fixed an issue with Python 3.8+ on Windows when a network drive is used (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
* **Package Management**
- New options for `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command:
+ ``--dry-run`` option to show data that will be removed
+ ``--core-packages`` option to remove unnecessary core packages
+ ``--platform-packages`` option to remove unnecessary development platform packages (`issue #923 <https://github.com/platformio/platformio-core/issues/923>`_)
- Added new `check_prune_system_threshold <https://docs.platformio.org/page/core/userguide/cmd_settings.html#check-prune-system-threshold>`__ setting
- Disabled automatic removal of unnecessary development platform packages (`issue #3708 <https://github.com/platformio/platformio-core/issues/3708>`_, `issue #3770 <https://github.com/platformio/platformio-core/issues/3770>`_)
- Fixed an issue when unnecessary packages were removed in ``update --dry-run`` mode (`issue #3809 <https://github.com/platformio/platformio-core/issues/3809>`_)
- Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source (`issue #3816 <https://github.com/platformio/platformio-core/issues/3816>`_)
* **Debugging**
- Configure a custom debug adapter speed using a new `debug_speed <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-speed>`__ option (`issue #3799 <https://github.com/platformio/platformio-core/issues/3799>`_)
- Handle debugging server's "ready_pattern" in "stderr" output
* **Miscellaneous**
- Improved listing of `multicast DNS services <https://docs.platformio.org/page/core/userguide/device/cmd_list.html>`_
- Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" when using J-Link for firmware uploading on Linux (`issue #3804 <https://github.com/platformio/platformio-core/issues/3804>`_)
- Fixed an issue with a compiler driver for ".ccls" language server (`issue #3808 <https://github.com/platformio/platformio-core/issues/3808>`_)
- Fixed an issue when `pio device monitor --eol <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-eol>`__ and "send_on_enter" filter do not work properly (`issue #3787 <https://github.com/platformio/platformio-core/issues/3787>`_)
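A short sketch combining the maintenance features introduced in this release (the ``pio system prune`` options, the ``check_prune_system_threshold`` setting, and ``pio home --session-id``); the threshold value and session id are illustrative placeholders::

    # Preview what would be removed, then prune unnecessary core and platform packages
    pio system prune --dry-run
    pio system prune --core-packages --platform-packages

    # Raise the pruning threshold to 2048 MB
    pio settings set check_prune_system_threshold 2048

    # Start PIO Home bound to an isolated session id
    pio home --session-id 1234567890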
5.0.4 (2020-12-30)
~~~~~~~~~~~~~~~~~~
- Added "Core" suffix when showing PlatformIO Core version using ``pio --version`` command
- Improved ".ccls" configuration file for Emacs, Vim, and Sublime Text integrations
- Updated analysis tools:
* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.3 with improved C++ parser and several new MISRA rules
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.11 with new diagnostics and updated mass suppression mechanism
- Show a warning message about deprecated support for Python 2 and Python 3.5
- Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension
- Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS (`issue #3740 <https://github.com/platformio/platformio-core/issues/3740>`_)
- Fixed an issue with package publishing on Windows when Unix permissions are not preserved (`issue #3776 <https://github.com/platformio/platformio-core/issues/3776>`_)
5.0.3 (2020-11-12)
~~~~~~~~~~~~~~~~~~
- Added an error selector for `Sublime Text <https://docs.platformio.org/page/integration/ide/sublimetext.html>`__ build runner (`issue #3733 <https://github.com/platformio/platformio-core/issues/3733>`_)
- Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode
- Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used
- Print ignored test suites and environments in the test summary report only in verbose mode (`issue #3726 <https://github.com/platformio/platformio-core/issues/3726>`_)
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
- Fixed an issue when `pio package pack <https://docs.platformio.org/page/core/userguide/package/cmd_pack.html>`__ ignores some folders (`issue #3730 <https://github.com/platformio/platformio-core/issues/3730>`_)
5.0.2 (2020-10-30)
~~~~~~~~~~~~~~~~~~
@ -118,31 +255,32 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/
- Display system-wide information using a new `pio system info <https://docs.platformio.org/page/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
- Remove unused data using a new `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
- Show ignored project environments only in the verbose mode (`issue #3641 <https://github.com/platformio/platformio-core/issues/3641>`_)
- Do not escape compiler arguments in VSCode template on Windows.
- Do not escape compiler arguments in VSCode template on Windows
- Drop support for Python 2 and 3.5.
.. _release_notes_4:
PlatformIO Core 4
-----------------
See `PlatformIO Core 4.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-4>`__.
See `PlatformIO Core 4.0 history <https://github.com/platformio/platformio-core/blob/v4.3.4/HISTORY.rst>`__.
PlatformIO Core 3
-----------------
See `PlatformIO Core 3.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-3>`__.
See `PlatformIO Core 3.0 history <https://github.com/platformio/platformio-core/blob/v3.6.7/HISTORY.rst>`__.
PlatformIO Core 2
-----------------
See `PlatformIO Core 2.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-2>`__.
See `PlatformIO Core 2.0 history <https://github.com/platformio/platformio-core/blob/v2.11.2/HISTORY.rst>`__.
PlatformIO Core 1
-----------------
See `PlatformIO Core 1.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-1>`__.
See `PlatformIO Core 1.0 history <https://github.com/platformio/platformio-core/blob/v1.5.0/HISTORY.rst>`__.
PlatformIO Core Preview
-----------------------
See `PlatformIO Core Preview history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-preview>`__.
See `PlatformIO Core Preview history <https://github.com/platformio/platformio-core/blob/v0.10.2/HISTORY.rst>`__.

View File

@ -1,14 +1,14 @@
lint:
pylint -j 6 --rcfile=./.pylintrc ./platformio
pylint -j 6 --rcfile=./.pylintrc ./tests
pylint -j 6 --rcfile=./.pylintrc ./platformio
isort:
isort -rc ./platformio
isort -rc ./tests
isort ./platformio
isort ./tests
format:
black --target-version py27 ./platformio
black --target-version py27 ./tests
black ./platformio
black ./tests
test:
py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

docs

Submodule docs updated: deae09a880...66f67cb335

View File

@ -14,7 +14,7 @@
import sys
VERSION = (5, 0, 2)
VERSION = (5, 2, 2)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
@ -31,11 +31,11 @@ __description__ = (
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_api__ = [
@ -47,13 +47,13 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.3.1",
"contrib-piohome": "~3.3.4",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
"tool-cppcheck": "~1.210.0",
"tool-clangtidy": "~1.100000.0",
"tool-pvs-studio": "~7.9.0",
"tool-scons": "~4.40200.0",
"tool-cppcheck": "~1.260.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.14.0",
}
__check_internet_hosts__ = [

View File

@ -12,15 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=import-outside-toplevel
import os
import sys
from traceback import format_exc
import click
from platformio import __version__, exception, maintenance, util
from platformio import __version__, exception
from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN
from platformio.compat import IS_CYGWIN, ensure_python3
try:
import click_completion # pylint: disable=import-error
@ -33,7 +35,7 @@ except: # pylint: disable=bare-except
@click.command(
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
)
@click.version_option(__version__, prog_name="PlatformIO")
@click.version_option(__version__, prog_name="PlatformIO Core")
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@ -60,18 +62,35 @@ def cli(ctx, force, caller, no_ansi):
except: # pylint: disable=bare-except
pass
from platformio import maintenance
maintenance.on_platformio_start(ctx, force, caller)
@cli.resultcallback()
@click.pass_context
def process_result(ctx, result, *_, **__):
try:
@cli.result_callback()
@click.pass_context
def process_result(ctx, result, *_, **__):
_process_result(ctx, result)
except (AttributeError, TypeError): # legacy support for Click > 8.0.1
@cli.resultcallback()
@click.pass_context
def process_result(ctx, result, *_, **__):
_process_result(ctx, result)
def _process_result(ctx, result):
from platformio import maintenance
maintenance.on_platformio_end(ctx, result)
@util.memoized()
def configure():
if CYGWIN:
if IS_CYGWIN:
raise exception.CygwinEnvDetected()
# https://urllib3.readthedocs.org
@ -105,6 +124,7 @@ def main(argv=None):
assert isinstance(argv, list)
sys.argv = argv
try:
ensure_python3(raise_exception=True)
configure()
cli() # pylint: disable=no-value-for-parameter
except SystemExit as e:
@ -112,6 +132,9 @@ def main(argv=None):
exit_code = int(e.code)
except Exception as e: # pylint: disable=broad-except
if not isinstance(e, exception.ReturnErrorCode):
if sys.version_info.major != 2:
from platformio import maintenance
maintenance.on_platformio_exception(e)
error_str = "Error: "
if isinstance(e, exception.PlatformioException):

View File

@ -24,7 +24,7 @@ import uuid
from os.path import dirname, isdir, isfile, join, realpath
from platformio import __version__, exception, fs, proc
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio.compat import IS_WINDOWS, hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.helpers import get_default_projects_dir, get_project_core_dir
@ -55,6 +55,10 @@ DEFAULT_SETTINGS = {
"description": "Check for the platform updates interval (days)",
"value": 7,
},
"check_prune_system_threshold": {
"description": "Check for pruning unnecessary data threshold (megabytes)",
"value": 1024,
},
"enable_cache": {
"description": "Enable caching for HTTP API requests",
"value": True,
@ -110,8 +114,8 @@ class State(object):
def __exit__(self, type_, value, traceback):
if self.modified:
try:
with open(self.path, "w") as fp:
fp.write(dump_json_to_unicode(self._storage))
with open(self.path, mode="w", encoding="utf8") as fp:
fp.write(json.dumps(self._storage))
except IOError:
raise exception.HomeDirPermissionsError(get_project_core_dir())
self._unlock_state_file()
@ -255,6 +259,8 @@ def get_cid():
uid = None
if os.getenv("C9_UID"):
uid = os.getenv("C9_UID")
elif os.getenv("GITPOD_GIT_USER_NAME"):
uid = os.getenv("GITPOD_GIT_USER_NAME")
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
try:
uid = json.loads(
@ -271,7 +277,7 @@ def get_cid():
uid = uuid.getnode()
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
cid = str(cid)
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
set_state_item("cid", cid)
return cid

View File

@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from os import environ, makedirs
from os.path import isdir, join
from time import time
import click
@ -29,7 +29,6 @@ from SCons.Script import Import # pylint: disable=import-error
from SCons.Script import Variables # pylint: disable=import-error
from platformio import compat, fs
from platformio.compat import dump_json_to_unicode
from platformio.platform.base import PlatformBase
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_dir
@ -65,28 +64,35 @@ DEFAULT_ENV_OPTIONS = dict(
"pioide",
"piosize",
],
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
variables=clivars,
# Propagating External Environment
ENV=environ,
ENV=os.environ,
UNIX_TIME=int(time()),
BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=os.path.join("$BUILD_DIR", "compile_commands.json"),
LIBPATH=["$BUILD_DIR"],
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path(),
IDE_EXTRA_DATA={},
)
# Declare command verbose messages
command_strings = dict(
ARCOM="Archiving",
LINKCOM="Linking",
RANLIBCOM="Indexing",
ASCOM="Compiling",
ASPPCOM="Compiling",
CCCOM="Compiling",
CXXCOM="Compiling",
)
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
for name, value in command_strings.items():
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
@ -117,51 +123,55 @@ env.Replace(
BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
LIBSOURCE_DIRS=[
config.get_optional_dir("lib"),
join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
config.get_optional_dir("globallib"),
],
)
if (
compat.WINDOWS
compat.IS_WINDOWS
and sys.version_info >= (3, 8)
and env["PROJECT_DIR"].startswith("\\\\")
):
click.secho(
"There is a known issue with Python 3.8+ and mapped network drives on "
"Windows.\nPlease downgrade Python to the latest 3.7. More details at:\n"
"Windows.\nSee a solution at:\n"
"https://github.com/platformio/platformio-core/issues/3417",
fg="yellow",
)
if env.subst("$BUILD_CACHE_DIR"):
if not isdir(env.subst("$BUILD_CACHE_DIR")):
makedirs(env.subst("$BUILD_CACHE_DIR"))
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
env.CacheDir("$BUILD_CACHE_DIR")
if int(ARGUMENTS.get("ISATTY", 0)):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
if env.GetOption("clean"):
env.PioClean(env.subst("$BUILD_DIR"))
is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
if env.GetOption("clean") or is_clean_all:
env.PioClean(is_clean_all)
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
# Dynamically load dependent tools
if "compiledb" in COMMAND_LINE_TARGETS:
env.Tool("compilation_db")
if not isdir(env.subst("$BUILD_DIR")):
makedirs(env.subst("$BUILD_DIR"))
if not os.path.isdir(env.subst("$BUILD_DIR")):
os.makedirs(env.subst("$BUILD_DIR"))
env.LoadProjectOptions()
env.LoadPioPlatform()
env.SConscriptChdir(0)
env.SConsignFile(
join("$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]))
os.path.join(
"$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
)
)
for item in env.GetExtraScripts("pre"):
@ -202,7 +212,7 @@ env.AddPreAction(
),
)
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
##############################################################################
@ -211,17 +221,20 @@ if "envdump" in COMMAND_LINE_TARGETS:
click.echo(env.Dump())
env.Exit(0)
if "idedata" in COMMAND_LINE_TARGETS:
if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
try:
Import("projenv")
except: # pylint: disable=bare-except
projenv = env
click.echo(
"\n%s\n"
% dump_json_to_unicode(
projenv.DumpIDEData(env) # pylint: disable=undefined-variable
)
)
data = projenv.DumpIDEData(env)
# dump to a file for later reading by project.helpers.load_project_ide_data
with open(
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
mode="w",
encoding="utf8",
) as fp:
json.dump(data, fp)
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
env.Exit(0)
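
For context, the hunk above now also persists the IDE data to $BUILD_DIR/idedata.json in addition to printing it. A minimal reading sketch (the resolved build path is hypothetical):

import json
import os

build_dir = os.path.join(".pio", "build", "myenv")  # hypothetical resolved $BUILD_DIR
with open(os.path.join(build_dir, "idedata.json"), encoding="utf8") as fp:
    idedata = json.load(fp)
print(idedata["env_name"], idedata["cc_path"])
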
if "sizedata" in COMMAND_LINE_TARGETS:


@ -41,7 +41,7 @@ from platformio.proc import where_is_program
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
# which is the name that most clang tools search for by default.
# TODO: Is there a better way to do this than this global? Right now this exists so that the
# Is there a better way to do this than this global? Right now this exists so that the
# emitter we add can record all of the things it emits, so that the scanner for the top level
# compilation database can access the complete list, and also so that the writer has easy
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
@ -58,7 +58,7 @@ class __CompilationDbNode(SCons.Node.Python.Value):
def changed_since_last_build_node(*args, **kwargs):
""" Dummy decider to force always building"""
"""Dummy decider to force always building"""
return True
@ -104,7 +104,7 @@ def makeEmitCompilationDbEntry(comstr):
__COMPILATIONDB_ENV=env,
)
# TODO: Technically, these next two lines should not be required: it should be fine to
# Technically, these next two lines should not be required: it should be fine to
# cache the entries. However, they don't seem to update properly. Since they are quick
# to re-generate disable caching and sidestep this problem.
env.AlwaysBuild(entry)
@ -152,7 +152,7 @@ def WriteCompilationDb(target, source, env):
item["file"] = os.path.abspath(item["file"])
entries.append(item)
with open(str(target[0]), "w") as target_file:
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
json.dump(
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
)
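
For reference, the file written above follows the common Clang compilation-database layout; a single entry would look roughly like this (paths and command are hypothetical):

entry = {
    "directory": "/home/user/project",             # working directory of the compile
    "command": "gcc -o src/main.o -c src/main.c",  # full compile command
    "file": "/home/user/project/src/main.c",       # made absolute by the loop above
}
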


@ -14,12 +14,12 @@
from __future__ import absolute_import
import glob
import os
from glob import glob
from SCons.Defaults import processDefines # pylint: disable=import-error
import SCons.Defaults # pylint: disable=import-error
import SCons.Subst # pylint: disable=import-error
from platformio.compat import glob_escape
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import exec_command, where_is_program
@ -48,7 +48,7 @@ def _dump_includes(env):
for pkg in p.get_installed_packages():
if p.get_package_type(pkg.metadata.name) != "toolchain":
continue
toolchain_dir = glob_escape(pkg.path)
toolchain_dir = glob.escape(pkg.path)
toolchain_incglobs = [
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
@ -56,10 +56,20 @@ def _dump_includes(env):
os.path.join(toolchain_dir, "*", "include*"),
]
for g in toolchain_incglobs:
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
includes["toolchain"].extend(
[os.path.realpath(inc) for inc in glob.glob(g)]
)
# include Unity framework if there are tests in project
includes["unity"] = []
unity_dir = get_core_package_dir("tool-unity")
auto_install_unity = False
test_dir = env.GetProjectConfig().get_optional_dir("test")
if os.path.isdir(test_dir) and os.listdir(test_dir) != ["README"]:
auto_install_unity = True
unity_dir = get_core_package_dir(
"tool-unity",
auto_install=auto_install_unity,
)
if unity_dir:
includes["unity"].append(unity_dir)
@ -92,7 +102,7 @@ def _get_gcc_defines(env):
def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
item = item.strip()
if item:
defines.append(env.subst(item).replace("\\", ""))
@ -141,25 +151,17 @@ def _get_svd_path(env):
return None
def _escape_build_flag(flags):
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
def _subst_cmd(env, cmd):
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
def DumpIDEData(env, globalenv):
""" env here is `projenv`"""
env["__escape_build_flag"] = _escape_build_flag
LINTCCOM = (
"${__escape_build_flag(CFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
LINTCXXCOM = (
"${__escape_build_flag(CXXFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
"""env here is `projenv`"""
data = {
"env_name": env["PIOENV"],
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
"libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()],
"defines": _dump_defines(env),
"includes": _dump_includes(env),
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
@ -181,7 +183,7 @@ def DumpIDEData(env, globalenv):
env_ = env.Clone()
# https://github.com/platformio/platformio-atom-ide/issues/34
_new_defines = []
for item in processDefines(env_.get("CPPDEFINES", [])):
for item in SCons.Defaults.processDefines(env_.get("CPPDEFINES", [])):
item = item.replace('\\"', '"')
if " " in item:
_new_defines.append(item.replace(" ", "\\\\ "))
@ -189,7 +191,13 @@ def DumpIDEData(env, globalenv):
_new_defines.append(item)
env_.Replace(CPPDEFINES=_new_defines)
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
# export C/C++ build flags
data.update(
{
"cc_flags": _subst_cmd(env_, "$CFLAGS $CCFLAGS $CPPFLAGS"),
"cxx_flags": _subst_cmd(env_, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
}
)
return data
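
For context, the new _subst_cmd helper above leans on SCons' quote_spaces, which wraps only arguments containing whitespace, so such flags stay intact when the cc_flags/cxx_flags strings are later split into tokens. A small illustration (flag values are hypothetical):

from SCons.Subst import quote_spaces  # pylint: disable=import-error

flags = ["-Os", "-IC:/My Projects/include", "-DNDEBUG"]
print(" ".join(quote_spaces(f) for f in flags))
# -Os "-IC:/My Projects/include" -DNDEBUG
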


@ -33,7 +33,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.clients.http import InternetIsOffline
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manifest.parser import (
@ -59,6 +59,16 @@ class LibBuilderFactory(object):
clsname = "%sLibBuilder" % used_frameworks[0].title()
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
# Handle PlatformIOLibBuilder.manifest.build.builder
# pylint: disable=protected-access
if isinstance(obj, PlatformIOLibBuilder) and obj._manifest.get("build", {}).get(
"builder"
):
obj = getattr(
sys.modules[__name__], obj._manifest.get("build", {}).get("builder")
)(env, path, verbose=verbose)
assert isinstance(obj, LibBuilderBase)
return obj
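
For context, the block above implements the new `builder` override read from the `build` group of `library.json`; what the factory looks up on the parsed manifest is roughly this (the library name and chosen builder are hypothetical):

manifest = {
    "name": "FooLib",
    "build": {"builder": "ArduinoLibBuilder"},  # any *LibBuilder class defined in this module
}
assert manifest.get("build", {}).get("builder") == "ArduinoLibBuilder"
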
@ -86,7 +96,9 @@ class LibBuilderFactory(object):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
continue
with io.open(os.path.join(root, fname), errors="ignore") as fp:
with io.open(
os.path.join(root, fname), encoding="utf8", errors="ignore"
) as fp:
content = fp.read()
if not content:
continue
@ -126,9 +138,9 @@ class LibBuilderBase(object):
self._is_dependent = False
self._is_built = False
self._depbuilders = list()
self._circular_deps = list()
self._processed_files = list()
self._depbuilders = []
self._circular_deps = []
self._processed_files = []
# reset source filter, could be overridden with extra script
self.env["SRC_FILTER"] = ""
@ -142,7 +154,7 @@ class LibBuilderBase(object):
def __contains__(self, path):
p1 = self.path
p2 = path
if WINDOWS:
if IS_WINDOWS:
p1 = p1.lower()
p2 = p2.lower()
if p1 == p2:
@ -172,19 +184,19 @@ class LibBuilderBase(object):
@property
def include_dir(self):
if not all(
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
):
for name in ("include", "Include"):
d = os.path.join(self.path, name)
if os.path.isdir(d):
return d
return None
return os.path.join(self.path, "include")
@property
def src_dir(self):
return (
os.path.join(self.path, "src")
if os.path.isdir(os.path.join(self.path, "src"))
else self.path
)
for name in ("src", "Src"):
d = os.path.join(self.path, name)
if os.path.isdir(d):
return d
return self.path
def get_include_dirs(self):
items = []
@ -459,12 +471,22 @@ class LibBuilderBase(object):
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
self.env.PrependUnique(**{key: lb.env.get(key)})
if self.lib_archive:
do_not_archive = not self.lib_archive
if not do_not_archive:
nodes = self.env.CollectBuildFiles(
self.build_dir, self.src_dir, self.src_filter
)
if nodes:
libs.append(
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
self.env.BuildLibrary(
self.build_dir, self.src_dir, self.src_filter, nodes
)
)
else:
do_not_archive = True
if do_not_archive:
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
return libs
@ -479,6 +501,14 @@ class ArduinoLibBuilder(LibBuilderBase):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
@property
def include_dir(self):
if not all(
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
):
return None
return os.path.join(self.path, "include")
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
if os.path.isdir(os.path.join(self.path, "src")):
@ -545,6 +575,24 @@ class ArduinoLibBuilder(LibBuilderBase):
def is_platforms_compatible(self, platforms):
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
@property
def build_flags(self):
ldflags = [
LibBuilderBase.build_flags.fget(self), # pylint: disable=no-member
self._manifest.get("ldflags"),
]
if self._manifest.get("precompiled") in ("true", "full"):
# add to LDPATH {build.mcu} folder
board_config = self.env.BoardConfig()
for key in ("build.mcu", "build.cpu"):
libpath = os.path.join(self.src_dir, board_config.get(key, ""))
if not os.path.isdir(libpath):
continue
self.env.PrependUnique(LIBPATH=libpath)
break
ldflags = [flag for flag in ldflags if flag] # remove empty
return " ".join(ldflags) if ldflags else None
class MbedLibBuilder(LibBuilderBase):
def load_manifest(self):
@ -553,12 +601,6 @@ class MbedLibBuilder(LibBuilderBase):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
@property
def include_dir(self):
if os.path.isdir(os.path.join(self.path, "include")):
return os.path.join(self.path, "include")
return None
@property
def src_dir(self):
if os.path.isdir(os.path.join(self.path, "source")):
@ -671,7 +713,7 @@ class MbedLibBuilder(LibBuilderBase):
def _mbed_conf_append_macros(self, mbed_config_path, macros):
lines = []
with open(mbed_config_path) as fp:
with open(mbed_config_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if line == "#endif":
@ -690,7 +732,7 @@ class MbedLibBuilder(LibBuilderBase):
if len(tokens) < 2 or tokens[1] not in macros:
lines.append(line)
lines.append("")
with open(mbed_config_path, "w") as fp:
with open(mbed_config_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(lines))


@ -14,15 +14,30 @@
from __future__ import absolute_import
from hashlib import md5
from os import makedirs
from os.path import isdir, isfile, join
import hashlib
import os
import re
from platformio.compat import WINDOWS, hashlib_encode_data
from SCons.Platform import TempFileMunge # pylint: disable=import-error
from SCons.Subst import quote_spaces # pylint: disable=import-error
# Windows CLI has limit with command length to 8192
# Leave 2000 chars for flags and other options
MAX_LINE_LENGTH = 6000 if WINDOWS else 128072
from platformio.compat import IS_WINDOWS, hashlib_encode_data
# Command-line length limits depend on the platform:
# - Windows = 8192
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
def tempfile_arg_esc_func(arg):
arg = quote_spaces(arg)
if not IS_WINDOWS:
return arg
# GCC requires double Windows slashes, let's use UNIX separator
return WINPATHSEP_RE.sub(r"/\1", arg)
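
For context, the constants and helper above replace the old ad-hoc limit: the effective budget is 8192 - 512 = 7680 characters on Windows and 131072 - 512 = 130560 elsewhere, and on Windows the escape function flips backslashes to forward slashes (which GCC accepts inside an @response file) after quote_spaces has quoted anything containing spaces. A quick regex-only illustration (the path is hypothetical):

import re

WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
print(WINPATHSEP_RE.sub(r"/\1", r"C:\Users\dev\project\src\main.cpp"))
# C:/Users/dev/project/src/main.cpp
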
def long_sources_hook(env, sources):
@ -41,32 +56,16 @@ def long_sources_hook(env, sources):
return '@"%s"' % _file_long_data(env, " ".join(data))
def long_incflags_hook(env, incflags):
_incflags = env.subst(incflags).replace("\\", "/")
if len(_incflags) < MAX_LINE_LENGTH:
return incflags
# fix space in paths
data = []
for line in _incflags.split(" -I"):
line = line.strip()
if not line.startswith("-I"):
line = "-I" + line
data.append('-I"%s"' % line[2:])
return '@"%s"' % _file_long_data(env, " ".join(data))
def _file_long_data(env, data):
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
tmp_file = os.path.join(
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
)
if isfile(tmp_file):
if os.path.isfile(tmp_file):
return tmp_file
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
return tmp_file
@ -76,17 +75,21 @@ def exists(_):
def generate(env):
env.Replace(_long_sources_hook=long_sources_hook)
env.Replace(_long_incflags_hook=long_incflags_hook)
coms = {}
for key in ("ARCOM", "LINKCOM"):
coms[key] = env.get(key, "").replace(
kwargs = dict(
_long_sources_hook=long_sources_hook,
TEMPFILE=TempFileMunge,
MAXLINELENGTH=MAX_LINE_LENGTH,
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
TEMPFILESUFFIX=".tmp",
TEMPFILEDIR="$BUILD_DIR",
)
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
)
for key in ("_CCCOMCOM", "ASPPCOM"):
coms[key] = env.get(key, "").replace(
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
)
env.Replace(**coms)
env.Replace(**kwargs)
return env


@ -15,16 +15,17 @@
from __future__ import absolute_import
import atexit
import glob
import io
import os
import re
import sys
from tempfile import mkstemp
import tempfile
import click
from platformio import fs, util
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
from platformio.compat import get_filesystem_encoding, get_locale_encoding
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import exec_command
@ -116,7 +117,7 @@ class InoToCPPConverter(object):
return out_file
def _gcc_preprocess(self, contents, out_file):
tmp_path = mkstemp()[1]
tmp_path = tempfile.mkstemp()[1]
self.write_safe_contents(tmp_path, contents)
self.env.Execute(
self.env.VerboseAction(
@ -229,7 +230,7 @@ class InoToCPPConverter(object):
def ConvertInoToCpp(env):
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
os.path.join(src_dir, "*.pde")
)
@ -333,7 +334,13 @@ def ConfigureDebugFlags(env):
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
_cleanup_debug_flags(scope)
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
debug_flags = env.ParseFlags(
env.get("PIODEBUGFLAGS")
if env.get("PIODEBUGFLAGS")
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
else env.GetProjectOption("debug_build_flags")
)
env.MergeFlags(debug_flags)
optimization_flags = [
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))


@ -21,7 +21,7 @@ from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from platformio import fs, util
from platformio.compat import WINDOWS
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.package.meta import PackageItem
from platformio.package.version import get_original_version
from platformio.platform.exception import UnknownBoard
@ -52,6 +52,7 @@ def BoardConfig(env, board=None):
except (AssertionError, UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
return None
def GetFrameworkScript(env, framework):
@ -70,7 +71,6 @@ def LoadPioPlatform(env):
env["PIOPLATFORM"] = p.name
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
for pkg in p.get_installed_packages():
type_ = p.get_package_type(pkg.metadata.name)
if type_ not in ("toolchain", "uploader", "debugger"):
@ -82,12 +82,12 @@ def LoadPioPlatform(env):
else pkg.path,
)
if (
not WINDOWS
not IS_WINDOWS
and os.path.isdir(os.path.join(pkg.path, "lib"))
and type_ != "toolchain"
):
env.PrependENVPath(
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
os.path.join(pkg.path, "lib"),
)


@ -16,6 +16,7 @@
from __future__ import absolute_import
import json
import sys
from os import environ, makedirs, remove
from os.path import isdir, join, splitdrive
@ -23,9 +24,8 @@ from os.path import isdir, join, splitdrive
from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile
from platformio.compat import dump_json_to_unicode
from platformio.compat import IS_WINDOWS
from platformio.proc import exec_command
from platformio.util import get_systype
def _run_tool(cmd, env, tool_args):
@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
makedirs(build_dir)
tmp_file = join(build_dir, "size-data-longcmd.txt")
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write("\n".join(tool_args))
cmd.append("@" + tmp_file)
@ -164,7 +164,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
location = symbol_locations.get(hex(symbol["addr"]))
if not location or "?" in location:
continue
if "windows" in get_systype():
if IS_WINDOWS:
drive, tail = splitdrive(location)
location = join(drive.upper(), tail)
symbol["file"] = location
@ -220,7 +220,7 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
"sections": sections,
}
files = dict()
files = {}
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
file_path = symbol.get("file") or "unknown"
if not files.get(file_path, {}):
@ -235,14 +235,16 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
files[file_path]["symbols"].append(symbol)
data["memory"]["files"] = list()
data["memory"]["files"] = []
for k, v in files.items():
file_data = {"path": k}
file_data.update(v)
data["memory"]["files"].append(file_data)
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
fp.write(dump_json_to_unicode(data))
with open(
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
) as fp:
fp.write(json.dumps(data))
def exists(_):


@ -29,9 +29,9 @@ def VerboseAction(_, act, actstr):
return Action(act, actstr)
def PioClean(env, clean_dir):
def PioClean(env, clean_all=False):
def _relpath(path):
if compat.WINDOWS:
if compat.IS_WINDOWS:
prefix = os.getcwd()[:2].lower()
if (
":" not in prefix
@ -41,11 +41,9 @@ def PioClean(env, clean_dir):
return path
return os.path.relpath(path)
if not os.path.isdir(clean_dir):
print("Build environment is clean")
env.Exit(0)
clean_rel_path = _relpath(clean_dir)
for root, _, files in os.walk(clean_dir):
def _clean_dir(path):
clean_rel_path = _relpath(path)
for root, _, files in os.walk(path):
for f in files:
dst = os.path.join(root, f)
os.remove(dst)
@ -53,9 +51,20 @@ def PioClean(env, clean_dir):
"Removed %s"
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
)
build_dir = env.subst("$BUILD_DIR")
libdeps_dir = env.subst("$PROJECT_LIBDEPS_DIR")
if os.path.isdir(build_dir):
_clean_dir(build_dir)
fs.rmtree(build_dir)
else:
print("Build environment is clean")
if clean_all and os.path.isdir(libdeps_dir):
_clean_dir(libdeps_dir)
fs.rmtree(libdeps_dir)
print("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
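
Usage-wise, the new branch above is reached when the `cleanall` target is requested, which removes both $BUILD_DIR and $PROJECT_LIBDEPS_DIR; a likely invocation (the environment name is hypothetical) is:

    pio run -e uno -t cleanall

while the plain `clean` target keeps wiping only the build directory.
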
def AddTarget( # pylint: disable=too-many-arguments
@ -65,7 +74,7 @@ def AddTarget( # pylint: disable=too-many-arguments
actions,
title=None,
description=None,
group="Generic",
group="General",
always_build=True,
):
if "__PIO_TARGETS" not in env:
@ -101,7 +110,13 @@ def DumpTargets(env):
description="Generate compilation database `compile_commands.json`",
group="Advanced",
)
targets["clean"] = dict(name="clean", title="Clean", group="Generic")
targets["clean"] = dict(name="clean", title="Clean", group="General")
targets["cleanall"] = dict(
name="cleanall",
title="Clean All",
group="General",
description="Clean a build environment and installed library dependencies",
)
return list(targets.values())


@ -26,7 +26,7 @@ from SCons.Script import ARGUMENTS # pylint: disable=import-error
from serial import Serial, SerialException
from platformio import exception, fs, util
from platformio.compat import WINDOWS
from platformio.compat import IS_WINDOWS
from platformio.proc import exec_command
# pylint: disable=unused-argument
@ -134,7 +134,7 @@ def AutodetectUploadPort(*args, **kwargs):
continue
port = item["port"]
if upload_protocol.startswith("blackmagic"):
if WINDOWS and port.startswith("COM") and len(port) > 4:
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
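
For context, the Windows special case above rewrites high-numbered COM ports into the \\.\ device-namespace form that Windows requires for ports above COM9; a quick illustration (the port number is hypothetical):

port = "COM12"
if port.startswith("COM") and len(port) > 4:
    port = "\\\\.\\%s" % port
print(port)  # \\.\COM12
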
@ -236,9 +236,9 @@ def CheckUploadSize(_, target, source, env):
def _format_availale_bytes(value, total):
percent_raw = float(value) / float(total)
blocks_per_progress = 10
used_blocks = int(round(blocks_per_progress * percent_raw))
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
used_blocks = min(
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
)
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
)
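
A worked example of the progress formatting above (the byte counts are hypothetical): with value=32000 and total=64000 the ratio is 0.5, so five of the ten blocks are filled.

used_blocks = min(int(round(10 * 0.5)), 10)
print("[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
    "=" * used_blocks, 10, 0.5, 32000, 64000
))
# [=====     ]  50.0% (used 32000 bytes from 64000 bytes)
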


@ -27,7 +27,7 @@ from SCons.Script import Export # pylint: disable=import-error
from SCons.Script import SConscript # pylint: disable=import-error
from platformio import __version__, fs
from platformio.compat import MACOS, string_types
from platformio.compat import IS_MACOS, string_types
from platformio.package.version import pepver_to_semver
SRC_HEADER_EXT = ["h", "hpp"]
@ -50,7 +50,7 @@ def GetBuildType(env):
return (
"debug"
if (
set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
set(["__debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
or env.GetProjectOption("build_type") == "debug"
)
else "release"
@ -69,13 +69,14 @@ def BuildProgram(env):
if (
env.get("LIBS")
and env.GetCompilerType() == "gcc"
and (env.PioPlatform().is_embedded() or not MACOS)
and (env.PioPlatform().is_embedded() or not IS_MACOS)
):
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
program = env.Program(
os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
os.path.join("$BUILD_DIR", env.subst("$PROGNAME$PROGSUFFIX")),
env["PIOBUILDFILES"],
)
env.Replace(PIOMAINPROG=program)
@ -345,11 +346,10 @@ def BuildFrameworks(env, frameworks):
env.Exit(1)
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
return env.StaticLibrary(
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
)
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
return env.StaticLibrary(env.subst(variant_dir), nodes)
def BuildSources(env, variant_dir, src_dir, src_filter=None):


@ -78,9 +78,9 @@ class ContentCache(object):
if not os.path.isdir(os.path.dirname(cache_path)):
os.makedirs(os.path.dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
with open(self._db_path, mode="a", encoding="utf8") as fp:
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
except UnicodeError:
if os.path.isfile(cache_path):
@ -92,7 +92,7 @@ class ContentCache(object):
return self._unlock_dbindex()
def delete(self, keys=None):
""" Keys=None, delete expired items """
"""Keys=None, delete expired items"""
if not os.path.isfile(self._db_path):
return None
if not keys:
@ -102,7 +102,7 @@ class ContentCache(object):
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
with open(self._db_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
@ -129,7 +129,7 @@ class ContentCache(object):
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
with open(self._db_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()


@ -207,6 +207,9 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
app.set_state_item("account", account)
return result
def get_logged_username(self):
return self.get_account_info(offline=True).get("profile").get("username")
def destroy_account(self):
return self.send_auth_request("delete", "/v1/account")


@ -80,7 +80,7 @@ class EndpointSessionIterator(object):
return self
def next(self):
""" For Python 2 compatibility """
"""For Python 2 compatibility"""
return self.__next__()
def __next__(self):
@ -101,7 +101,10 @@ class HTTPClient(object):
def __del__(self):
if not self._session:
return
try:
self._session.close()
except: # pylint: disable=bare-except
pass
self._session = None
def _next_session(self):


@ -15,7 +15,6 @@
from platformio import __registry_api__, fs
from platformio.clients.account import AccountClient
from platformio.clients.http import HTTPClient, HTTPClientError
from platformio.package.meta import PackageType
# pylint: disable=too-many-arguments
@ -32,18 +31,13 @@ class RegistryClient(HTTPClient):
kwargs["headers"] = headers
return self.fetch_json_data(*args, **kwargs)
def publish_package(
self, archive_path, owner=None, released_at=None, private=False, notify=True
def publish_package( # pylint: disable=redefined-builtin
self, owner, type, archive_path, released_at=None, private=False, notify=True
):
account = AccountClient()
if not owner:
owner = (
account.get_account_info(offline=True).get("profile").get("username")
)
with open(archive_path, "rb") as fp:
return self.send_auth_request(
"post",
"/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
"/v3/packages/%s/%s" % (owner, type),
params={
"private": 1 if private else 0,
"notify": 1 if notify else 0,
@ -59,13 +53,8 @@ class RegistryClient(HTTPClient):
)
def unpublish_package( # pylint: disable=redefined-builtin
self, type, name, owner=None, version=None, undo=False
self, owner, type, name, version=None, undo=False
):
account = AccountClient()
if not owner:
owner = (
account.get_account_info(offline=True).get("profile").get("username")
)
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
if version:
path += "/" + version
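
For context, the hunk above moves owner/type resolution out of RegistryClient, so callers now pass both explicitly; a hedged usage sketch (the import path, owner, and file name are assumptions):

from platformio.clients.registry import RegistryClient  # module path assumed

client = RegistryClient()
client.publish_package(
    owner="acme",                         # placeholder account name
    type="library",                       # placeholder package type
    archive_path="foo-lib-1.0.0.tar.gz",  # placeholder archive
)
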


@ -184,7 +184,7 @@ def account_destroy():
click.confirm(
"Are you sure you want to delete the %s user account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% client.get_account_info().get("profile").get("username"),
% client.get_logged_username(),
abort=True,
)
client.destroy_account()


@ -13,12 +13,12 @@
# limitations under the License.
import json
import shutil
import click
from tabulate import tabulate
from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.package.manager.platform import PlatformPackageManager
@ -41,7 +41,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
grpboards[board["platform"]] = []
grpboards[board["platform"]].append(board)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
for (platform, boards) in sorted(grpboards.items()):
click.echo("")
click.echo("Platform: ", nl=False)
@ -83,4 +83,4 @@ def _print_boards_json(query, installed=False):
if query.lower() not in search_data.lower():
continue
result.append(board)
click.echo(dump_json_to_unicode(result))
click.echo(json.dumps(result))


@ -15,7 +15,9 @@
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
# pylint: disable=redefined-builtin,too-many-statements
import json
import os
import shutil
from collections import Counter
from os.path import dirname, isfile
from time import time
@ -26,7 +28,6 @@ from tabulate import tabulate
from platformio import app, exception, fs, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools import CheckToolFactory
from platformio.compat import dump_json_to_unicode
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above, get_project_dir
@ -163,7 +164,7 @@ def cli(
print_processing_footer(result)
if json_output:
click.echo(dump_json_to_unicode(results_to_json(results)))
click.echo(json.dumps(results_to_json(results)))
elif not silent:
print_check_summary(results)
@ -193,7 +194,7 @@ def print_processing_header(tool, envname, envdump):
"Checking %s > %s (%s)"
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@ -214,7 +215,7 @@ def print_processing_footer(result):
def collect_component_stats(result):
components = dict()
components = {}
def _append_defect(component, defect):
if not components.get(component):
@ -249,7 +250,7 @@ def print_defects_stats(results):
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
severity_labels.reverse()
tabular_data = list()
tabular_data = []
for k, v in component_stats.items():
tool_defect = [v.get(s, 0) for s in severity_labels]
tabular_data.append([k] + tool_defect)


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
from tempfile import NamedTemporaryFile
import tempfile
import click
from platformio import compat, fs, proc
from platformio import fs, proc
from platformio.commands.check.defect import DefectItem
from platformio.project.helpers import load_project_ide_data
@ -104,7 +105,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
def _create_tmp_file(self, data):
with NamedTemporaryFile("w", delete=False) as fp:
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
@ -167,6 +168,29 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
if os.path.isfile(f):
os.remove(f)
@staticmethod
def is_check_successful(cmd_result):
return cmd_result["returncode"] == 0
def execute_check_cmd(self, cmd):
result = proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
if not self.is_check_successful(result):
click.echo(
"\nError: Failed to execute check command! Exited with code %d."
% result["returncode"]
)
if self.options.get("verbose"):
click.echo(result["out"])
click.echo(result["err"])
self._bad_input = True
return result
@staticmethod
def get_project_target_files(patterns):
c_extension = (".c",)
@ -184,7 +208,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
result["c++"].append(os.path.realpath(path))
for pattern in patterns:
for item in compat.glob_recursive(pattern):
for item in glob.glob(pattern, recursive=True):
if not os.path.isdir(item):
_add_file(item)
for root, _, files in os.walk(item, followlinks=True):
@ -200,11 +224,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.execute_check_cmd(cmd)
else:
if self.options.get("verbose"):


@ -49,6 +49,12 @@ class ClangtidyCheckTool(CheckToolBase):
return DefectItem(severity, category, message, file_, line, column, defect_id)
@staticmethod
def is_check_successful(cmd_result):
# Note: Clang-Tidy returns 1 for non-critical compilation errors,
# so 0 and 1 are the only acceptable values
return cmd_result["returncode"] < 2
def configure_command(self):
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")


@ -64,7 +64,7 @@ class CppcheckCheckTool(CheckToolBase):
if any(f not in self._buffer for f in self.defect_fields):
return None
args = dict()
args = {}
for field in self._buffer.split(self._field_delimiter):
field = field.strip().replace('"', "")
name, value = field.split("=", 1)
@ -96,20 +96,19 @@ class CppcheckCheckTool(CheckToolBase):
)
click.echo()
self._bad_input = True
self._buffer = ""
return None
self._buffer = ""
return DefectItem(**args)
def configure_command(
self, language, src_files
): # pylint: disable=arguments-differ
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
cmd = [
tool_path,
"--addon-python=%s" % proc.get_pythonexe_path(),
"--error-exitcode=1",
"--error-exitcode=3",
"--verbose" if self.options.get("verbose") else "--quiet",
]
@ -142,10 +141,11 @@ class CppcheckCheckTool(CheckToolBase):
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
if not self.is_flag_set("--std", flags):
# Try to guess the standard version from the build flags
for flag in build_flags:
if "-std" in flag:
# Standards with GNU extensions are not allowed
cmd.append("-" + flag.replace("gnu", "c"))
cmd.append("-" + self.convert_language_standard(flag))
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
@ -157,8 +157,8 @@ class CppcheckCheckTool(CheckToolBase):
"--include=" + inc
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
)
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
cmd.append("--includes-file=%s" % self._generate_inc_file())
cmd.append('"%s"' % src_file)
return cmd
@ -220,29 +220,47 @@ class CppcheckCheckTool(CheckToolBase):
if os.path.isfile(dump_file):
os.remove(dump_file)
@staticmethod
def is_check_successful(cmd_result):
# Cppcheck is configured to return '3' if a defect is found
return cmd_result["returncode"] in (0, 3)
@staticmethod
def convert_language_standard(flag):
cpp_standards_map = {
"0x": "11",
"1y": "14",
"1z": "17",
"2a": "20",
}
standard = flag[-2:]
# Note: GNU extensions are not supported and converted to regular standards
return flag.replace("gnu", "c").replace(
standard, cpp_standards_map.get(standard, standard)
)
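
A worked example of the standard conversion above (the input flag is hypothetical): GNU dialects are rewritten to the plain standards Cppcheck understands, and the caller prepends an extra dash to form the final option.

cpp_standards_map = {"0x": "11", "1y": "14", "1z": "17", "2a": "20"}
flag = "-std=gnu++1z"  # as taken from the project build flags
standard = flag[-2:]
print("-" + flag.replace("gnu", "c").replace(standard, cpp_standards_map.get(standard, standard)))
# --std=c++17
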
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
project_files = self.get_project_target_files(self.options["patterns"])
languages = ("c", "c++")
if not any([project_files[t] for t in languages]):
project_files = self.get_project_target_files(self.options["patterns"])
src_files_scope = ("c", "c++")
if not any(project_files[t] for t in src_files_scope):
click.echo("Error: Nothing to check.")
return True
for language in languages:
if not project_files[language]:
for scope, files in project_files.items():
if scope not in src_files_scope:
continue
cmd = self.configure_command(language, project_files[language])
for src_file in files:
cmd = self.configure_command(scope, src_file)
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.execute_check_cmd(cmd)
self.clean_up()


@ -19,9 +19,10 @@ from xml.etree.ElementTree import fromstring
import click
from platformio import proc, util
from platformio import proc
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.compat import IS_WINDOWS
from platformio.package.manager.core import get_core_package_dir
@ -34,24 +35,29 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
self.tool_path = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"x64" if "windows" in util.get_systype() else "bin",
"x64" if IS_WINDOWS else "bin",
"pvs-studio",
)
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
with open(self._tmp_cfg_file, "w") as fp:
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
fp.write(
"exclude-path = "
+ self.config.get_optional_dir("packages").replace("\\", "/")
)
with open(self._tmp_cmd_file, "w") as fp:
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
fp.write(
" ".join(
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
)
)
def tool_output_filter(self, line):
if "license was not entered" in line.lower():
self._bad_input = True
return line
def _process_defects(self, defects):
for defect in defects:
if not isinstance(defect, DefectItem):
@ -65,9 +71,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
def _demangle_report(self, output_file):
converter_tool = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"HtmlGenerator"
if "windows" in util.get_systype()
else os.path.join("bin", "plog-converter"),
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
)
cmd = (
@ -182,7 +186,13 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
flags = self.cc_flags
compiler = self.cc_path
cmd = [compiler, src_file, "-E", "-o", self._tmp_preprocessed_file]
cmd = [
compiler,
'"%s"' % src_file,
"-E",
"-o",
'"%s"' % self._tmp_preprocessed_file,
]
cmd.extend([f for f in flags if f])
cmd.extend(["-D%s" % d for d in self.cpp_defines])
cmd.append('@"%s"' % self._tmp_cmd_file)
@ -203,6 +213,12 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
@staticmethod
def is_check_successful(cmd_result):
return (
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
for scope, files in self.get_project_target_files(
@ -219,11 +235,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
self._bad_input = True
continue
result = proc.exec_command(cmd)
# pylint: disable=unsupported-membership-test
if result["returncode"] != 0 or "license" in result["err"].lower():
self._bad_input = True
click.echo(result["err"])
result = self.execute_check_cmd(cmd)
if result["returncode"] != 0:
continue
self._process_defects(self.parse_defects(self._tmp_output_file))


@ -12,14 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from os import getenv, makedirs, remove
from os.path import basename, isdir, isfile, join, realpath
from shutil import copyfile, copytree
from tempfile import mkdtemp
import glob
import os
import shutil
import tempfile
import click
from platformio import app, compat, fs
from platformio import app, fs
from platformio.commands.project import project_init as cmd_project_init
from platformio.commands.project import validate_boards
from platformio.commands.run.command import cli as cmd_run
@ -33,8 +33,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
for i, p in enumerate(value):
if p.startswith("~"):
value[i] = fs.expanduser(p)
value[i] = realpath(value[i])
if not compat.glob_recursive(value[i]):
value[i] = os.path.realpath(value[i])
if not glob.glob(value[i], recursive=True):
invalid_path = p
break
try:
@ -51,7 +51,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option(
"--build-dir",
default=mkdtemp,
default=tempfile.mkdtemp,
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
@click.option("--keep-build-dir", is_flag=True)
@ -78,28 +78,28 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
verbose,
):
if not src and getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
if not src and os.getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
if not src:
raise click.BadParameter("Missing argument 'src'")
try:
app.set_session_var("force_option", True)
if not keep_build_dir and isdir(build_dir):
if not keep_build_dir and os.path.isdir(build_dir):
fs.rmtree(build_dir)
if not isdir(build_dir):
makedirs(build_dir)
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
for dir_name, patterns in dict(lib=lib, src=src).items():
if not patterns:
continue
contents = []
for p in patterns:
contents += compat.glob_recursive(p)
_copy_contents(join(build_dir, dir_name), contents)
contents += glob.glob(p, recursive=True)
_copy_contents(os.path.join(build_dir, dir_name), contents)
if project_conf and isfile(project_conf):
if project_conf and os.path.isfile(project_conf):
_copy_project_conf(build_dir, project_conf)
elif not board:
raise CIBuildEnvsEmpty()
@ -122,52 +122,55 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
fs.rmtree(build_dir)
def _copy_contents(dst_dir, contents):
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
items = {"dirs": set(), "files": set()}
for path in contents:
if isdir(path):
if os.path.isdir(path):
items["dirs"].add(path)
elif isfile(path):
elif os.path.isfile(path):
items["files"].add(path)
dst_dir_name = basename(dst_dir)
dst_dir_name = os.path.basename(dst_dir)
if dst_dir_name == "src" and len(items["dirs"]) == 1:
copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
if not os.path.isdir(dst_dir):
shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
else:
if not isdir(dst_dir):
makedirs(dst_dir)
if not os.path.isdir(dst_dir):
os.makedirs(dst_dir)
for d in items["dirs"]:
copytree(d, join(dst_dir, basename(d)), symlinks=True)
src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
if not os.path.isdir(src_dst_dir):
shutil.copytree(d, src_dst_dir, symlinks=True)
if not items["files"]:
return
if dst_dir_name == "lib":
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))
for f in items["files"]:
dst_file = join(dst_dir, basename(f))
dst_file = os.path.join(dst_dir, os.path.basename(f))
if f == dst_file:
continue
copyfile(f, dst_file)
shutil.copyfile(f, dst_file)
def _exclude_contents(dst_dir, patterns):
contents = []
for p in patterns:
contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
for path in contents:
path = realpath(path)
if isdir(path):
path = os.path.realpath(path)
if os.path.isdir(path):
fs.rmtree(path)
elif isfile(path):
remove(path)
elif os.path.isfile(path):
os.remove(path)
def _copy_project_conf(build_dir, project_conf):
config = ProjectConfig(project_conf, parse_extra=False)
if config.has_section("platformio"):
config.remove_section("platformio")
config.save(join(build_dir, "platformio.ini"))
config.save(os.path.join(build_dir, "platformio.ini"))


@ -0,0 +1,180 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, too-many-locals
# pylint: disable=too-many-branches, too-many-statements
import asyncio
import os
import signal
import subprocess
import click
from platformio import app, exception, fs, proc
from platformio.commands.platform import init_platform
from platformio.compat import IS_WINDOWS
from platformio.debug import helpers
from platformio.debug.config.factory import DebugConfigFactory
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.debug.process.gdb import GDBClientProcess
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project
from platformio.project.options import ProjectOptions
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="Unified debugger",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--environment", "-e", metavar="<environment>")
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(
ctx,
project_dir,
project_conf,
environment,
load_mode,
verbose,
interface,
__unprocessed,
):
app.set_session_var("custom_project_conf", project_conf)
# use env variables from Eclipse or CLion
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
break
if os.getenv(name):
project_dir = os.getenv(name)
with fs.cd(project_dir):
project_config = ProjectConfig.get_instance(project_conf)
project_config.validate(envs=[environment] if environment else None)
env_name = environment or helpers.get_default_debug_env(project_config)
if not interface:
return helpers.predebug_project(
ctx, project_dir, project_config, env_name, False, verbose
)
env_options = project_config.items(env=env_name, as_dict=True)
if "platform" not in env_options:
raise ProjectEnvsNotAvailableError()
with fs.cd(project_dir):
debug_config = DebugConfigFactory.new(
init_platform(env_options["platform"]), project_config, env_name
)
if "--version" in __unprocessed:
return subprocess.run(
[debug_config.client_executable_path, "--version"], check=True
)
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
click.echo(
helpers.escape_gdbmi_stream("~", str(e) + "\n")
if helpers.is_gdbmi_mode()
else str(e) + "\n",
nl=False,
)
rebuild_prog = False
preload = debug_config.load_cmds == ["preload"]
load_mode = load_mode or debug_config.load_mode
if load_mode == "always":
rebuild_prog = preload or not helpers.has_debug_symbols(
debug_config.program_path
)
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
debug_config.program_path
) or not helpers.has_debug_symbols(debug_config.program_path)
if not (debug_config.program_path and os.path.isfile(debug_config.program_path)):
rebuild_prog = True
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
debug_config.load_cmds = []
if rebuild_prog:
if helpers.is_gdbmi_mode():
click.echo(
helpers.escape_gdbmi_stream(
"~", "Preparing firmware for debugging...\n"
),
nl=False,
)
stream = helpers.GDBMIConsoleStream()
with proc.capture_std_streams(stream):
helpers.predebug_project(
ctx, project_dir, project_config, env_name, preload, verbose
)
stream.close()
else:
click.echo("Preparing firmware for debugging...")
helpers.predebug_project(
ctx, project_dir, project_config, env_name, preload, verbose
)
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(debug_config.program_path)
if not os.path.isfile(debug_config.program_path):
raise DebugInvalidOptionsError("Program/firmware is missed")
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
asyncio.set_event_loop(loop)
with fs.cd(project_dir):
client = GDBClientProcess(project_dir, debug_config)
coro = client.run(__unprocessed)
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
loop.run_until_complete(coro)
if IS_WINDOWS:
# an issue with the `asyncio` executor and STDIN:
# it cannot be closed gracefully
proc.force_exit()
finally:
del client
loop.close()
return True


@ -1,175 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, too-many-statements
# pylint: disable=too-many-locals, too-many-branches
import os
import signal
from os.path import isfile
import click
from platformio import app, exception, fs, proc
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.package.manager.core import inject_contrib_pysite
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="Unified debugger",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--environment", "-e", metavar="<environment>")
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
app.set_session_var("custom_project_conf", project_conf)
# use env variables from Eclipse or CLion
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
break
if os.getenv(sysenv):
project_dir = os.getenv(sysenv)
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config.validate(envs=[environment] if environment else None)
env_name = environment or helpers.get_default_debug_env(config)
env_options = config.items(env=env_name, as_dict=True)
if not set(env_options.keys()) >= set(["platform", "board"]):
raise ProjectEnvsNotAvailableError()
try:
platform = PlatformFactory.new(env_options["platform"])
except UnknownPlatform:
ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.new(env_options["platform"])
debug_options = helpers.configure_initial_debug_options(platform, env_options)
assert debug_options
if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
ide_data = load_project_ide_data(project_dir, env_name)
if not ide_data:
raise DebugInvalidOptionsError("Could not load a build configuration")
if "--version" in __unprocessed:
result = proc.exec_command([ide_data["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
click.echo(
helpers.escape_gdbmi_stream("~", str(e) + "\n")
if helpers.is_gdbmi_mode()
else str(e) + "\n",
nl=False,
)
try:
debug_options = platform.configure_debug_options(debug_options, ide_data)
except NotImplementedError:
# legacy for ESP32 dev-platform <=2.0.0
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, ide_data
)
rebuild_prog = False
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
if load_mode == "always":
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
ide_data["prog_path"]
) or not helpers.has_debug_symbols(ide_data["prog_path"])
else:
rebuild_prog = not isfile(ide_data["prog_path"])
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
debug_options["load_cmds"] = []
if rebuild_prog:
if helpers.is_gdbmi_mode():
click.echo(
helpers.escape_gdbmi_stream(
"~", "Preparing firmware for debugging...\n"
),
nl=False,
)
stream = helpers.GDBMIConsoleStream()
with proc.capture_std_streams(stream):
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
stream.close()
else:
click.echo("Preparing firmware for debugging...")
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(ide_data["prog_path"])
if not isfile(ide_data["prog_path"]):
raise DebugInvalidOptionsError("Program/firmware is missed")
# run debugging client
inject_contrib_pysite()
# pylint: disable=import-outside-toplevel
from platformio.commands.debug.process.client import GDBClient, reactor
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()
return True


@ -1,302 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
STDOUT = sys.stdout
def write(self, text):
self.STDOUT.write(escape_gdbmi_stream("~", text))
self.STDOUT.flush()
def is_gdbmi_mode():
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
bytes_stream = False
if is_bytes(stream):
bytes_stream = True
stream = stream.decode()
if not stream:
return b"" if bytes_stream else ""
ends_nl = stream.endswith("\n")
stream = re.sub(r"\\+", "\\\\\\\\", stream)
stream = stream.replace('"', '\\"')
stream = stream.replace("\n", "\\n")
stream = '%s"%s"' % (prefix, stream)
if ends_nl:
stream += "\n"
return stream.encode() if bytes_stream else stream
def get_default_debug_env(config):
default_envs = config.default_envs()
all_envs = config.envs()
for env in default_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
for env in all_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
return default_envs[0] if default_envs else all_envs[0]
def predebug_project(ctx, project_dir, env_name, preload, verbose):
ctx.invoke(
cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose,
)
if preload:
time.sleep(5)
def configure_initial_debug_options(platform, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
board_config = platform.board_config(env_options["board"])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
server_options = None
# specific server per system
if isinstance(tool_settings.get("server", {}), list):
for item in tool_settings["server"][:]:
tool_settings["server"] = item
if util.get_systype() in item.get("system", []):
break
# user overwrites debug server
if env_options.get("debug_server"):
server_options = {
"cwd": None,
"executable": None,
"arguments": env_options.get("debug_server"),
}
server_options["executable"] = server_options["arguments"][0]
server_options["arguments"] = server_options["arguments"][1:]
elif "server" in tool_settings:
server_options = tool_settings["server"]
server_package = server_options.get("package")
server_package_dir = (
platform.get_package_dir(server_package) if server_package else None
)
if server_package and not server_package_dir:
platform.install_packages(
with_packages=[server_package], skip_default_package=True, silent=True
)
server_package_dir = platform.get_package_dir(server_package)
server_options.update(
dict(
cwd=server_package_dir if server_package else None,
executable=server_options.get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
for a in server_options.get("arguments", [])
],
)
)
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
result = dict(
tool=tool_name,
upload_protocol=env_options.get(
"upload_protocol", board_config.get("upload", {}).get("protocol")
),
load_cmds=_cleanup_cmds(
env_options.get(
"debug_load_cmds",
tool_settings.get(
"load_cmds",
tool_settings.get(
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
),
),
)
),
load_mode=env_options.get(
"debug_load_mode",
tool_settings.get(
"load_mode", ProjectOptions["env.debug_load_mode"].default
),
),
init_break=env_options.get(
"debug_init_break",
tool_settings.get(
"init_break", ProjectOptions["env.debug_init_break"].default
),
),
init_cmds=_cleanup_cmds(
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
),
extra_cmds=extra_cmds,
require_debug_port=tool_settings.get("require_debug_port", False),
port=reveal_debug_port(
env_options.get("debug_port", tool_settings.get("port")),
tool_name,
tool_settings,
),
server=server_options,
)
return result
def configure_esp32_load_cmds(debug_options, configuration):
"""
DEPRECATED: Moved to ESP32 dev-platform
See platform.py::configure_debug_options
"""
flash_images = configuration.get("extra", {}).get("flash_images")
ignore_conds = [
debug_options["load_cmds"] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
not flash_images,
not all([isfile(item["path"]) for item in flash_images]),
]
if any(ignore_conds):
return debug_options["load_cmds"]
mon_cmds = [
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
for item in flash_images
]
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
% fs.to_unix_path(configuration["prog_path"][:-4])
)
return mon_cmds
def has_debug_symbols(prog_path):
if not isfile(prog_path):
return False
matched = {
b".debug_info": False,
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
b"__PLATFORMIO_BUILD_DEBUG__": False,
}
with open(prog_path, "rb") as fp:
last_data = b""
while True:
data = fp.read(1024)
if not data:
break
for pattern, found in matched.items():
if found:
continue
if pattern in last_data + data:
matched[pattern] = True
last_data = data
return all(matched.values())
def is_prog_obsolete(prog_path):
prog_hash_path = prog_path + ".sha1"
if not isfile(prog_path):
return True
shasum = sha1()
with open(prog_path, "rb") as fp:
while True:
data = fp.read(1024)
if not data:
break
shasum.update(data)
new_digest = shasum.hexdigest()
old_digest = None
if isfile(prog_hash_path):
with open(prog_hash_path) as fp:
old_digest = fp.read()
if new_digest == old_digest:
return False
with open(prog_hash_path, "w") as fp:
fp.write(new_digest)
return True
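`is_prog_obsolete()` is a small change detector: the firmware's SHA-1 digest is stored in a `<prog>.sha1` sidecar file and compared on the next run, which is what lets `debug_load_mode = modified` skip reflashing an unchanged binary. A condensed sketch of the same idea using pathlib (the helper name is illustrative):

from hashlib import sha1
from pathlib import Path

def firmware_changed(prog_path):
    """Return True (and refresh the sidecar) when the binary differs from the last run."""
    prog = Path(prog_path)
    sidecar = Path(prog_path + ".sha1")
    if not prog.is_file():
        return True
    new_digest = sha1(prog.read_bytes()).hexdigest()
    old_digest = sidecar.read_text() if sidecar.is_file() else None
    if new_digest == old_digest:
        return False
    sidecar.write_text(new_digest)
    return True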
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
if set(["*", "?", "[", "]"]) & set(env_debug_port):
return env_debug_port
return None
def _is_match_pattern(port):
pattern = _get_pattern()
if not pattern:
return True
return fnmatch(port, pattern)
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
continue
port = item["port"]
if tool_name.startswith("blackmagic"):
if (
"windows" in util.get_systype()
and port.startswith("COM")
and len(port) > 4
):
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
return port
return None
if env_debug_port and not _get_pattern():
return env_debug_port
if not tool_settings.get("require_debug_port"):
return None
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
return debug_port
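Note that `reveal_debug_port()` treats a `debug_port` value containing any of `*`, `?`, `[`, `]` as a glob pattern and matches it against the detected serial ports with `fnmatch`, while a literal value short-circuits the search entirely. A quick illustration of the matching rule (the port names are examples):

from fnmatch import fnmatch

pattern = "/dev/cu.usbmodem*"  # e.g. debug_port = /dev/cu.usbmodem* in platformio.ini
candidates = ["/dev/cu.usbmodem14201", "/dev/cu.Bluetooth-Incoming-Port"]
print([port for port in candidates if fnmatch(port, pattern)])
# -> ['/dev/cu.usbmodem14201']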


@ -1,161 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
GDB_DEFAULT_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_run_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_STUTIL_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_run_target
monitor reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_JLINK_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_run_target
monitor clrbp
monitor reset
monitor go
end
target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_BLACKMAGIC_INIT_CONFIG = """
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_run_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
GDB_MSPDEBUG_INIT_CONFIG = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
target extended-remote $DEBUG_PORT
monitor erase
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_QEMU_INIT_CONFIG = """
define pio_reset_halt_target
monitor system_reset
end
define pio_reset_run_target
monitor system_reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_RENODE_INIT_CONFIG = """
define pio_reset_halt_target
monitor machine Reset
$LOAD_CMDS
monitor start
end
define pio_reset_run_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
$INIT_BREAK
monitor start
"""
TOOL_TO_CONFIG = {
"jlink": GDB_JLINK_INIT_CONFIG,
"mspdebug": GDB_MSPDEBUG_INIT_CONFIG,
"qemu": GDB_QEMU_INIT_CONFIG,
"blackmagic": GDB_BLACKMAGIC_INIT_CONFIG,
"renode": GDB_RENODE_INIT_CONFIG,
}
def get_gdb_init_config(debug_options):
tool = debug_options.get("tool")
if tool and tool in TOOL_TO_CONFIG:
return TOOL_TO_CONFIG[tool]
server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
if "st-util" in server_exe:
return GDB_STUTIL_INIT_CONFIG
return GDB_DEFAULT_INIT_CONFIG
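Each of these templates is later run through the GDB client's pattern substitution, which replaces `$DEBUG_PORT`, `$LOAD_CMDS`, `$INIT_BREAK`, and similar placeholders before the script is written out as `.pioinit`. A minimal standalone sketch of that substitution over a trimmed-down default template (the placeholder values are illustrative):

template = """
target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
$INIT_BREAK
"""

patterns = {
    "DEBUG_PORT": ":3333",
    "LOAD_CMDS": "load",
    "INIT_BREAK": "tbreak main",
}
script = template
for key, value in patterns.items():
    script = script.replace("$%s" % key, value)
print(script)
# -> a ready-to-source GDB script: remote target :3333, a "load" command,
#    and a temporary breakpoint on main()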


@ -1,93 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signal
import time
import click
from twisted.internet import protocol # pylint: disable=import-error
from platformio import fs
from platformio.compat import string_types
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_core_dir
class BaseProcess(protocol.ProcessProtocol, object):
STDOUT_CHUNK_SIZE = 2048
LOG_FILE = None
COMMON_PATTERNS = {
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
"PYTHONEXE": get_pythonexe_path(),
}
def __init__(self):
self._last_activity = 0
def apply_patterns(self, source, patterns=None):
_patterns = self.COMMON_PATTERNS.copy()
_patterns.update(patterns or {})
for key, value in _patterns.items():
if key.endswith(("_DIR", "_PATH")):
_patterns[key] = fs.to_unix_path(value)
def _replace(text):
for key, value in _patterns.items():
pattern = "$%s" % key
text = text.replace(pattern, value or "")
return text
if isinstance(source, string_types):
source = _replace(source)
elif isinstance(source, (list, dict)):
items = enumerate(source) if isinstance(source, list) else source.items()
for key, value in items:
if isinstance(value, string_types):
source[key] = _replace(value)
elif isinstance(value, (list, dict)):
source[key] = self.apply_patterns(value, patterns)
return source
def onStdInData(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
def outReceived(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[: self.STDOUT_CHUNK_SIZE]
click.echo(chunk, nl=False)
data = data[self.STDOUT_CHUNK_SIZE :]
def errReceived(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
click.echo(data, nl=False, err=True)
def processEnded(self, _):
self._last_activity = time.time()
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)


@ -1,280 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import signal
import time
from hashlib import sha1
from os.path import basename, dirname, isdir, join, realpath, splitext
from tempfile import mkdtemp
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import protocol # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import stdio # pylint: disable=import-error
from twisted.internet import task # pylint: disable=import-error
from platformio import fs, proc, telemetry, util
from platformio.cache import ContentCache
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.initcfgs import get_gdb_init_config
from platformio.commands.debug.process.base import BaseProcess
from platformio.commands.debug.process.server import DebugServer
from platformio.compat import hashlib_encode_data, is_bytes
from platformio.project.helpers import get_project_cache_dir
class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
PIO_SRC_NAME = ".pioinit"
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
def __init__(self, project_dir, args, debug_options, env_options):
super(GDBClient, self).__init__()
self.project_dir = project_dir
self.args = list(args)
self.debug_options = debug_options
self.env_options = env_options
self._debug_server = DebugServer(debug_options, env_options)
self._session_id = None
if not isdir(get_project_cache_dir()):
os.makedirs(get_project_cache_dir())
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")
self._target_is_run = False
self._auto_continue_timer = None
self._errors_buffer = b""
@defer.inlineCallbacks
def spawn(self, gdb_path, prog_path):
session_hash = gdb_path + prog_path
self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
self._kill_previous_session()
patterns = {
"PROJECT_DIR": self.project_dir,
"PROG_PATH": prog_path,
"PROG_DIR": dirname(prog_path),
"PROG_NAME": basename(splitext(prog_path)[0]),
"DEBUG_PORT": self.debug_options["port"],
"UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
"INIT_BREAK": self.debug_options["init_break"] or "",
"LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
}
yield self._debug_server.spawn(patterns)
if not patterns["DEBUG_PORT"]:
patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()
self.generate_pioinit(self._gdbsrc_dir, patterns)
# start GDB client
args = [
"piogdb",
"-q",
"--directory",
self._gdbsrc_dir,
"--directory",
self.project_dir,
"-l",
"10",
]
args.extend(self.args)
if not gdb_path:
raise DebugInvalidOptionsError("GDB client is not configured")
gdb_data_dir = self._get_data_dir(gdb_path)
if gdb_data_dir:
args.extend(["--data-directory", gdb_data_dir])
args.append(patterns["PROG_PATH"])
transport = reactor.spawnProcess(
self, gdb_path, args, path=self.project_dir, env=os.environ
)
defer.returnValue(transport)
@staticmethod
def _get_data_dir(gdb_path):
if "msp430" in gdb_path:
return None
gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
return gdb_data_dir if isdir(gdb_data_dir) else None
def generate_pioinit(self, dst_dir, patterns):
# default GDB init commands depending on debug tool
commands = get_gdb_init_config(self.debug_options).split("\n")
if self.debug_options["init_cmds"]:
commands = self.debug_options["init_cmds"]
commands.extend(self.debug_options["extra_cmds"])
if not any("define pio_reset_run_target" in cmd for cmd in commands):
commands = [
"define pio_reset_run_target",
" echo Warning! Undefined pio_reset_run_target command\\n",
" monitor reset",
"end",
] + commands
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
commands = [
"define pio_reset_halt_target",
" echo Warning! Undefined pio_reset_halt_target command\\n",
" monitor reset halt",
"end",
] + commands
if not any("define pio_restart_target" in cmd for cmd in commands):
commands += [
"define pio_restart_target",
" pio_reset_halt_target",
" $INIT_BREAK",
" %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
"end",
]
banner = [
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
"echo PlatformIO: Initializing remote target...\\n",
]
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
fp.write("\n".join(self.apply_patterns(commands, patterns)))
def connectionMade(self):
self._lock_session(self.transport.pid)
p = protocol.Protocol()
p.dataReceived = self.onStdInData
stdio.StandardIO(p)
def onStdInData(self, data):
super(GDBClient, self).onStdInData(data)
if b"-exec-run" in data:
if self._target_is_run:
token, _ = data.split(b"-", 1)
self.outReceived(token + b"^running\n")
return
data = data.replace(b"-exec-run", b"-exec-continue")
if b"-exec-continue" in data:
self._target_is_run = True
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
self.transport.write(b"pio_reset_run_target\n")
self.transport.write(data)
def processEnded(self, reason): # pylint: disable=unused-argument
self._unlock_session()
if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
fs.rmtree(self._gdbsrc_dir)
if self._debug_server:
self._debug_server.terminate()
reactor.stop()
def outReceived(self, data):
super(GDBClient, self).outReceived(data)
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug", "Started", telemetry.dump_run_environment(self.env_options)
)
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
self._auto_continue_timer.start(0.1)
def errReceived(self, data):
super(GDBClient, self).errReceived(data)
self._handle_error(data)
def console_log(self, msg):
if helpers.is_gdbmi_mode():
msg = helpers.escape_gdbmi_stream("~", msg)
self.outReceived(msg if is_bytes(msg) else msg.encode())
def _auto_exec_continue(self):
auto_exec_delay = 0.5 # in seconds
if self._last_activity > (time.time() - auto_exec_delay):
return
if self._auto_continue_timer:
self._auto_continue_timer.stop()
self._auto_continue_timer = None
if not self.debug_options["init_break"] or self._target_is_run:
return
self.console_log(
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
% self.debug_options["init_break"]
)
self.console_log(
"PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
)
self.transport.write(
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
)
self._target_is_run = True
def _handle_error(self, data):
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
if not (
self.PIO_SRC_NAME.encode() in self._errors_buffer
and b"Error in sourced" in self._errors_buffer
):
return
last_errors = self._errors_buffer.decode()
last_errors = " ".join(reversed(last_errors.split("\n")))
last_errors = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_errors, flags=re.M)
err = "%s -> %s" % (
telemetry.dump_run_environment(self.env_options),
last_errors,
)
telemetry.send_exception("DebugInitError: %s" % err)
self.transport.loseConnection()
def _kill_previous_session(self):
assert self._session_id
pid = None
with ContentCache() as cc:
pid = cc.get(self._session_id)
cc.delete(self._session_id)
if not pid:
return
if "windows" in util.get_systype():
kill = ["Taskkill", "/PID", pid, "/F"]
else:
kill = ["kill", pid]
try:
proc.exec_command(kill)
except: # pylint: disable=bare-except
pass
def _lock_session(self, pid):
if not self._session_id:
return
with ContentCache() as cc:
cc.set(self._session_id, str(pid), "1h")
def _unlock_session(self):
if not self._session_id:
return
with ContentCache() as cc:
cc.delete(self._session_id)


@ -1,166 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from os.path import isdir, isfile, join
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process.base import BaseProcess
from platformio.proc import where_is_program
class DebugServer(BaseProcess):
def __init__(self, debug_options, env_options):
super(DebugServer, self).__init__()
self.debug_options = debug_options
self.env_options = env_options
self._debug_port = ":3333"
self._transport = None
self._process_ended = False
self._ready = False
@defer.inlineCallbacks
def spawn(self, patterns): # pylint: disable=too-many-branches
systype = util.get_systype()
server = self.debug_options.get("server")
if not server:
defer.returnValue(None)
server = self.apply_patterns(server, patterns)
server_executable = server["executable"]
if not server_executable:
defer.returnValue(None)
if server["cwd"]:
server_executable = join(server["cwd"], server_executable)
if (
"windows" in systype
and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")
):
server_executable = server_executable + ".exe"
if not isfile(server_executable):
server_executable = where_is_program(server_executable)
if not isfile(server_executable):
raise DebugInvalidOptionsError(
"\nCould not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n\n"
"See documentation or contact contact@platformio.org:\n"
"https://docs.platformio.org/page/plus/debugging.html\n"
% server_executable
)
openocd_pipe_allowed = all(
[not self.debug_options["port"], "openocd" in server_executable]
)
if openocd_pipe_allowed:
args = []
if server["cwd"]:
args.extend(["-s", server["cwd"]])
args.extend(
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
)
args.extend(server["arguments"])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
)
self._debug_port = '| "%s" %s' % (server_executable, str_args)
self._debug_port = fs.to_unix_path(self._debug_port)
defer.returnValue(self._debug_port)
env = os.environ.copy()
# prepend server "lib" folder to LD path
if (
"windows" not in systype
and server["cwd"]
and isdir(join(server["cwd"], "lib"))
):
ld_key = "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
env[ld_key] = join(server["cwd"], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
# prepend BIN to PATH
if server["cwd"] and isdir(join(server["cwd"], "bin")):
env["PATH"] = "%s%s%s" % (
join(server["cwd"], "bin"),
os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")),
)
self._transport = reactor.spawnProcess(
self,
server_executable,
[server_executable] + server["arguments"],
path=server["cwd"],
env=env,
)
if "mspdebug" in server_executable.lower():
self._debug_port = ":2000"
elif "jlink" in server_executable.lower():
self._debug_port = ":2331"
elif "qemu" in server_executable.lower():
self._debug_port = ":1234"
yield self._wait_until_ready()
defer.returnValue(self._debug_port)
@defer.inlineCallbacks
def _wait_until_ready(self):
timeout = 10
elapsed = 0
delay = 0.5
auto_ready_delay = 0.5
while not self._ready and not self._process_ended and elapsed < timeout:
yield self.async_sleep(delay)
if not self.debug_options.get("server", {}).get("ready_pattern"):
self._ready = self._last_activity < (time.time() - auto_ready_delay)
elapsed += delay
@staticmethod
def async_sleep(secs):
d = defer.Deferred()
reactor.callLater(secs, d.callback, None)
return d
def get_debug_port(self):
return self._debug_port
def outReceived(self, data):
super(DebugServer, self).outReceived(
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
)
if self._ready:
return
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
if ready_pattern:
self._ready = ready_pattern.encode() in data
def processEnded(self, reason):
self._process_ended = True
super(DebugServer, self).processEnded(reason)
def terminate(self):
if self._process_ended or not self._transport:
return
try:
self._transport.signalProcess("KILL")
except: # pylint: disable=bare-except
pass


@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from fnmatch import fnmatch
@ -21,7 +22,6 @@ from serial.tools import miniterm
from platformio import exception, fs, util
from platformio.commands.device import helpers as device_helpers
from platformio.compat import dump_json_to_unicode
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import NotPlatformIOProjectError
@ -52,9 +52,7 @@ def device_list( # pylint: disable=too-many-branches
single_key = list(data)[0] if len(list(data)) == 1 else None
if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data)
)
return click.echo(json.dumps(data[single_key] if single_key else data))
titles = {
"serial": "Serial Ports",
@ -179,7 +177,9 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
for name in os.listdir(filters_dir):
if not name.endswith(".py"):
continue
device_helpers.load_monitor_filter(os.path.join(filters_dir, name))
device_helpers.load_monitor_filter(
os.path.join(filters_dir, name), options=kwargs
)
project_options = {}
try:
@ -193,9 +193,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
if "platform" in project_options:
with fs.cd(kwargs["project_dir"]):
platform = PlatformFactory.new(project_options["platform"])
device_helpers.register_platform_filters(
platform, kwargs["project_dir"], kwargs["environment"]
)
device_helpers.register_platform_filters(platform, options=kwargs)
if not kwargs["port"]:
ports = util.get_serial_ports(filter_hwid=True)


@ -18,12 +18,13 @@ from platformio.project.config import ProjectConfig
class DeviceMonitorFilter(miniterm.Transform):
def __init__(self, project_dir=None, environment=None):
""" Called by PlatformIO to pass context """
def __init__(self, options=None):
"""Called by PlatformIO to pass context"""
miniterm.Transform.__init__(self)
self.project_dir = project_dir
self.environment = environment
self.options = options or {}
self.project_dir = self.options.get("project_dir")
self.environment = self.options.get("environment")
self.config = ProjectConfig.get_instance()
if not self.environment:
@ -34,7 +35,7 @@ class DeviceMonitorFilter(miniterm.Transform):
self.environment = self.config.envs()[0]
def __call__(self):
""" Called by the miniterm library when the filter is actually used """
"""Called by the miniterm library when the filter is actually used"""
return self
@property


@ -31,6 +31,7 @@ class LogToFile(DeviceMonitorFilter):
"%y%m%d-%H%M%S"
)
print("--- Logging an output to %s" % os.path.abspath(log_file_name))
# pylint: disable=consider-using-with
self._log_fp = io.open(log_file_name, "w", encoding="utf-8")
return self


@ -22,10 +22,17 @@ class SendOnEnter(DeviceMonitorFilter):
super(SendOnEnter, self).__init__(*args, **kwargs)
self._buffer = ""
if self.options.get("eol") == "CR":
self._eol = "\r"
elif self.options.get("eol") == "LF":
self._eol = "\n"
else:
self._eol = "\r\n"
def tx(self, text):
self._buffer += text
if self._buffer.endswith("\r\n"):
text = self._buffer[:-2]
if self._buffer.endswith(self._eol):
text = self._buffer
self._buffer = ""
return text
return ""


@ -76,7 +76,7 @@ def get_board_hwids(project_dir, platform, board):
return platform.board_config(board).get("build.hwids", [])
def load_monitor_filter(path, project_dir=None, environment=None):
def load_monitor_filter(path, options=None):
name = os.path.basename(path)
name = name[: name.find(".")]
module = load_python_module("platformio.commands.device.filters.%s" % name, path)
@ -87,12 +87,12 @@ def load_monitor_filter(path, project_dir=None, environment=None):
or cls == DeviceMonitorFilter
):
continue
obj = cls(project_dir, environment)
obj = cls(options)
miniterm.TRANSFORMATIONS[obj.NAME] = obj
return True
def register_platform_filters(platform, project_dir, environment):
def register_platform_filters(platform, options=None):
monitor_dir = os.path.join(platform.get_dir(), "monitor")
if not os.path.isdir(monitor_dir):
return
@ -103,4 +103,4 @@ def register_platform_filters(platform, project_dir, environment):
path = os.path.join(monitor_dir, name)
if not os.path.isfile(path):
continue
load_monitor_filter(path, project_dir, environment)
load_monitor_filter(path, options)
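Since filters are now constructed as `cls(options)`, a custom monitor filter receives the full `device monitor` option set instead of only the project directory and environment. A hypothetical user filter built on that contract; the import path of `DeviceMonitorFilter` is an assumption here, only the constructor and `NAME` registration contract is taken from the code above:

# Hypothetical example filter; the module path below is assumed, not shown in this diff.
from platformio.commands.device.filters.base import DeviceMonitorFilter

class PrefixWithEnv(DeviceMonitorFilter):
    NAME = "prefix_env"  # registered as miniterm.TRANSFORMATIONS[NAME]

    def rx(self, text):
        # self.environment / self.options are populated from the monitor kwargs
        return "[%s] %s" % (self.environment or "default", text)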


@ -12,20 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals,too-many-statements
import mimetypes
import socket
from os.path import isdir
import click
from platformio import exception
from platformio.compat import WINDOWS
from platformio.package.manager.core import get_core_package_dir, inject_contrib_pysite
from platformio.commands.home.helpers import is_port_used
from platformio.commands.home.run import run_server
@click.command("home", short_help="UI to manage PlatformIO")
@click.command("home", short_help="GUI to manage PlatformIO")
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
@click.option(
"--host",
@ -45,61 +40,28 @@ from platformio.package.manager.core import get_core_package_dir, inject_contrib
"are connected. Default is 0 which means never auto shutdown"
),
)
def cli(port, host, no_open, shutdown_timeout):
# pylint: disable=import-error, import-outside-toplevel
# import contrib modules
inject_contrib_pysite()
from autobahn.twisted.resource import WebSocketResource
from twisted.internet import reactor
from twisted.web import server
from twisted.internet.error import CannotListenError
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.server import JSONRPCServerFactory
from platformio.commands.home.web import WebRoot
factory = JSONRPCServerFactory(shutdown_timeout)
factory.addHandler(AppRPC(), namespace="app")
factory.addHandler(IDERPC(), namespace="ide")
factory.addHandler(MiscRPC(), namespace="misc")
factory.addHandler(OSRPC(), namespace="os")
factory.addHandler(PIOCoreRPC(), namespace="core")
factory.addHandler(ProjectRPC(), namespace="project")
factory.addHandler(AccountRPC(), namespace="account")
contrib_dir = get_core_package_dir("contrib-piohome")
if not isdir(contrib_dir):
raise exception.PlatformioException("Invalid path to PIO Home Contrib")
@click.option(
"--session-id",
help=(
"A unique session identifier to keep PIO Home isolated from other instances "
"and protect from 3rd party access"
),
)
def cli(port, host, no_open, shutdown_timeout, session_id):
# Ensure PIO Home mimetypes are known
mimetypes.add_type("text/html", ".html")
mimetypes.add_type("text/css", ".css")
mimetypes.add_type("application/javascript", ".js")
root = WebRoot(contrib_dir)
root.putChild(b"wsrpc", WebSocketResource(factory))
site = server.Site(root)
# hook for `platformio-node-helpers`
if host == "__do_not_start__":
return
already_started = is_port_used(host, port)
home_url = "http://%s:%d" % (host, port)
if not no_open:
if already_started:
click.launch(home_url)
else:
reactor.callLater(1, lambda: click.launch(home_url))
home_url = "http://%s:%d%s" % (
host,
port,
("/session/%s/" % session_id) if session_id else "/",
)
click.echo(
"\n".join(
[
@ -108,45 +70,25 @@ def cli(port, host, no_open, shutdown_timeout):
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
"|__|____|__%s" % ("_" * len(home_url)),
]
)
)
click.echo("")
click.echo("Open PlatformIO Home in your browser by this URL => %s" % home_url)
try:
reactor.listenTCP(port, site, interface=host)
except CannotListenError as e:
click.secho(str(e), fg="red", err=True)
already_started = True
if already_started:
if is_port_used(host, port):
click.secho(
"PlatformIO Home server is already started in another process.", fg="yellow"
)
if not no_open:
click.launch(home_url)
return
click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")
reactor.run()
def is_port_used(host, port):
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if WINDOWS:
try:
s.bind((host, port))
s.close()
return False
except (OSError, socket.error):
pass
else:
try:
s.connect((host, port))
s.close()
except socket.error:
return False
return True
run_server(
host=host,
port=port,
no_open=no_open,
shutdown_timeout=shutdown_timeout,
home_url=home_url,
)


@ -12,40 +12,49 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=keyword-arg-before-vararg,arguments-differ,signature-differs
import socket
import requests
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import threads # pylint: disable=import-error
from starlette.concurrency import run_in_threadpool
from platformio import util
from platformio.compat import IS_WINDOWS
from platformio.proc import where_is_program
class AsyncSession(requests.Session):
def __init__(self, n=None, *args, **kwargs):
if n:
pool = reactor.getThreadPool()
pool.adjustPoolsize(0, n)
super(AsyncSession, self).__init__(*args, **kwargs)
def request(self, *args, **kwargs):
async def request( # pylint: disable=signature-differs,invalid-overridden-method
self, *args, **kwargs
):
func = super(AsyncSession, self).request
return threads.deferToThread(func, *args, **kwargs)
def wrap(self, *args, **kwargs): # pylint: disable=no-self-use
return defer.ensureDeferred(*args, **kwargs)
return await run_in_threadpool(func, *args, **kwargs)
@util.memoized(expire="60s")
def requests_session():
return AsyncSession(n=5)
return AsyncSession()
@util.memoized(expire="60s")
def get_core_fullpath():
return where_is_program(
"platformio" + (".exe" if "windows" in util.get_systype() else "")
)
return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
def is_port_used(host, port):
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if IS_WINDOWS:
try:
s.bind((host, port))
s.close()
return False
except (OSError, socket.error):
pass
else:
try:
s.connect((host, port))
s.close()
except socket.error:
return False
return True
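The relocated `is_port_used()` probes differently per platform: on Windows it tries to bind (a failed bind means something is already listening), elsewhere it tries to connect. A short usage sketch against the PIO Home defaults shown above:

from platformio.commands.home.helpers import is_port_used

host, port = "127.0.0.1", 8008  # PIO Home defaults from the "home" command
if is_port_used(host, port):
    print("PIO Home already appears to be running on http://%s:%d" % (host, port))
else:
    print("Port %d is free" % port)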


@ -12,18 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import jsonrpc # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.clients.account import AccountClient
class AccountRPC(object):
class AccountRPC:
@staticmethod
def call_client(method, *args, **kwargs):
try:
client = AccountClient()
return getattr(client, method)(*args, **kwargs)
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4003, message="PIO Account Call Error", data=str(e)
)


@ -20,7 +20,7 @@ from platformio import __version__, app, fs, util
from platformio.project.helpers import get_project_core_dir, is_platformio_project
class AppRPC(object):
class AppRPC:
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")


@ -14,29 +14,30 @@
import time
import jsonrpc # pylint: disable=import-error
from twisted.internet import defer # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.compat import aio_get_running_loop
class IDERPC(object):
class IDERPC:
def __init__(self):
self._queue = {}
def send_command(self, sid, command, params):
if not self._queue.get(sid):
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4005, message="PIO Home IDE agent is not started"
)
while self._queue[sid]:
self._queue[sid].pop().callback(
self._queue[sid].pop().set_result(
{"id": time.time(), "method": command, "params": params}
)
def listen_commands(self, sid=0):
async def listen_commands(self, sid=0):
if sid not in self._queue:
self._queue[sid] = []
self._queue[sid].append(defer.Deferred())
return self._queue[sid][-1]
self._queue[sid].append(aio_get_running_loop().create_future())
return await self._queue[sid][-1]
def open_project(self, sid, project_dir):
return self.send_command(sid, "open_project", project_dir)


@ -15,14 +15,13 @@
import json
import time
from twisted.internet import defer, reactor # pylint: disable=import-error
from platformio.cache import ContentCache
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.compat import aio_create_task
class MiscRPC(object):
def load_latest_tweets(self, data_url):
class MiscRPC:
async def load_latest_tweets(self, data_url):
cache_key = ContentCache.key_from_args(data_url, "tweets")
cache_valid = "180d"
with ContentCache() as cc:
@ -31,22 +30,20 @@ class MiscRPC(object):
cache_data = json.loads(cache_data)
# automatically update cache in background every 12 hours
if cache_data["time"] < (time.time() - (3600 * 12)):
reactor.callLater(
5, self._preload_latest_tweets, data_url, cache_key, cache_valid
aio_create_task(
self._preload_latest_tweets(data_url, cache_key, cache_valid)
)
return cache_data["result"]
result = self._preload_latest_tweets(data_url, cache_key, cache_valid)
return result
return await self._preload_latest_tweets(data_url, cache_key, cache_valid)
@staticmethod
@defer.inlineCallbacks
def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((yield OSRPC.fetch_content(data_url)))
async def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((await OSRPC.fetch_content(data_url)))
with ContentCache() as cc:
cc.set(
cache_key,
json.dumps({"time": int(time.time()), "result": result}),
cache_valid,
)
defer.returnValue(result)
return result
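The rewritten `MiscRPC` keeps the stale-while-revalidate behaviour, but the background refresh is now an asyncio task (`aio_create_task`) rather than a Twisted `callLater`. A generic asyncio sketch of the same pattern, with an in-memory dict standing in for `ContentCache` and a stub fetcher (all names here are illustrative):

import asyncio
import time

_cache = {}  # key -> {"time": epoch, "result": ...}; stands in for ContentCache

async def fetch(url):
    await asyncio.sleep(0.1)  # placeholder for the real HTTP request
    return {"url": url}

async def refresh(key, url):
    _cache[key] = {"time": time.time(), "result": await fetch(url)}
    return _cache[key]["result"]

async def get_cached(url, max_age=12 * 3600):
    key = url  # the real code derives this via ContentCache.key_from_args()
    entry = _cache.get(key)
    if entry:
        if entry["time"] < time.time() - max_age:
            asyncio.create_task(refresh(key, url))  # stale: refresh in the background
        return entry["result"]                      # ...but answer immediately
    return await refresh(key, url)                  # cold cache: fetch before answering

print(asyncio.run(get_cached("https://example.com/tweets.json")))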


@ -14,25 +14,23 @@
from __future__ import absolute_import
import glob
import io
import os
import shutil
from functools import cmp_to_key
import click
from twisted.internet import defer # pylint: disable=import-error
from platformio import __default_requests_timeout__, fs, util
from platformio.cache import ContentCache
from platformio.clients.http import ensure_internet_on
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding, glob_recursive
class OSRPC(object):
class OSRPC:
@staticmethod
@defer.inlineCallbacks
def fetch_content(uri, data=None, headers=None, cache_valid=None):
async def fetch_content(uri, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent": (
@ -46,18 +44,18 @@ class OSRPC(object):
if cache_key:
result = cc.get(cache_key)
if result is not None:
defer.returnValue(result)
return result
# check the internet connection first to avoid a 60-second timeout
ensure_internet_on(raise_exception=True)
session = helpers.requests_session()
if data:
r = yield session.post(
r = await session.post(
uri, data=data, headers=headers, timeout=__default_requests_timeout__
)
else:
r = yield session.get(
r = await session.get(
uri, headers=headers, timeout=__default_requests_timeout__
)
@ -66,11 +64,11 @@ class OSRPC(object):
if cache_valid:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
defer.returnValue(result)
return result
def request_content(self, uri, data=None, headers=None, cache_valid=None):
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
if uri.startswith("http"):
return self.fetch_content(uri, data, headers, cache_valid)
return await self.fetch_content(uri, data, headers, cache_valid)
if os.path.isfile(uri):
with io.open(uri, encoding="utf-8") as fp:
return fp.read()
@ -82,13 +80,11 @@ class OSRPC(object):
@staticmethod
def reveal_file(path):
return click.launch(
path.encode(get_filesystem_encoding()) if PY2 else path, locate=True
)
return click.launch(path, locate=True)
@staticmethod
def open_file(path):
return click.launch(path.encode(get_filesystem_encoding()) if PY2 else path)
return click.launch(path)
@staticmethod
def is_file(path):
@ -121,7 +117,9 @@ class OSRPC(object):
result = set()
for pathname in pathnames:
result |= set(
glob_recursive(os.path.join(root, pathname) if root else pathname)
glob.glob(
os.path.join(root, pathname) if root else pathname, recursive=True
)
)
return list(result)


@ -17,23 +17,15 @@ from __future__ import absolute_import
import json
import os
import sys
from io import BytesIO, StringIO
from io import StringIO
import click
import jsonrpc # pylint: disable=import-error
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import threads # pylint: disable=import-error
from twisted.internet import utils # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from starlette.concurrency import run_in_threadpool
from platformio import __main__, __version__, fs
from platformio import __main__, __version__, fs, proc
from platformio.commands.home import helpers
from platformio.compat import (
PY2,
get_filesystem_encoding,
get_locale_encoding,
is_bytes,
string_types,
)
from platformio.compat import get_locale_encoding, is_bytes
try:
from thread import get_ident as thread_get_ident
@ -52,13 +44,11 @@ class MultiThreadingStdStream(object):
def _ensure_thread_buffer(self, thread_id):
if thread_id not in self._buffers:
self._buffers[thread_id] = BytesIO() if PY2 else StringIO()
self._buffers[thread_id] = StringIO()
def write(self, value):
thread_id = thread_get_ident()
self._ensure_thread_buffer(thread_id)
if PY2 and isinstance(value, unicode): # pylint: disable=undefined-variable
value = value.encode()
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value
)
@ -74,7 +64,7 @@ class MultiThreadingStdStream(object):
return result
class PIOCoreRPC(object):
class PIOCoreRPC:
@staticmethod
def version():
return __version__
@ -89,16 +79,9 @@ class PIOCoreRPC(object):
sys.stderr = PIOCoreRPC.thread_stderr
@staticmethod
def call(args, options=None):
return defer.maybeDeferred(PIOCoreRPC._call_generator, args, options)
@staticmethod
@defer.inlineCallbacks
def _call_generator(args, options=None):
async def call(args, options=None):
for i, arg in enumerate(args):
if isinstance(arg, string_types):
args[i] = arg.encode(get_filesystem_encoding()) if PY2 else arg
else:
if not isinstance(arg, str):
args[i] = str(arg)
options = options or {}
@ -106,27 +89,34 @@ class PIOCoreRPC(object):
try:
if options.get("force_subprocess"):
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
else:
result = yield PIOCoreRPC._call_inline(args, options)
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
result = await PIOCoreRPC._call_inline(args, options)
try:
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
return PIOCoreRPC._process_result(result, to_json)
except ValueError:
# fall-back to subprocess method
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4003, message="PIO Core Call Error", data=str(e)
)
@staticmethod
def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
cwd = options.get("cwd") or os.getcwd()
async def _call_subprocess(args, options):
result = await run_in_threadpool(
proc.exec_command,
[helpers.get_core_fullpath()] + args,
cwd=options.get("cwd") or os.getcwd(),
)
return (result["out"], result["err"], result["returncode"])
def _thread_task():
@staticmethod
async def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
def _thread_safe_call(args, cwd):
with fs.cd(cwd):
exit_code = __main__.main(["-c"] + args)
return (
@ -135,16 +125,8 @@ class PIOCoreRPC(object):
exit_code,
)
return threads.deferToThread(_thread_task)
@staticmethod
def _call_subprocess(args, options):
cwd = (options or {}).get("cwd") or os.getcwd()
return utils.getProcessOutputAndValue(
helpers.get_core_fullpath(),
args,
path=cwd,
env={k: v for k, v in os.environ.items() if "%" not in k},
return await run_in_threadpool(
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
)
@staticmethod
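The common thread of this refactor is replacing Twisted's `deferToThread`/`getProcessOutputAndValue` with Starlette's `run_in_threadpool`, so blocking work (an in-process CLI call or a subprocess) can simply be awaited from async RPC handlers. A minimal sketch of that pattern with an illustrative blocking function:

import asyncio
import subprocess
import sys
from starlette.concurrency import run_in_threadpool

def blocking_call(args):
    # any blocking callable works; here we run a command and capture its output
    done = subprocess.run(args, capture_output=True, text=True)
    return done.stdout, done.stderr, done.returncode

async def main():
    out, err, code = await run_in_threadpool(blocking_call, [sys.executable, "--version"])
    print(code, out or err)

asyncio.run(main())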


@ -18,12 +18,11 @@ import os
import shutil
import time
import jsonrpc # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from platformio import exception, fs
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.package.manager.platform import PlatformPackageManager
from platformio.project.config import ProjectConfig
@ -32,7 +31,7 @@ from platformio.project.helpers import get_project_dir, is_platformio_project
from platformio.project.options import get_config_options_schema
class ProjectRPC(object):
class ProjectRPC:
@staticmethod
def config_call(init_kwargs, method, *args):
assert isinstance(init_kwargs, dict)
@ -185,7 +184,7 @@ class ProjectRPC(object):
)
return sorted(result, key=lambda data: data["platform"]["title"])
def init(self, board, framework, project_dir):
async def init(self, board, framework, project_dir):
assert project_dir
state = AppRPC.load_state()
if not os.path.isdir(project_dir):
@ -198,14 +197,13 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
d.addCallback(self._generate_project_main, project_dir, framework)
return d
return self._generate_project_main(project_dir, board, framework)
@staticmethod
def _generate_project_main(_, project_dir, framework):
def _generate_project_main(project_dir, board, framework):
main_content = None
if framework == "arduino":
main_content = "\n".join(
@ -240,28 +238,41 @@ class ProjectRPC(object):
)
if not main_content:
return project_dir
is_cpp_project = True
pm = PlatformPackageManager()
try:
board = pm.board_config(board)
platforms = board.get("platforms", board.get("platform"))
if not isinstance(platforms, list):
platforms = [platforms]
c_based_platforms = ["intel_mcs51", "ststm8"]
is_cpp_project = not (set(platforms) & set(c_based_platforms))
except exception.PlatformioException:
pass
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
main_path = os.path.join(src_dir, "main.cpp")
main_path = os.path.join(
src_dir, "main.%s" % ("cpp" if is_cpp_project else "c")
)
if os.path.isfile(main_path):
return project_dir
if not os.path.isdir(src_dir):
os.makedirs(src_dir)
with open(main_path, "w") as fp:
with open(main_path, mode="w", encoding="utf8") as fp:
fp.write(main_content.strip())
return project_dir
def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
@staticmethod
async def import_arduino(board, use_arduino_libs, arduino_project_dir):
board = str(board)
if arduino_project_dir and PY2:
arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
# don't import PIO Project
if is_platformio_project(arduino_project_dir):
return arduino_project_dir
is_arduino_project = any(
[
os.path.isfile(
os.path.join(
arduino_project_dir,
@ -269,10 +280,9 @@ class ProjectRPC(object):
)
)
for ext in ("ino", "pde")
]
)
if not is_arduino_project:
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
)
@ -293,14 +303,9 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
return d
@staticmethod
def _finalize_arduino_import(_, project_dir, arduino_project_dir):
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
@ -310,9 +315,9 @@ class ProjectRPC(object):
return project_dir
@staticmethod
def import_pio(project_dir):
async def import_pio(project_dir):
if not project_dir or not is_platformio_project(project_dir):
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4001, message="Not a PlatformIO project: %s" % project_dir
)
new_project_dir = os.path.join(
@ -328,8 +333,7 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(
await PIOCoreRPC.call(
args, options={"cwd": new_project_dir, "force_subprocess": True}
)
d.addCallback(lambda _: new_project_dir)
return d
return new_project_dir


@ -12,90 +12,86 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=import-error
import click
import jsonrpc
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer, reactor
from ajsonrpc.dispatcher import Dispatcher
from ajsonrpc.manager import AsyncJSONRPCResponseManager
from starlette.endpoints import WebSocketEndpoint
from platformio.compat import PY2, dump_json_to_unicode, is_bytes
from platformio.compat import aio_create_task, aio_get_running_loop
from platformio.proc import force_exit
class JSONRPCServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.connection_nums += 1
if self.factory.shutdown_timer:
self.factory.shutdown_timer.cancel()
self.factory.shutdown_timer = None
class JSONRPCServerFactoryBase:
def onClose(self, wasClean, code, reason): # pylint: disable=unused-argument
self.factory.connection_nums -= 1
if self.factory.connection_nums == 0:
self.factory.shutdownByTimeout()
def onMessage(self, payload, isBinary): # pylint: disable=unused-argument
# click.echo("> %s" % payload)
response = jsonrpc.JSONRPCResponseManager.handle(
payload, self.factory.dispatcher
).data
# if error
if "result" not in response:
self.sendJSONResponse(response)
return None
d = defer.maybeDeferred(lambda: response["result"])
d.addCallback(self._callback, response)
d.addErrback(self._errback, response)
return None
def _callback(self, result, response):
response["result"] = result
self.sendJSONResponse(response)
def _errback(self, failure, response):
if isinstance(failure.value, JSONRPCDispatchException):
e = failure.value
else:
e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
del response["result"]
response["error"] = e.error._data # pylint: disable=protected-access
self.sendJSONResponse(response)
def sendJSONResponse(self, response):
# click.echo("< %s" % response)
if "error" in response:
click.secho("Error: %s" % response["error"], fg="red", err=True)
response = dump_json_to_unicode(response)
if not PY2 and not is_bytes(response):
response = response.encode("utf-8")
self.sendMessage(response)
class JSONRPCServerFactory(WebSocketServerFactory):
protocol = JSONRPCServerProtocol
connection_nums = 0
shutdown_timer = 0
shutdown_timer = None
def __init__(self, shutdown_timeout=0):
super(JSONRPCServerFactory, self).__init__()
self.shutdown_timeout = shutdown_timeout
self.dispatcher = jsonrpc.Dispatcher()
self.manager = AsyncJSONRPCResponseManager(
Dispatcher(), is_server_error_verbose=True
)
def shutdownByTimeout(self):
def __call__(self, *args, **kwargs):
raise NotImplementedError
def addObjectHandler(self, handler, namespace):
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
def on_client_connect(self):
self.connection_nums += 1
if self.shutdown_timer:
self.shutdown_timer.cancel()
self.shutdown_timer = None
def on_client_disconnect(self):
self.connection_nums -= 1
if self.connection_nums < 1:
self.connection_nums = 0
if self.connection_nums == 0:
self.shutdown_by_timeout()
async def on_shutdown(self):
pass
def shutdown_by_timeout(self):
if self.shutdown_timeout < 1:
return
def _auto_shutdown_server():
click.echo("Automatically shutdown server on timeout")
reactor.stop()
force_exit()
self.shutdown_timer = reactor.callLater(
self.shutdown_timer = aio_get_running_loop().call_later(
self.shutdown_timeout, _auto_shutdown_server
)
def addHandler(self, handler, namespace):
self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
class WebSocketJSONRPCServerFactory(JSONRPCServerFactoryBase):
def __call__(self, *args, **kwargs):
ws = WebSocketJSONRPCServer(*args, **kwargs)
ws.factory = self
return ws
class WebSocketJSONRPCServer(WebSocketEndpoint):
encoding = "text"
factory: WebSocketJSONRPCServerFactory = None
async def on_connect(self, websocket):
await websocket.accept()
self.factory.on_client_connect() # pylint: disable=no-member
async def on_receive(self, websocket, data):
aio_create_task(self._handle_rpc(websocket, data))
async def on_disconnect(self, websocket, close_code):
self.factory.on_client_disconnect() # pylint: disable=no-member
async def _handle_rpc(self, websocket, data):
# pylint: disable=no-member
response = await self.factory.manager.get_response_for_payload(data)
if response.error:
click.secho("Error: %s" % response.error.data, fg="red", err=True)
await websocket.send_text(self.factory.manager.serialize(response.body))
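The new factory simply wires handler objects into `ajsonrpc`'s `Dispatcher` and awaits responses from `AsyncJSONRPCResponseManager`; the Starlette WebSocket endpoint is only the transport. A stripped-down sketch of the same dispatch path without the WebSocket layer, reusing only the calls visible above (treat it as an approximation under those assumptions, not an API reference; the handler class is illustrative):

import asyncio
from ajsonrpc.dispatcher import Dispatcher
from ajsonrpc.manager import AsyncJSONRPCResponseManager

class DemoRPC:
    async def ping(self):
        return "pong"

manager = AsyncJSONRPCResponseManager(Dispatcher(), is_server_error_verbose=True)
manager.dispatcher.add_object(DemoRPC(), prefix="demo.")

async def main():
    request = '{"jsonrpc": "2.0", "id": 1, "method": "demo.ping"}'
    response = await manager.get_response_for_payload(request)
    # roughly: {"jsonrpc": "2.0", "id": 1, "result": "pong"}
    print(manager.serialize(response.body))

asyncio.run(main())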


@ -0,0 +1,99 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from urllib.parse import urlparse
import click
import uvicorn
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.responses import PlainTextResponse
from starlette.routing import Mount, Route, WebSocketRoute
from starlette.staticfiles import StaticFiles
from starlette.status import HTTP_403_FORBIDDEN
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
from platformio.compat import aio_get_running_loop
from platformio.exception import PlatformioException
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import force_exit
class ShutdownMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", {}):
await shutdown_server()
await self.app(scope, receive, send)
async def shutdown_server(_=None):
aio_get_running_loop().call_later(0.5, force_exit)
return PlainTextResponse("Server has been shutdown!")
async def protected_page(_):
return PlainTextResponse(
"Protected PlatformIO Home session", status_code=HTTP_403_FORBIDDEN
)
def run_server(host, port, no_open, shutdown_timeout, home_url):
contrib_dir = get_core_package_dir("contrib-piohome")
if not os.path.isdir(contrib_dir):
raise PlatformioException("Invalid path to PIO Home Contrib")
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
ws_rpc_factory.addObjectHandler(AccountRPC(), namespace="account")
ws_rpc_factory.addObjectHandler(AppRPC(), namespace="app")
ws_rpc_factory.addObjectHandler(IDERPC(), namespace="ide")
ws_rpc_factory.addObjectHandler(MiscRPC(), namespace="misc")
ws_rpc_factory.addObjectHandler(OSRPC(), namespace="os")
ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace="core")
ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace="project")
path = urlparse(home_url).path
routes = [
WebSocketRoute(path + "wsrpc", ws_rpc_factory, name="wsrpc"),
Route(path + "__shutdown__", shutdown_server, methods=["POST"]),
Mount(path, StaticFiles(directory=contrib_dir, html=True), name="static"),
]
if path != "/":
routes.append(Route("/", protected_page))
uvicorn.run(
Starlette(
middleware=[Middleware(ShutdownMiddleware)],
routes=routes,
on_startup=[
lambda: click.echo(
"PIO Home has been started. Press Ctrl+C to shutdown."
),
lambda: None if no_open else click.launch(home_url),
],
),
host=host,
port=port,
log_level="warning",
)
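A hedged usage sketch of `run_server` (the host, port, and URL values below are placeholders, not PlatformIO defaults):

run_server(
    host="127.0.0.1",
    port=8008,
    no_open=True,                       # do not auto-launch the browser
    shutdown_timeout=0,                 # values below 1 disable the idle-shutdown timer
    home_url="http://127.0.0.1:8008/",
)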

View File

@ -14,6 +14,7 @@
# pylint: disable=too-many-branches, too-many-locals
import json
import os
import time
@ -23,7 +24,6 @@ from tabulate import tabulate
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
from platformio.compat import dump_json_to_unicode
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.meta import PackageItem, PackageSpec
@ -286,7 +286,7 @@ def lib_update( # pylint: disable=too-many-arguments
if json_output:
return click.echo(
dump_json_to_unicode(
json.dumps(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
@ -315,7 +315,7 @@ def lib_list(ctx, json_output):
if json_output:
return click.echo(
dump_json_to_unicode(
json.dumps(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
@ -359,7 +359,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
)
if json_output:
click.echo(dump_json_to_unicode(result))
click.echo(json.dumps(result))
return
if result["total"] == 0:
@ -418,7 +418,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
def lib_builtin(storage, json_output):
items = get_builtin_libs(storage)
if json_output:
return click.echo(dump_json_to_unicode(items))
return click.echo(json.dumps(items))
for storage_ in items:
if not storage_["items"]:
@ -442,7 +442,7 @@ def lib_show(library, json_output):
regclient = lm.get_registry_client_instance()
lib = regclient.fetch_json_data("get", "/v2/lib/info/%d" % lib_id, cache_valid="1h")
if json_output:
return click.echo(dump_json_to_unicode(lib))
return click.echo(json.dumps(lib))
title = "{ownername}/{name}".format(**lib)
click.secho(title, fg="cyan")
@ -538,7 +538,7 @@ def lib_stats(json_output):
result = regclient.fetch_json_data("get", "/v2/lib/stats", cache_valid="1h")
if json_output:
return click.echo(dump_json_to_unicode(result))
return click.echo(json.dumps(result))
for key in ("updated", "added"):
tabular_data = [

View File

@ -17,12 +17,17 @@ import tempfile
from datetime import datetime
import click
from tabulate import tabulate
from platformio import fs
from platformio.clients.account import AccountClient
from platformio.clients.registry import RegistryClient
from platformio.compat import ensure_python3
from platformio.exception import UserSideException
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema, ManifestValidationError
from platformio.package.meta import PackageSpec, PackageType
from platformio.package.pack import PackagePacker
from platformio.package.unpack import FileUnpacker, TARArchiver
def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
@ -35,6 +40,54 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
return value
def load_manifest_from_archive(path):
return ManifestSchema().load_manifest(
ManifestParserFactory.new_from_archive(path).as_dict()
)
def check_package_duplicates(
owner, type, name, version, system
): # pylint: disable=redefined-builtin
found = False
items = (
RegistryClient()
.list_packages(filters=dict(types=[type], names=[name]))
.get("items")
)
if not items:
return True
# duplicated version by owner / system
found = False
for item in items:
if item["owner"]["username"] != owner or item["version"]["name"] != version:
continue
if not system:
found = True
break
published_systems = []
for f in item["version"]["files"]:
published_systems.extend(f.get("system", []))
found = set(system).issubset(set(published_systems))
if found:
raise UserSideException(
"The package `%s/%s@%s` is already published in the registry"
% (owner, name, version)
)
other_owners = [
item["owner"]["username"]
for item in items
if item["owner"]["username"] != owner
]
if other_owners:
click.secho(
"\nWarning! A package with the name `%s` is already published by the next "
"owners: %s\n" % (name, ", ".join(other_owners)),
fg="yellow",
)
return True
@click.group("package", short_help="Package manager")
def cli():
pass
@ -53,6 +106,12 @@ def cli():
def package_pack(package, output):
p = PackagePacker(package)
archive_path = p.pack(output)
# validate manifest
try:
load_manifest_from_archive(archive_path)
except ManifestValidationError as e:
os.remove(archive_path)
raise e
click.secho('Wrote a tarball to "%s"' % archive_path, fg="green")
@ -79,15 +138,63 @@ def package_pack(package, output):
default=True,
help="Notify by email when package is processed",
)
def package_publish(package, owner, released_at, private, notify):
assert ensure_python3()
@click.option(
"--non-interactive",
is_flag=True,
help="Do not show interactive prompt",
)
def package_publish( # pylint: disable=too-many-arguments, too-many-locals
package, owner, released_at, private, notify, non_interactive
):
click.secho("Preparing a package...", fg="cyan")
owner = owner or AccountClient().get_logged_username()
do_not_pack = not os.path.isdir(package) and isinstance(
FileUnpacker.new_archiver(package), TARArchiver
)
archive_path = None
with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member
# publish .tar.gz instantly without repacking
if do_not_pack:
archive_path = package
else:
with fs.cd(tmp_dir):
p = PackagePacker(package)
archive_path = p.pack()
response = RegistryClient().publish_package(
archive_path, owner, released_at, private, notify
type_ = PackageType.from_archive(archive_path)
manifest = load_manifest_from_archive(archive_path)
name = manifest.get("name")
version = manifest.get("version")
data = [
("Type:", type_),
("Owner:", owner),
("Name:", name),
("Version:", version),
]
if manifest.get("system"):
data.insert(len(data) - 1, ("System:", ", ".join(manifest.get("system"))))
click.echo(tabulate(data, tablefmt="plain"))
# look for duplicates
check_package_duplicates(owner, type_, name, version, manifest.get("system"))
if not non_interactive:
click.confirm(
"Are you sure you want to publish the %s %s to the registry?\n"
% (
type_,
click.style(
"%s/%s@%s" % (owner, name, version),
fg="cyan",
),
),
abort=True,
)
response = RegistryClient().publish_package(
owner, type_, archive_path, released_at, private, notify
)
if not do_not_pack:
os.remove(archive_path)
click.secho(response.get("message"), fg="green")
@ -110,9 +217,9 @@ def package_publish(package, owner, released_at, private, notify):
def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin
spec = PackageSpec(package)
response = RegistryClient().unpublish_package(
owner=spec.owner or AccountClient().get_logged_username(),
type=type,
name=spec.name,
owner=spec.owner,
version=str(spec.requirements),
undo=undo,
)
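A sketch of how the duplicate check composes with the manifest helpers defined above (the archive path and owner name are placeholders):

archive_path = "example-1.0.0.tar.gz"                # placeholder archive
manifest = load_manifest_from_archive(archive_path)
check_package_duplicates(
    owner="jane",                                    # placeholder owner name
    type=PackageType.from_archive(archive_path),
    name=manifest.get("name"),
    version=manifest.get("version"),
    system=manifest.get("system"),
)  # raises UserSideException if this exact version is already published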

View File

@ -12,13 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import click
from platformio.cache import cleanup_content_cache
from platformio.commands.boards import print_boards
from platformio.compat import dump_json_to_unicode
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.package.version import get_original_version
@ -31,6 +31,301 @@ def cli():
pass
@cli.command("search", short_help="Search for development platform")
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_search(query, json_output):
platforms = []
for platform in _get_registry_platforms():
if query == "all":
query = ""
search_data = json.dumps(platform)
if query and query.lower() not in search_data.lower():
continue
platforms.append(
_get_registry_platform_data(
platform["name"], with_boards=False, expose_packages=False
)
)
if json_output:
click.echo(json.dumps(platforms))
else:
_print_platforms(platforms)
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_frameworks(query, json_output):
regclient = PlatformPackageManager().get_registry_client_instance()
frameworks = []
for framework in regclient.fetch_json_data(
"get", "/v2/frameworks", cache_valid="1d"
):
if query == "all":
query = ""
search_data = json.dumps(framework)
if query and query.lower() not in search_data.lower():
continue
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
framework["platforms"] = [
platform["name"]
for platform in _get_registry_platforms()
if framework["name"] in platform["frameworks"]
]
frameworks.append(framework)
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
if json_output:
click.echo(json.dumps(frameworks))
else:
_print_platforms(frameworks)
@cli.command("list", short_help="List installed development platforms")
@click.option("--json-output", is_flag=True)
def platform_list(json_output):
platforms = []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
platforms.append(
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
)
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
if json_output:
click.echo(json.dumps(platforms))
else:
_print_platforms(platforms)
@cli.command("show", short_help="Show details about development platform")
@click.argument("platform")
@click.option("--json-output", is_flag=True)
def platform_show(platform, json_output): # pylint: disable=too-many-branches
data = _get_platform_data(platform)
if not data:
raise UnknownPlatform(platform)
if json_output:
return click.echo(json.dumps(data))
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
click.echo(
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
)
click.echo("=" * (3 + len(dep + data["title"])))
click.echo(data["description"])
click.echo()
if "version" in data:
click.echo("Version: %s" % data["version"])
if data["homepage"]:
click.echo("Home: %s" % data["homepage"])
if data["repository"]:
click.echo("Repository: %s" % data["repository"])
if data["url"]:
click.echo("Vendor: %s" % data["url"])
if data["license"]:
click.echo("License: %s" % data["license"])
if data["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
if not data["packages"]:
return None
if not isinstance(data["packages"][0], dict):
click.echo("Packages: %s" % ", ".join(data["packages"]))
else:
click.echo()
click.secho("Packages", bold=True)
click.echo("--------")
for item in data["packages"]:
click.echo()
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
click.echo("-" * (8 + len(item["name"])))
if item["type"]:
click.echo("Type: %s" % item["type"])
click.echo("Requirements: %s" % item["requirements"])
click.echo(
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
)
if "version" in item:
click.echo("Version: %s" % item["version"])
if "originalVersion" in item:
click.echo("Original version: %s" % item["originalVersion"])
if "description" in item:
click.echo("Description: %s" % item["description"])
if data["boards"]:
click.echo()
click.secho("Boards", bold=True)
click.echo("------")
print_boards(data["boards"])
return True
@cli.command("install", short_help="Install new development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
@click.option("--with-package", multiple=True)
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install( # pylint: disable=too-many-arguments
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
silent,
force,
):
return _platform_install(
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
silent,
force,
)
def _platform_install( # pylint: disable=too-many-arguments
platforms,
with_package=None,
without_package=None,
skip_default_package=False,
with_all_packages=False,
silent=False,
force=False,
):
pm = PlatformPackageManager()
for platform in platforms:
pkg = pm.install(
spec=platform,
with_packages=with_package or [],
without_packages=without_package or [],
skip_default_package=skip_default_package,
with_all_packages=with_all_packages,
silent=silent,
force=force,
)
if pkg and not silent:
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of the packages will be installed later "
"depending on your build environment." % platform,
fg="green",
)
@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
pm = PlatformPackageManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully removed!" % platform,
fg="green",
)
@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option(
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
platforms, only_packages, only_check, dry_run, silent, json_output
):
pm = PlatformPackageManager()
platforms = platforms or pm.get_installed()
only_check = dry_run or only_check
if only_check and json_output:
result = []
for platform in platforms:
spec = None
pkg = None
if isinstance(platform, PackageItem):
pkg = platform
else:
spec = PackageSpec(platform)
pkg = pm.get_package(spec)
if not pkg:
continue
outdated = pm.outdated(pkg, spec)
if (
not outdated.is_outdated(allow_incompatible=True)
and not PlatformFactory.new(pkg).are_outdated_packages()
):
continue
data = _get_installed_platform_data(
pkg, with_boards=False, expose_packages=False
)
if outdated.is_outdated(allow_incompatible=True):
data["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(data)
return click.echo(json.dumps(result))
# cleanup cached board and platform lists
cleanup_content_cache("http")
for platform in platforms:
click.echo(
"Platform %s"
% click.style(
platform.metadata.name
if isinstance(platform, PackageItem)
else platform,
fg="cyan",
)
)
click.echo("--------")
pm.update(
platform, only_packages=only_packages, only_check=only_check, silent=silent
)
click.echo()
return True
#
# Helpers
#
def init_platform(name, skip_default_package=True, auto_install=True):
try:
return PlatformFactory.new(name)
except UnknownPlatform:
if auto_install:
_platform_install([name], skip_default_package=skip_default_package)
return PlatformFactory.new(name)
def _print_platforms(platforms):
for platform in platforms:
click.echo(
@ -162,264 +457,3 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
]
return data
@cli.command("search", short_help="Search for development platform")
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_search(query, json_output):
platforms = []
for platform in _get_registry_platforms():
if query == "all":
query = ""
search_data = dump_json_to_unicode(platform)
if query and query.lower() not in search_data.lower():
continue
platforms.append(
_get_registry_platform_data(
platform["name"], with_boards=False, expose_packages=False
)
)
if json_output:
click.echo(dump_json_to_unicode(platforms))
else:
_print_platforms(platforms)
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_frameworks(query, json_output):
regclient = PlatformPackageManager().get_registry_client_instance()
frameworks = []
for framework in regclient.fetch_json_data(
"get", "/v2/frameworks", cache_valid="1d"
):
if query == "all":
query = ""
search_data = dump_json_to_unicode(framework)
if query and query.lower() not in search_data.lower():
continue
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
framework["platforms"] = [
platform["name"]
for platform in _get_registry_platforms()
if framework["name"] in platform["frameworks"]
]
frameworks.append(framework)
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(frameworks))
else:
_print_platforms(frameworks)
@cli.command("list", short_help="List installed development platforms")
@click.option("--json-output", is_flag=True)
def platform_list(json_output):
platforms = []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
platforms.append(
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
)
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(platforms))
else:
_print_platforms(platforms)
@cli.command("show", short_help="Show details about development platform")
@click.argument("platform")
@click.option("--json-output", is_flag=True)
def platform_show(platform, json_output): # pylint: disable=too-many-branches
data = _get_platform_data(platform)
if not data:
raise UnknownPlatform(platform)
if json_output:
return click.echo(dump_json_to_unicode(data))
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
click.echo(
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
)
click.echo("=" * (3 + len(dep + data["title"])))
click.echo(data["description"])
click.echo()
if "version" in data:
click.echo("Version: %s" % data["version"])
if data["homepage"]:
click.echo("Home: %s" % data["homepage"])
if data["repository"]:
click.echo("Repository: %s" % data["repository"])
if data["url"]:
click.echo("Vendor: %s" % data["url"])
if data["license"]:
click.echo("License: %s" % data["license"])
if data["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
if not data["packages"]:
return None
if not isinstance(data["packages"][0], dict):
click.echo("Packages: %s" % ", ".join(data["packages"]))
else:
click.echo()
click.secho("Packages", bold=True)
click.echo("--------")
for item in data["packages"]:
click.echo()
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
click.echo("-" * (8 + len(item["name"])))
if item["type"]:
click.echo("Type: %s" % item["type"])
click.echo("Requirements: %s" % item["requirements"])
click.echo(
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
)
if "version" in item:
click.echo("Version: %s" % item["version"])
if "originalVersion" in item:
click.echo("Original version: %s" % item["originalVersion"])
if "description" in item:
click.echo("Description: %s" % item["description"])
if data["boards"]:
click.echo()
click.secho("Boards", bold=True)
click.echo("------")
print_boards(data["boards"])
return True
@cli.command("install", short_help="Install new development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
@click.option("--with-package", multiple=True)
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install( # pylint: disable=too-many-arguments
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
silent,
force,
):
pm = PlatformPackageManager()
for platform in platforms:
pkg = pm.install(
spec=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
with_all_packages=with_all_packages,
silent=silent,
force=force,
)
if pkg and not silent:
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of the packages will be installed later "
"depending on your build environment." % platform,
fg="green",
)
@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
pm = PlatformPackageManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully removed!" % platform,
fg="green",
)
@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option(
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
platforms, only_packages, only_check, dry_run, silent, json_output
):
pm = PlatformPackageManager()
platforms = platforms or pm.get_installed()
only_check = dry_run or only_check
if only_check and json_output:
result = []
for platform in platforms:
spec = None
pkg = None
if isinstance(platform, PackageItem):
pkg = platform
else:
spec = PackageSpec(platform)
pkg = pm.get_package(spec)
if not pkg:
continue
outdated = pm.outdated(pkg, spec)
if (
not outdated.is_outdated(allow_incompatible=True)
and not PlatformFactory.new(pkg).are_outdated_packages()
):
continue
data = _get_installed_platform_data(
pkg, with_boards=False, expose_packages=False
)
if outdated.is_outdated(allow_incompatible=True):
data["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(data)
return click.echo(dump_json_to_unicode(result))
# cleanup cached board and platform lists
cleanup_content_cache("http")
for platform in platforms:
click.echo(
"Platform %s"
% click.style(
platform.metadata.name
if isinstance(platform, PackageItem)
else platform,
fg="cyan",
)
)
click.echo("--------")
pm.update(
platform, only_packages=only_packages, only_check=only_check, silent=silent
)
click.echo()
return True
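The new `init_platform` helper replaces the install-on-demand boilerplate that the removed commands above carried inline. A hedged call sketch ("atmelavr" is only an example platform name):

# Returns the platform object created by PlatformFactory.new(), installing the
# platform first (without its default packages) when it is not yet known.
p = init_platform("atmelavr", skip_default_package=True, auto_install=True)
print(p.is_embedded())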

View File

@ -149,15 +149,19 @@ def project_init(
):
if not silent:
if project_dir == os.getcwd():
click.secho("\nThe current working directory", fg="yellow", nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho("will be used for the project.", fg="yellow")
click.secho("\nThe current working directory ", fg="yellow", nl=False)
try:
click.secho(project_dir, fg="cyan", nl=False)
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan", nl=False)
click.secho(" will be used for the project.", fg="yellow")
click.echo("")
click.echo(
"The next files/directories have been created in %s"
% click.style(project_dir, fg="cyan")
)
click.echo("The next files/directories have been created in ", nl=False)
try:
click.secho(project_dir, fg="cyan")
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan")
click.echo(
"%s - Put project header files here" % click.style("include", fg="cyan")
)
@ -236,7 +240,7 @@ def init_base_project(project_dir):
def init_include_readme(include_dir):
with open(os.path.join(include_dir, "README"), "w") as fp:
with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project header files.
@ -282,7 +286,7 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
def init_lib_readme(lib_dir):
with open(os.path.join(lib_dir, "README"), "w") as fp:
with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project specific (private) libraries.
@ -335,7 +339,7 @@ More information about PlatformIO Library Dependency Finder
def init_test_readme(test_dir):
with open(os.path.join(test_dir, "README"), "w") as fp:
with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for PlatformIO Unit Testing and project tests.
@ -356,7 +360,7 @@ def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
return
with open(conf_path, "w") as fp:
with open(conf_path, mode="w", encoding="utf8") as fp:
fp.write(".pio\n")

View File

@ -173,7 +173,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
address = port.getHost()
self.log.debug("Serial Bridge is started on {address!r}", address=address)
if "sock" in self.cmd_options:
with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
with open(
os.path.join(self.cmd_options["sock"], "sock"),
mode="w",
encoding="utf8",
) as fp:
fp.write("socket://localhost:%d" % address.port)
def client_terminal_stopped(self):

View File

@ -28,7 +28,6 @@ from platformio.commands.device import helpers as device_helpers
from platformio.commands.device.command import device_monitor as cmd_device_monitor
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.test.command import cli as cmd_test
from platformio.compat import ensure_python3
from platformio.package.manager.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError
@ -37,7 +36,6 @@ from platformio.project.exception import NotPlatformIOProjectError
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
assert ensure_python3()
ctx.obj = agent
inject_contrib_pysite(verify_openssl=True)
@ -338,7 +336,10 @@ def device_monitor(ctx, agents, **kwargs):
kwargs["baud"] = kwargs["baud"] or 9600
def _tx_target(sock_dir):
subcmd_argv = ["remote", "device", "monitor"]
subcmd_argv = ["remote"]
for agent in agents:
subcmd_argv.extend(["--agent", agent])
subcmd_argv.extend(["device", "monitor"])
subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
subcmd_argv.extend(["--sock", sock_dir])
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
@ -352,7 +353,7 @@ def device_monitor(ctx, agents, **kwargs):
sleep(0.1)
if not t.is_alive():
return
with open(sock_file) as fp:
with open(sock_file, encoding="utf8") as fp:
kwargs["port"] = fp.read()
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)
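With the change above, the spawned monitor process now carries the agent selection. A sketch of the resulting argument list (the agent name is a placeholder):

agents = ("dev-host",)                      # placeholder agent name
subcmd_argv = ["remote"]
for agent in agents:
    subcmd_argv.extend(["--agent", agent])
subcmd_argv.extend(["device", "monitor"])
# -> ['remote', '--agent', 'dev-host', 'device', 'monitor'], followed by the
#    forwarded monitor options and the --sock directory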

View File

@ -14,6 +14,7 @@
import operator
import os
import shutil
from multiprocessing import cpu_count
from time import time
@ -200,7 +201,7 @@ def print_processing_header(env, config, verbose=False):
"Processing %s (%s)"
% (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
click.secho("-" * terminal_width, bold=True)

View File

@ -54,11 +54,11 @@ def clean_build_dir(build_dir, config):
if isdir(build_dir):
# check project structure
if isfile(checksum_file):
with open(checksum_file) as fp:
with open(checksum_file, encoding="utf8") as fp:
if fp.read() == checksum:
return
fs.rmtree(build_dir)
makedirs(build_dir)
with open(checksum_file, "w") as fp:
with open(checksum_file, mode="w", encoding="utf8") as fp:
fp.write(checksum)

View File

@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.platform import init_platform
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import UndefinedEnvPlatformError
# pylint: disable=too-many-instance-attributes
@ -66,15 +64,7 @@ class EnvironmentProcessor(object):
if "monitor" in build_targets:
build_targets.remove("monitor")
try:
p = PlatformFactory.new(self.options["platform"])
except UnknownPlatform:
self.cmd_ctx.invoke(
cmd_platform_install,
platforms=[self.options["platform"]],
skip_default_package=True,
result = init_platform(self.options["platform"]).run(
build_vars, build_targets, self.silent, self.verbose, self.jobs
)
p = PlatformFactory.new(self.options["platform"])
result = p.run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
return result["returncode"] == 0

View File

@ -13,7 +13,6 @@
# limitations under the License.
import json
import os
import platform
import subprocess
import sys
@ -27,11 +26,15 @@ from platformio.commands.system.completion import (
install_completion_code,
uninstall_completion_code,
)
from platformio.commands.system.prune import (
prune_cached_data,
prune_core_packages,
prune_platform_packages,
)
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_cache_dir
@click.group("system", short_help="Miscellaneous system commands")
@ -66,7 +69,7 @@ def system_info(json_output):
data["platformio_exe"] = {
"title": "PlatformIO Core Executable",
"value": proc.where_is_program(
"platformio.exe" if proc.WINDOWS else "platformio"
"platformio.exe" if compat.IS_WINDOWS else "platformio"
),
}
data["python_exe"] = {
@ -99,22 +102,49 @@ def system_info(json_output):
@cli.command("prune", short_help="Remove unused data")
@click.option("--force", "-f", is_flag=True, help="Do not prompt for confirmation")
def system_prune(force):
click.secho("WARNING! This will remove:", fg="yellow")
click.echo(" - cached API requests")
click.echo(" - cached package downloads")
click.echo(" - temporary data")
if not force:
click.confirm("Do you want to continue?", abort=True)
@click.option(
"--dry-run", is_flag=True, help="Do not prune, only show data that will be removed"
)
@click.option("--cache", is_flag=True, help="Prune only cached data")
@click.option(
"--core-packages", is_flag=True, help="Prune only unnecessary core packages"
)
@click.option(
"--platform-packages",
is_flag=True,
help="Prune only unnecessary development platform packages",
)
def system_prune(force, dry_run, cache, core_packages, platform_packages):
if dry_run:
click.secho(
"Dry run mode (do not prune, only show data that will be removed)",
fg="yellow",
)
click.echo()
reclaimed_total = 0
cache_dir = get_project_cache_dir()
if os.path.isdir(cache_dir):
reclaimed_total += fs.calculate_folder_size(cache_dir)
fs.rmtree(cache_dir)
reclaimed_cache = 0
reclaimed_core_packages = 0
reclaimed_platform_packages = 0
prune_all = not any([cache, core_packages, platform_packages])
if cache or prune_all:
reclaimed_cache = prune_cached_data(force, dry_run)
click.echo()
if core_packages or prune_all:
reclaimed_core_packages = prune_core_packages(force, dry_run)
click.echo()
if platform_packages or prune_all:
reclaimed_platform_packages = prune_platform_packages(force, dry_run)
click.echo()
click.secho(
"Total reclaimed space: %s" % fs.humanize_file_size(reclaimed_total), fg="green"
"Total reclaimed space: %s"
% fs.humanize_file_size(
reclaimed_cache + reclaimed_core_packages + reclaimed_platform_packages
),
fg="green",
)
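Hedged usage examples for the extended command, using only the flags defined above: `pio system prune --dry-run` previews everything that would be removed, `pio system prune --cache --force` drops only cached data without a confirmation prompt, and `pio system prune --platform-packages` targets only unnecessary development platform packages.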

View File

@ -42,7 +42,7 @@ def is_completion_code_installed(shell, path):
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path) as fp:
with open(path, encoding="utf8") as fp:
return click_completion.get_code(shell=shell) in fp.read()
@ -64,7 +64,7 @@ def uninstall_completion_code(shell, path):
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path, "r+") as fp:
with open(path, "r+", encoding="utf8") as fp:
contents = fp.read()
fp.seek(0)
fp.truncate()

View File

@ -0,0 +1,98 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from operator import itemgetter
import click
from tabulate import tabulate
from platformio import fs
from platformio.package.manager.core import remove_unnecessary_core_packages
from platformio.package.manager.platform import remove_unnecessary_platform_packages
from platformio.project.helpers import get_project_cache_dir
def prune_cached_data(force=False, dry_run=False, silent=False):
reclaimed_space = 0
if not silent:
click.secho("Prune cached data:", bold=True)
click.echo(" - cached API requests")
click.echo(" - cached package downloads")
click.echo(" - temporary data")
cache_dir = get_project_cache_dir()
if os.path.isdir(cache_dir):
reclaimed_space += fs.calculate_folder_size(cache_dir)
if not dry_run:
if not force:
click.confirm("Do you want to continue?", abort=True)
fs.rmtree(cache_dir)
if not silent:
click.secho("Space on disk: %s" % fs.humanize_file_size(reclaimed_space))
return reclaimed_space
def prune_core_packages(force=False, dry_run=False, silent=False):
if not silent:
click.secho("Prune unnecessary core packages:", bold=True)
return _prune_packages(force, dry_run, silent, remove_unnecessary_core_packages)
def prune_platform_packages(force=False, dry_run=False, silent=False):
if not silent:
click.secho("Prune unnecessary development platform packages:", bold=True)
return _prune_packages(force, dry_run, silent, remove_unnecessary_platform_packages)
def _prune_packages(force, dry_run, silent, handler):
if not silent:
click.echo("Calculating...")
items = [
(
pkg,
fs.calculate_folder_size(pkg.path),
)
for pkg in handler(dry_run=True)
]
items = sorted(items, key=itemgetter(1), reverse=True)
reclaimed_space = sum([item[1] for item in items])
if items and not silent:
click.echo(
tabulate(
[
(
pkg.metadata.spec.humanize(),
str(pkg.metadata.version),
fs.humanize_file_size(size),
)
for (pkg, size) in items
],
headers=["Package", "Version", "Size"],
)
)
if not dry_run:
if not force:
click.confirm("Do you want to continue?", abort=True)
handler(dry_run=False)
if not silent:
click.secho("Space on disk: %s" % fs.humanize_file_size(reclaimed_space))
return reclaimed_space
def calculate_unnecessary_system_data():
return (
prune_cached_data(force=True, dry_run=True, silent=True)
+ prune_core_packages(force=True, dry_run=True, silent=True)
+ prune_platform_packages(force=True, dry_run=True, silent=True)
)
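A hedged sketch of driving these helpers directly, e.g. to report reclaimable disk space without going through the CLI:

from platformio import fs
from platformio.commands.system.prune import (
    calculate_unnecessary_system_data,
    prune_cached_data,
)

# Dry run: nothing is deleted, only folder sizes are calculated.
print("Reclaimable:", fs.humanize_file_size(calculate_unnecessary_system_data()))
# Delete cached data without prompting for confirmation.
prune_cached_data(force=True, dry_run=False)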

View File

@ -14,16 +14,18 @@
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
from fnmatch import fnmatch
from os import getcwd, listdir
from os.path import isdir, join
import fnmatch
import os
import shutil
from time import time
import click
from tabulate import tabulate
from platformio import app, exception, fs, util
from platformio.commands.platform import init_platform
from platformio.commands.test.embedded import EmbeddedTestProcessor
from platformio.commands.test.helpers import get_test_names
from platformio.commands.test.native import NativeTestProcessor
from platformio.project.config import ProjectConfig
@ -49,7 +51,7 @@ from platformio.project.config import ProjectConfig
@click.option(
"-d",
"--project-dir",
default=getcwd,
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
@ -101,11 +103,7 @@ def cli( # pylint: disable=redefined-builtin
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config.validate(envs=environment)
test_dir = config.get_optional_dir("test")
if not isdir(test_dir):
raise exception.TestDirNotExists(test_dir)
test_names = get_test_names(test_dir)
test_names = get_test_names(config)
if not verbose:
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
@ -128,9 +126,11 @@ def cli( # pylint: disable=redefined-builtin
not environment and default_envs and envname not in default_envs,
testname != "*"
and patterns["filter"]
and not any([fnmatch(testname, p) for p in patterns["filter"]]),
and not any(
fnmatch.fnmatch(testname, p) for p in patterns["filter"]
),
testname != "*"
and any([fnmatch(testname, p) for p in patterns["ignore"]]),
and any(fnmatch.fnmatch(testname, p) for p in patterns["ignore"]),
]
if any(skip_conditions):
results.append({"env": envname, "test": testname})
@ -140,9 +140,10 @@ def cli( # pylint: disable=redefined-builtin
print_processing_header(testname, envname)
cls = (
NativeTestProcessor
if config.get(section, "platform") == "native"
else EmbeddedTestProcessor
EmbeddedTestProcessor
if config.get(section, "platform")
and init_platform(config.get(section, "platform")).is_embedded()
else NativeTestProcessor
)
tp = cls(
ctx,
@ -177,23 +178,13 @@ def cli( # pylint: disable=redefined-builtin
if without_testing:
return
print_testing_summary(results)
print_testing_summary(results, verbose)
command_failed = any(r.get("succeeded") is False for r in results)
if command_failed:
raise exception.ReturnErrorCode(1)
def get_test_names(test_dir):
names = []
for item in sorted(listdir(test_dir)):
if isdir(join(test_dir, item)):
names.append(item)
if not names:
names = ["*"]
return names
def print_processing_header(test, env):
click.echo(
"Processing %s in %s environment"
@ -202,7 +193,7 @@ def print_processing_header(test, env):
click.style(env, fg="cyan", bold=True),
)
)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@ -222,7 +213,7 @@ def print_processing_footer(result):
)
def print_testing_summary(results):
def print_testing_summary(results, verbose=False):
click.echo()
tabular_data = []
@ -236,6 +227,8 @@ def print_testing_summary(results):
failed_nums += 1
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
if not verbose:
continue
status_str = "IGNORED"
else:
succeeded_nums += 1
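The filter/ignore skip conditions above reduce to plain `fnmatch` checks. A small sketch of how a single test name is matched against the `--filter`/`--ignore` patterns (pattern values are illustrative):

import fnmatch

testname = "test_uart"
filter_patterns = ["test_*"]      # e.g. values passed via --filter
ignore_patterns = ["*_slow"]      # e.g. values passed via --ignore

skipped = (
    testname != "*"
    and filter_patterns
    and not any(fnmatch.fnmatch(testname, p) for p in filter_patterns)
) or (testname != "*" and any(fnmatch.fnmatch(testname, p) for p in ignore_patterns))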

View File

@ -95,7 +95,7 @@ class EmbeddedTestProcessor(TestProcessorBase):
if isinstance(line, bytes):
line = line.decode("utf8", "ignore")
self.on_run_out(line)
if all([l in line for l in ("Tests", "Failures", "Ignored")]):
if all(l in line for l in ("Tests", "Failures", "Ignored")):
break
ser.close()
return not self._run_failed
@ -117,13 +117,10 @@ class EmbeddedTestProcessor(TestProcessorBase):
port = item["port"]
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
if hwid_str in item["hwid"] and self.is_serial_port_ready(port):
return port
# check if port is already configured
try:
serial.Serial(port, timeout=self.SERIAL_TIMEOUT).close()
except serial.SerialException:
if port and not self.is_serial_port_ready(port):
port = None
if not port:
@ -136,3 +133,18 @@ class EmbeddedTestProcessor(TestProcessorBase):
"global `--test-port` option."
)
return port
@staticmethod
def is_serial_port_ready(port, timeout=3):
if not port:
return False
elapsed = 0
while elapsed < timeout:
try:
serial.Serial(port, timeout=1).close()
return True
except: # pylint: disable=bare-except
pass
sleep(1)
elapsed += 1
return False

View File

@ -12,17 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import reactor # pylint: disable=import-error
from twisted.web import static # pylint: disable=import-error
import os
from platformio import exception
class WebRoot(static.File):
def render_GET(self, request):
if request.args.get(b"__shutdown__", False):
reactor.stop()
return "Server has been stopped"
request.setHeader("cache-control", "no-cache, no-store, must-revalidate")
request.setHeader("pragma", "no-cache")
request.setHeader("expires", "0")
return static.File.render_GET(self, request)
def get_test_names(config):
test_dir = config.get_optional_dir("test")
if not os.path.isdir(test_dir):
raise exception.TestDirNotExists(test_dir)
names = []
for item in sorted(os.listdir(test_dir)):
if os.path.isdir(os.path.join(test_dir, item)):
names.append(item)
if not names:
names = ["*"]
return names

View File

@ -139,9 +139,9 @@ class TestProcessorBase(object):
cmd_run,
project_dir=self.options["project_dir"],
project_conf=self.options["project_config"].path,
upload_port=self.options["upload_port"],
upload_port=self.options.get("upload_port"),
verbose=self.options["verbose"],
silent=self.options["silent"],
silent=self.options.get("silent"),
environment=[self.env_name],
disable_auto_clean="nobuild" in target,
target=target,
@ -224,7 +224,7 @@ class TestProcessorBase(object):
test_dir,
"%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
)
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
atexit.register(delete_tmptest_files, test_dir)

View File

@ -21,7 +21,7 @@ import click
from platformio import VERSION, __version__, app, exception
from platformio.clients.http import fetch_remote_content
from platformio.compat import WINDOWS
from platformio.compat import IS_WINDOWS
from platformio.proc import exec_command, get_pythonexe_path
from platformio.project.helpers import get_project_cache_dir
@ -40,7 +40,7 @@ def cli(dev):
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
cmds = (
["pip", "install", "--upgrade", get_pip_package(to_develop)],
["pip", "install", "--upgrade", download_dist_package(to_develop)],
["platformio", "--version"],
)
@ -73,7 +73,7 @@ def cli(dev):
if not r:
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
permission_errors = ("permission denied", "not permitted")
if any(m in r["err"].lower() for m in permission_errors) and not WINDOWS:
if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
click.secho(
"""
-----------------
@ -94,7 +94,7 @@ WARNING! Don't use `sudo` for the rest PlatformIO commands.
return True
def get_pip_package(to_develop):
def download_dist_package(to_develop):
if not to_develop:
return "platformio"
dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
@ -103,7 +103,7 @@ def get_pip_package(to_develop):
os.makedirs(cache_dir)
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
try:
with open(pkg_name, "w") as fp:
with open(pkg_name, "wb") as fp:
r = exec_command(
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
)

View File

@ -12,23 +12,57 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-import, no-name-in-module, import-error,
# pylint: disable=no-member, undefined-variable, unexpected-keyword-arg
# pylint: disable=unused-import,no-name-in-module
import glob
import inspect
import json
import locale
import os
import re
import sys
from platformio.exception import UserSideException
if sys.version_info >= (3,):
if sys.version_info >= (3, 7):
from asyncio import create_task as aio_create_task
from asyncio import get_running_loop as aio_get_running_loop
else:
from asyncio import ensure_future as aio_create_task
from asyncio import get_event_loop as aio_get_running_loop
PY2 = sys.version_info[0] == 2
CYGWIN = sys.platform.startswith("cygwin")
WINDOWS = sys.platform.startswith("win")
MACOS = sys.platform.startswith("darwin")
IS_CYGWIN = sys.platform.startswith("cygwin")
IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
IS_MACOS = sys.platform.startswith("darwin")
string_types = (str,)
def is_bytes(x):
return isinstance(x, (bytes, memoryview, bytearray))
def ci_strings_are_equal(a, b):
if a == b:
return True
if not a or not b:
return False
return a.strip().lower() == b.strip().lower()
def hashlib_encode_data(data):
if is_bytes(data):
return data
if not isinstance(data, string_types):
data = str(data)
return data.encode()
def load_python_module(name, pathname):
import importlib.util # pylint: disable=import-outside-toplevel
spec = importlib.util.spec_from_file_location(name, pathname)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def get_filesystem_encoding():
@ -53,106 +87,21 @@ def get_object_members(obj, ignore_private=True):
}
def ci_strings_are_equal(a, b):
if a == b:
return True
if not a or not b:
return False
return a.strip().lower() == b.strip().lower()
def ensure_python3(raise_exception=True):
if not raise_exception or not PY2:
return not PY2
compatible = sys.version_info >= (3, 6)
if not raise_exception or compatible:
return compatible
raise UserSideException(
"Python 3.5 or later is required for this operation. \n"
"Please install the latest Python 3 and reinstall PlatformIO Core using "
"installation script:\n"
"https://docs.platformio.org/page/core/installation.html"
"Python 3.6 or later is required for this operation. \n"
"Please check a migration guide:\n"
"https://docs.platformio.org/en/latest/core/migration.html"
"#drop-support-for-python-2-and-3-5"
)
if PY2:
import imp
string_types = (str, unicode)
def is_bytes(x):
return isinstance(x, (buffer, bytearray))
def path_to_unicode(path):
if isinstance(path, unicode):
def path_to_unicode(path):
"""
Deprecated: Compatibility with dev-platforms,
and custom device monitor filters
"""
return path
return path.decode(get_filesystem_encoding())
def hashlib_encode_data(data):
if is_bytes(data):
return data
if isinstance(data, unicode):
data = data.encode(get_filesystem_encoding())
elif not isinstance(data, string_types):
data = str(data)
return data
def dump_json_to_unicode(obj):
if isinstance(obj, unicode):
return obj
return json.dumps(
obj, encoding=get_filesystem_encoding(), ensure_ascii=False
).encode("utf8")
_magic_check = re.compile("([*?[])")
_magic_check_bytes = re.compile(b"([*?[])")
def glob_recursive(pathname):
return glob.glob(pathname)
def glob_escape(pathname):
"""Escape all special characters."""
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be
# escaped.
drive, pathname = os.path.splitdrive(pathname)
if isinstance(pathname, bytes):
pathname = _magic_check_bytes.sub(br"[\1]", pathname)
else:
pathname = _magic_check.sub(r"[\1]", pathname)
return drive + pathname
def load_python_module(name, pathname):
return imp.load_source(name, pathname)
else:
import importlib.util
from glob import escape as glob_escape
string_types = (str,)
def is_bytes(x):
return isinstance(x, (bytes, memoryview, bytearray))
def path_to_unicode(path):
return path
def hashlib_encode_data(data):
if is_bytes(data):
return data
if not isinstance(data, string_types):
data = str(data)
return data.encode()
def dump_json_to_unicode(obj):
if isinstance(obj, string_types):
return obj
return json.dumps(obj)
def glob_recursive(pathname):
return glob.glob(pathname, recursive=True)
def load_python_module(name, pathname):
spec = importlib.util.spec_from_file_location(name, pathname)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
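A few hedged usage examples of the consolidated helpers (argument values are illustrative):

from platformio.compat import (
    IS_WINDOWS,
    ci_strings_are_equal,
    hashlib_encode_data,
    load_python_module,
)

ci_strings_are_equal("ESP32", " esp32 ")   # True: case- and whitespace-insensitive
hashlib_encode_data("platformio")          # b'platformio', ready for hashlib
if not IS_WINDOWS:
    # placeholder path; loads a module directly from a source file
    mod = load_python_module("extra_script", "/path/to/extra_script.py")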

View File

@ -0,0 +1,246 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from platformio import fs, proc, util
from platformio.compat import string_types
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.debug.helpers import reveal_debug_port
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_core_dir, load_project_ide_data
from platformio.project.options import ProjectOptions
class DebugConfigBase: # pylint: disable=too-many-instance-attributes
def __init__(self, platform, project_config, env_name):
self.platform = platform
self.project_config = project_config
self.env_name = env_name
self.env_options = project_config.items(env=env_name, as_dict=True)
self.build_data = self._load_build_data()
self.tool_name = None
self.board_config = {}
self.tool_settings = {}
if "board" in self.env_options:
self.board_config = platform.board_config(self.env_options["board"])
self.tool_name = self.board_config.get_debug_tool_name(
self.env_options.get("debug_tool")
)
self.tool_settings = (
self.board_config.get("debug", {})
.get("tools", {})
.get(self.tool_name, {})
)
self._load_cmds = None
self._port = None
self.server = self._configure_server()
try:
platform.configure_debug_session(self)
except NotImplementedError:
pass
@staticmethod
def cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
@property
def program_path(self):
return self.build_data["prog_path"]
@property
def client_executable_path(self):
return self.build_data["gdb_path"]
@property
def load_cmds(self):
if self._load_cmds is not None:
return self._load_cmds
result = self.env_options.get("debug_load_cmds")
if not result:
result = self.tool_settings.get("load_cmds")
if not result:
# legacy
result = self.tool_settings.get("load_cmd")
if not result:
result = ProjectOptions["env.debug_load_cmds"].default
return self.cleanup_cmds(result)
@load_cmds.setter
def load_cmds(self, cmds):
self._load_cmds = cmds
@property
def load_mode(self):
result = self.env_options.get("debug_load_mode")
if not result:
result = self.tool_settings.get("load_mode")
return result or ProjectOptions["env.debug_load_mode"].default
@property
def init_break(self):
missed = object()
result = self.env_options.get("debug_init_break", missed)
if result != missed:
return result
result = None
if not result:
result = self.tool_settings.get("init_break")
return result or ProjectOptions["env.debug_init_break"].default
@property
def init_cmds(self):
return self.cleanup_cmds(
self.env_options.get("debug_init_cmds", self.tool_settings.get("init_cmds"))
)
@property
def extra_cmds(self):
return self.cleanup_cmds(
self.env_options.get("debug_extra_cmds")
) + self.cleanup_cmds(self.tool_settings.get("extra_cmds"))
@property
def port(self):
return reveal_debug_port(
self.env_options.get("debug_port", self.tool_settings.get("port"))
or self._port,
self.tool_name,
self.tool_settings,
)
@port.setter
def port(self, value):
self._port = value
@property
def upload_protocol(self):
return self.env_options.get(
"upload_protocol", self.board_config.get("upload", {}).get("protocol")
)
@property
def speed(self):
return self.env_options.get("debug_speed", self.tool_settings.get("speed"))
@property
def server_ready_pattern(self):
return self.env_options.get(
"debug_server_ready_pattern", (self.server or {}).get("ready_pattern")
)
def _load_build_data(self):
data = load_project_ide_data(os.getcwd(), self.env_name, cache=True)
if data:
return data
raise DebugInvalidOptionsError("Could not load a build configuration")
def _configure_server(self):
result = None
# specific server per system
if isinstance(self.tool_settings.get("server", {}), list):
for item in self.tool_settings["server"][:]:
self.tool_settings["server"] = item
if util.get_systype() in item.get("system", []):
break
# user overwrites debug server
if self.env_options.get("debug_server"):
result = {
"cwd": None,
"executable": None,
"arguments": self.env_options.get("debug_server"),
}
result["executable"] = result["arguments"][0]
result["arguments"] = result["arguments"][1:]
elif "server" in self.tool_settings:
result = self.tool_settings["server"]
server_package = result.get("package")
server_package_dir = (
self.platform.get_package_dir(server_package)
if server_package
else None
)
if server_package and not server_package_dir:
self.platform.install_packages(
with_packages=[server_package],
skip_default_package=True,
silent=True,
)
server_package_dir = self.platform.get_package_dir(server_package)
result.update(
dict(
cwd=server_package_dir if server_package else None,
executable=result.get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
for a in result.get("arguments", [])
],
)
)
return self.reveal_patterns(result) if result else None
def get_init_script(self, debugger):
try:
return getattr(self, "%s_INIT_SCRIPT" % debugger.upper())
except AttributeError:
raise NotImplementedError
def reveal_patterns(self, source, recursive=True):
program_path = self.program_path or ""
patterns = {
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
"PYTHONEXE": proc.get_pythonexe_path(),
"PROJECT_DIR": self.project_config.path,
"PROG_PATH": program_path,
"PROG_DIR": os.path.dirname(program_path),
"PROG_NAME": os.path.basename(os.path.splitext(program_path)[0]),
"DEBUG_PORT": self.port,
"UPLOAD_PROTOCOL": self.upload_protocol,
"INIT_BREAK": self.init_break or "",
"LOAD_CMDS": "\n".join(self.load_cmds or []),
}
for key, value in patterns.items():
if key.endswith(("_DIR", "_PATH")):
patterns[key] = fs.to_unix_path(value)
def _replace(text):
for key, value in patterns.items():
pattern = "$%s" % key
text = text.replace(pattern, value or "")
return text
if isinstance(source, string_types):
source = _replace(source)
elif isinstance(source, (list, dict)):
items = enumerate(source) if isinstance(source, list) else source.items()
for key, value in items:
if isinstance(value, string_types):
source[key] = _replace(value)
elif isinstance(value, (list, dict)) and recursive:
source[key] = self.reveal_patterns(value, patterns)
data = json.dumps(source)
if any(("$" + key) in data for key in patterns):
source = self.reveal_patterns(source, patterns)
return source
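`cleanup_cmds` is easy to exercise in isolation; a hedged sketch, assuming `ProjectConfig.parse_multi_values` splits the multi-line option into separate items:

DebugConfigBase.cleanup_cmds("$LOAD_CMD\nmonitor reset")
# -> ['$LOAD_CMDS', 'monitor reset']  (the legacy $LOAD_CMD placeholder is renamed)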

View File

@ -0,0 +1,49 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class BlackmagicDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_run_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""


@@ -0,0 +1,48 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import re
from platformio.debug.config.generic import GenericDebugConfig
from platformio.debug.config.native import NativeDebugConfig
class DebugConfigFactory(object):
@staticmethod
def get_clsname(name):
name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])
@classmethod
def new(cls, platform, project_config, env_name):
board_config = platform.board_config(
project_config.get("env:" + env_name, "board")
)
tool_name = (
board_config.get_debug_tool_name(
project_config.get("env:" + env_name, "debug_tool")
)
if board_config
else None
)
config_cls = None
try:
mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
config_cls = getattr(mod, cls.get_clsname(tool_name))
except ModuleNotFoundError:
config_cls = (
GenericDebugConfig if platform.is_embedded() else NativeDebugConfig
)
return config_cls(platform, project_config, env_name)
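
Roughly, the factory strips non-alphanumeric characters from the debug tool name, capitalizes the first letter, and imports platformio.debug.config.<tool> to find the matching class, falling back to the generic or native config when no module exists. A quick sketch of the name mapping only (example tool names):

import re

def get_clsname(name):
    # same transformation as DebugConfigFactory.get_clsname() above
    name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
    return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])

print(get_clsname("jlink"))       # JlinkDebugConfig
print(get_clsname("blackmagic"))  # BlackmagicDebugConfig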


@@ -0,0 +1,38 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class GenericDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_run_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
super(GenericDebugConfig, self).__init__(*args, **kwargs)
self.port = ":3333"


@@ -0,0 +1,48 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class JlinkDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_run_target
monitor clrbp
monitor reset
monitor go
end
target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
super(JlinkDebugConfig, self).__init__(*args, **kwargs)
self.port = ":2331"
@property
def server_ready_pattern(self):
return super(JlinkDebugConfig, self).server_ready_pattern or (
"Waiting for GDB connection"
)


@@ -0,0 +1,36 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class MspdebugDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
target remote $DEBUG_PORT
monitor erase
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
super(MspdebugDebugConfig, self).__init__(*args, **kwargs)
self.port = ":2000"


@@ -0,0 +1,34 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)


@@ -0,0 +1,37 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class QemuDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor system_reset
end
define pio_reset_run_target
monitor system_reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
def __init__(self, *args, **kwargs):
super(QemuDebugConfig, self).__init__(*args, **kwargs)
self.port = ":1234"


@@ -0,0 +1,45 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class RenodeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
monitor machine Reset
$LOAD_CMDS
monitor start
end
define pio_reset_run_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
$INIT_BREAK
monitor start
"""
def __init__(self, *args, **kwargs):
super(RenodeDebugConfig, self).__init__(*args, **kwargs)
self.port = ":3333"
@property
def server_ready_pattern(self):
return super(RenodeDebugConfig, self).server_ready_pattern or (
"GDB server with all CPUs started on port"
)

204  platformio/debug/helpers.py  Normal file

@@ -0,0 +1,204 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import util
from platformio.commands import PlatformioCLI
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.run.command import print_processing_header
from platformio.commands.test.helpers import get_test_names
from platformio.commands.test.processor import TestProcessorBase
from platformio.compat import IS_WINDOWS, is_bytes
from platformio.debug.exception import DebugInvalidOptionsError
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
STDOUT = sys.stdout
def write(self, text):
self.STDOUT.write(escape_gdbmi_stream("~", text))
self.STDOUT.flush()
def is_gdbmi_mode():
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
bytes_stream = False
if is_bytes(stream):
bytes_stream = True
stream = stream.decode()
if not stream:
return b"" if bytes_stream else ""
ends_nl = stream.endswith("\n")
stream = re.sub(r"\\+", "\\\\\\\\", stream)
stream = stream.replace('"', '\\"')
stream = stream.replace("\n", "\\n")
stream = '%s"%s"' % (prefix, stream)
if ends_nl:
stream += "\n"
return stream.encode() if bytes_stream else stream
def get_default_debug_env(config):
default_envs = config.default_envs()
all_envs = config.envs()
for env in default_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
for env in all_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
return default_envs[0] if default_envs else all_envs[0]
def predebug_project(
ctx, project_dir, project_config, env_name, preload, verbose
): # pylint: disable=too-many-arguments
debug_testname = project_config.get("env:" + env_name, "debug_test")
if debug_testname:
test_names = get_test_names(project_config)
if debug_testname not in test_names:
raise DebugInvalidOptionsError(
"Unknown test name `%s`. Valid names are `%s`"
% (debug_testname, ", ".join(test_names))
)
print_processing_header(env_name, project_config, verbose)
tp = TestProcessorBase(
ctx,
debug_testname,
env_name,
dict(
project_config=project_config,
project_dir=project_dir,
without_building=False,
without_uploading=True,
without_testing=True,
verbose=False,
),
)
tp.build_or_upload(["__debug", "__test"] + (["upload"] if preload else []))
else:
ctx.invoke(
cmd_run,
project_dir=project_dir,
project_conf=project_config.path,
environment=[env_name],
target=["__debug"] + (["upload"] if preload else []),
verbose=verbose,
)
if preload:
time.sleep(5)
def has_debug_symbols(prog_path):
if not isfile(prog_path):
return False
matched = {
b".debug_info": False,
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
# b"__PLATFORMIO_BUILD_DEBUG__": False,
}
with open(prog_path, "rb") as fp:
last_data = b""
while True:
data = fp.read(1024)
if not data:
break
for pattern, found in matched.items():
if found:
continue
if pattern in last_data + data:
matched[pattern] = True
last_data = data
return all(matched.values())
def is_prog_obsolete(prog_path):
prog_hash_path = prog_path + ".sha1"
if not isfile(prog_path):
return True
shasum = sha1()
with open(prog_path, "rb") as fp:
while True:
data = fp.read(1024)
if not data:
break
shasum.update(data)
new_digest = shasum.hexdigest()
old_digest = None
if isfile(prog_hash_path):
with open(prog_hash_path, encoding="utf8") as fp:
old_digest = fp.read()
if new_digest == old_digest:
return False
with open(prog_hash_path, mode="w", encoding="utf8") as fp:
fp.write(new_digest)
return True
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
if set(["*", "?", "[", "]"]) & set(env_debug_port):
return env_debug_port
return None
def _is_match_pattern(port):
pattern = _get_pattern()
if not pattern:
return True
return fnmatch(port, pattern)
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
continue
port = item["port"]
if tool_name.startswith("blackmagic"):
if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
return port
return None
if env_debug_port and not _get_pattern():
return env_debug_port
if not tool_settings.get("require_debug_port"):
return None
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
return debug_port
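
A small usage sketch of escape_gdbmi_stream() from the file above: console text is escaped and wrapped into a GDB/MI stream record. It assumes PlatformIO Core 5.2+ is importable; the firmware name is made up:

from platformio.debug.helpers import escape_gdbmi_stream

# '~' marks a GDB/MI console-stream record; quotes and newlines are escaped
print(escape_gdbmi_stream("~", 'Reading symbols from "firmware.elf"\n'))
# ~"Reading symbols from \"firmware.elf\"\n"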


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,155 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import signal
import subprocess
import sys
import time
from platformio.compat import (
IS_WINDOWS,
aio_create_task,
aio_get_running_loop,
get_locale_encoding,
)
class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
def __init__(self, factory):
self.factory = factory
self._is_exited = False
def connection_made(self, transport):
self.factory.connection_made(transport)
def pipe_data_received(self, fd, data):
pipe_to_cb = [
self.factory.stdin_data_received,
self.factory.stdout_data_received,
self.factory.stderr_data_received,
]
pipe_to_cb[fd](data)
def connection_lost(self, exc):
self.process_exited()
def process_exited(self):
if self._is_exited:
return
self.factory.process_exited()
self._is_exited = True
class DebugBaseProcess:
STDOUT_CHUNK_SIZE = 2048
LOG_FILE = None
def __init__(self):
self.transport = None
self._is_running = False
self._last_activity = 0
self._exit_future = None
self._stdin_read_task = None
self._std_encoding = get_locale_encoding()
async def spawn(self, *args, **kwargs):
wait_until_exit = False
if "wait_until_exit" in kwargs:
wait_until_exit = kwargs["wait_until_exit"]
del kwargs["wait_until_exit"]
for pipe in ("stdin", "stdout", "stderr"):
if pipe not in kwargs:
kwargs[pipe] = subprocess.PIPE
loop = aio_get_running_loop()
await loop.subprocess_exec(
lambda: DebugSubprocessProtocol(self), *args, **kwargs
)
if wait_until_exit:
self._exit_future = loop.create_future()
await self._exit_future
def is_running(self):
return self._is_running
def connection_made(self, transport):
self._is_running = True
self.transport = transport
def connect_stdin_pipe(self):
self._stdin_read_task = aio_create_task(self._read_stdin_pipe())
async def _read_stdin_pipe(self):
loop = aio_get_running_loop()
if IS_WINDOWS:
while True:
self.stdin_data_received(
await loop.run_in_executor(None, sys.stdin.buffer.readline)
)
else:
reader = asyncio.StreamReader()
protocol = asyncio.StreamReaderProtocol(reader)
await loop.connect_read_pipe(lambda: protocol, sys.stdin)
while True:
self.stdin_data_received(await reader.readline())
def stdin_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
def stdout_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[: self.STDOUT_CHUNK_SIZE]
print(chunk.decode(self._std_encoding, "replace"), end="", flush=True)
data = data[self.STDOUT_CHUNK_SIZE :]
def stderr_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
print(
data.decode(self._std_encoding, "replace"),
end="",
file=sys.stderr,
flush=True,
)
def process_exited(self):
self._is_running = False
self._last_activity = time.time()
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
if self._stdin_read_task:
self._stdin_read_task.cancel()
self._stdin_read_task = None
if self._exit_future:
self._exit_future.set_result(True)
self._exit_future = None
def terminate(self):
if not self.is_running() or not self.transport:
return
try:
self.transport.kill()
self.transport.close()
except: # pylint: disable=bare-except
pass
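
For readers unfamiliar with the asyncio subprocess protocol API used above: pipe_data_received(fd, data) is called with fd 1 for the child's stdout and fd 2 for its stderr, and process_exited() lets the caller resolve a future so spawn(..., wait_until_exit=True) can await completion. A minimal stand-alone sketch of the same pattern (plain asyncio, no PlatformIO imports):

import asyncio
import sys

class EchoProtocol(asyncio.SubprocessProtocol):
    def __init__(self, exit_future):
        self.exit_future = exit_future

    def pipe_data_received(self, fd, data):
        # fd 1 = child's stdout, fd 2 = child's stderr
        stream = sys.stdout if fd == 1 else sys.stderr
        stream.write(data.decode(errors="replace"))

    def process_exited(self):
        self.exit_future.set_result(True)

async def main():
    loop = asyncio.get_running_loop()
    exit_future = loop.create_future()
    transport, _ = await loop.subprocess_exec(
        lambda: EchoProtocol(exit_future), sys.executable, "-c", "print('hello')"
    )
    await exit_future  # mirrors spawn(..., wait_until_exit=True)
    transport.close()

asyncio.run(main())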


@@ -0,0 +1,101 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import os
import signal
import tempfile
from platformio import fs, proc
from platformio.cache import ContentCache
from platformio.compat import IS_WINDOWS, hashlib_encode_data
from platformio.debug.process.base import DebugBaseProcess
from platformio.debug.process.server import DebugServerProcess
from platformio.project.helpers import get_project_cache_dir
class DebugClientProcess(DebugBaseProcess):
def __init__(self, project_dir, debug_config):
super(DebugClientProcess, self).__init__()
self.project_dir = project_dir
self.debug_config = debug_config
self._server_process = None
self._session_id = None
if not os.path.isdir(get_project_cache_dir()):
os.makedirs(get_project_cache_dir())
self.working_dir = tempfile.mkdtemp(
dir=get_project_cache_dir(), prefix=".piodebug-"
)
self._target_is_running = False
self._errors_buffer = b""
async def run(self):
session_hash = (
self.debug_config.client_executable_path + self.debug_config.program_path
)
self._session_id = hashlib.sha1(hashlib_encode_data(session_hash)).hexdigest()
self._kill_previous_session()
if self.debug_config.server:
self._server_process = DebugServerProcess(self.debug_config)
self.debug_config.port = await self._server_process.run()
def connection_made(self, transport):
super(DebugClientProcess, self).connection_made(transport)
self._lock_session(transport.get_pid())
# Disable SIGINT and allow GDB's Ctrl+C interrupt
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
self.connect_stdin_pipe()
def process_exited(self):
if self._server_process:
self._server_process.terminate()
super(DebugClientProcess, self).process_exited()
def _kill_previous_session(self):
assert self._session_id
pid = None
with ContentCache() as cc:
pid = cc.get(self._session_id)
cc.delete(self._session_id)
if not pid:
return
if IS_WINDOWS:
kill = ["Taskkill", "/PID", pid, "/F"]
else:
kill = ["kill", pid]
try:
proc.exec_command(kill)
except: # pylint: disable=bare-except
pass
def _lock_session(self, pid):
if not self._session_id:
return
with ContentCache() as cc:
cc.set(self._session_id, str(pid), "1h")
def _unlock_session(self):
if not self._session_id:
return
with ContentCache() as cc:
cc.delete(self._session_id)
def __del__(self):
self._unlock_session()
if self.working_dir and os.path.isdir(self.working_dir):
fs.rmtree(self.working_dir)
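
The session lock above is just a SHA1 of the client executable path concatenated with the program path, cached together with the GDB PID so a leftover session can be killed before a new one starts. A simplified sketch with made-up paths:

import hashlib

client_executable_path = "/opt/toolchain/bin/arm-none-eabi-gdb"   # hypothetical
program_path = "/projects/blink/.pio/build/debug/firmware.elf"    # hypothetical

# same idea as DebugClientProcess.run(): one stable key per executable/program pair
session_id = hashlib.sha1((client_executable_path + program_path).encode()).hexdigest()
print(session_id)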


@@ -0,0 +1,193 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import signal
import time
from platformio import telemetry
from platformio.compat import aio_get_running_loop, is_bytes
from platformio.debug import helpers
from platformio.debug.process.client import DebugClientProcess
class GDBClientProcess(DebugClientProcess):
PIO_SRC_NAME = ".pioinit"
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
def __init__(self, *args, **kwargs):
super(GDBClientProcess, self).__init__(*args, **kwargs)
self._target_is_running = False
self._errors_buffer = b""
async def run(self, extra_args): # pylint: disable=arguments-differ
await super(GDBClientProcess, self).run()
self.generate_init_script(os.path.join(self.working_dir, self.PIO_SRC_NAME))
gdb_path = self.debug_config.client_executable_path or "gdb"
# start GDB client
args = [
gdb_path,
"-q",
"--directory",
self.working_dir,
"--directory",
self.project_dir,
"-l",
"10",
]
args.extend(list(extra_args or []))
gdb_data_dir = self._get_data_dir(gdb_path)
if gdb_data_dir:
args.extend(["--data-directory", gdb_data_dir])
args.append(self.debug_config.program_path)
await self.spawn(*args, cwd=self.project_dir, wait_until_exit=True)
@staticmethod
def _get_data_dir(gdb_path):
if "msp430" in gdb_path:
return None
gdb_data_dir = os.path.realpath(
os.path.join(os.path.dirname(gdb_path), "..", "share", "gdb")
)
return gdb_data_dir if os.path.isdir(gdb_data_dir) else None
def generate_init_script(self, dst):
# default GDB init commands depending on debug tool
commands = self.debug_config.get_init_script("gdb").split("\n")
if self.debug_config.init_cmds:
commands = self.debug_config.init_cmds
commands.extend(self.debug_config.extra_cmds)
if not any("define pio_reset_run_target" in cmd for cmd in commands):
commands = [
"define pio_reset_run_target",
" echo Warning! Undefined pio_reset_run_target command\\n",
" monitor reset",
"end",
] + commands
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
commands = [
"define pio_reset_halt_target",
" echo Warning! Undefined pio_reset_halt_target command\\n",
" monitor reset halt",
"end",
] + commands
if not any("define pio_restart_target" in cmd for cmd in commands):
commands += [
"define pio_restart_target",
" pio_reset_halt_target",
" $INIT_BREAK",
" %s" % ("continue" if self.debug_config.init_break else "next"),
"end",
]
banner = [
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_config.tool_name,
"echo PlatformIO: Initializing remote target...\\n",
]
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
with open(dst, mode="w", encoding="utf8") as fp:
fp.write("\n".join(self.debug_config.reveal_patterns(commands)))
def stdin_data_received(self, data):
super(GDBClientProcess, self).stdin_data_received(data)
if b"-exec-run" in data:
if self._target_is_running:
token, _ = data.split(b"-", 1)
self.stdout_data_received(token + b"^running\n")
return
if self.debug_config.platform.is_embedded():
data = data.replace(b"-exec-run", b"-exec-continue")
if b"-exec-continue" in data:
self._target_is_running = True
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
self.transport.get_pipe_transport(0).write(b"pio_reset_run_target\n")
self.transport.get_pipe_transport(0).write(data)
def stdout_data_received(self, data):
super(GDBClientProcess, self).stdout_data_received(data)
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug",
"Started",
telemetry.dump_run_environment(self.debug_config.env_options),
)
self._auto_exec_continue()
def console_log(self, msg):
if helpers.is_gdbmi_mode():
msg = helpers.escape_gdbmi_stream("~", msg)
self.stdout_data_received(msg if is_bytes(msg) else msg.encode())
def _auto_exec_continue(self):
auto_exec_delay = 0.5 # in seconds
if self._last_activity > (time.time() - auto_exec_delay):
aio_get_running_loop().call_later(0.1, self._auto_exec_continue)
return
if not self.debug_config.init_break or self._target_is_running:
return
self.console_log(
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
% self.debug_config.init_break
)
self.console_log(
"PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
)
if self.debug_config.platform.is_embedded():
self.transport.get_pipe_transport(0).write(
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
)
else:
self.transport.get_pipe_transport(0).write(
b"0-exec-run\n" if helpers.is_gdbmi_mode() else b"run\n"
)
self._target_is_running = True
def stderr_data_received(self, data):
super(GDBClientProcess, self).stderr_data_received(data)
self._handle_error(data)
def _handle_error(self, data):
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
if not (
self.PIO_SRC_NAME.encode() in self._errors_buffer
and b"Error in sourced" in self._errors_buffer
):
return
last_errors = self._errors_buffer.decode()
last_errors = " ".join(reversed(last_errors.split("\n")))
last_errors = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_errors, flags=re.M)
err = "%s -> %s" % (
telemetry.dump_run_environment(self.debug_config.env_options),
last_errors,
)
telemetry.send_exception("DebugInitError: %s" % err)
self.transport.close()
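
generate_init_script() above layers a banner, the tool's default (or user-provided) init commands, extra commands, fallback pio_reset_* defines, and a completion banner, then writes the result to .pioinit. A condensed sketch of that assembly with illustrative command lists (not the real tool defaults):

init_script = ["target extended-remote $DEBUG_PORT", "$LOAD_CMDS", "$INIT_BREAK"]
extra_cmds = ["set remotetimeout 10"]  # e.g. user commands from debug_extra_cmds

commands = init_script + extra_cmds
if not any("define pio_reset_run_target" in cmd for cmd in commands):
    # inject a stub when the tool script does not define the helper
    commands = ["define pio_reset_run_target", "  monitor reset", "end"] + commands

banner = ["echo PlatformIO: Initializing remote target...\\n"]
footer = ["echo PlatformIO: Initialization completed\\n"]
print("\n".join(banner + commands + footer))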


@@ -0,0 +1,148 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import os
import re
import time
from platformio import fs
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.debug.process.base import DebugBaseProcess
from platformio.proc import where_is_program
class DebugServerProcess(DebugBaseProcess):
STD_BUFFER_SIZE = 1024
def __init__(self, debug_config):
super(DebugServerProcess, self).__init__()
self.debug_config = debug_config
self._ready = False
self._std_buffer = {"out": b"", "err": b""}
async def run(self): # pylint: disable=too-many-branches
server = self.debug_config.server
if not server:
return None
server_executable = server["executable"]
if not server_executable:
return None
if server["cwd"]:
server_executable = os.path.join(server["cwd"], server_executable)
if (
IS_WINDOWS
and not server_executable.endswith(".exe")
and os.path.isfile(server_executable + ".exe")
):
server_executable = server_executable + ".exe"
if not os.path.isfile(server_executable):
server_executable = where_is_program(server_executable)
if not os.path.isfile(server_executable):
raise DebugInvalidOptionsError(
"Could not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n"
"See https://docs.platformio.org/page/plus/debugging.html"
% server_executable
)
openocd_pipe_allowed = all(
[
not self.debug_config.env_options.get("debug_port"),
"gdb" in self.debug_config.client_executable_path,
"openocd" in server_executable,
]
)
if openocd_pipe_allowed:
args = []
if server["cwd"]:
args.extend(["-s", server["cwd"]])
args.extend(
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
)
args.extend(server["arguments"])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
)
return fs.to_unix_path('| "%s" %s' % (server_executable, str_args))
env = os.environ.copy()
# prepend server "lib" folder to LD path
if (
not IS_WINDOWS
and server["cwd"]
and os.path.isdir(os.path.join(server["cwd"], "lib"))
):
ld_key = "DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH"
env[ld_key] = os.path.join(server["cwd"], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
# prepend BIN to PATH
if server["cwd"] and os.path.isdir(os.path.join(server["cwd"], "bin")):
env["PATH"] = "%s%s%s" % (
os.path.join(server["cwd"], "bin"),
os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")),
)
await self.spawn(
*([server_executable] + server["arguments"]), cwd=server["cwd"], env=env
)
await self._wait_until_ready()
return self.debug_config.port
async def _wait_until_ready(self):
ready_pattern = self.debug_config.server_ready_pattern
timeout = 60 if ready_pattern else 10
elapsed = 0
delay = 0.5
auto_ready_delay = 0.5
while not self._ready and self.is_running() and elapsed < timeout:
await asyncio.sleep(delay)
if not ready_pattern:
self._ready = self._last_activity < (time.time() - auto_ready_delay)
elapsed += delay
def _check_ready_by_pattern(self, data):
if self._ready:
return self._ready
ready_pattern = self.debug_config.server_ready_pattern
if ready_pattern:
if ready_pattern.startswith("^"):
self._ready = re.match(
ready_pattern,
data.decode("utf-8", "ignore"),
)
else:
self._ready = ready_pattern.encode() in data
return self._ready
def stdout_data_received(self, data):
super(DebugServerProcess, self).stdout_data_received(
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
)
self._std_buffer["out"] += data
self._check_ready_by_pattern(self._std_buffer["out"])
self._std_buffer["out"] = self._std_buffer["out"][-1 * self.STD_BUFFER_SIZE :]
def stderr_data_received(self, data):
super(DebugServerProcess, self).stderr_data_received(data)
self._std_buffer["err"] += data
self._check_ready_by_pattern(self._std_buffer["err"])
self._std_buffer["err"] = self._std_buffer["err"][-1 * self.STD_BUFFER_SIZE :]
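
The readiness check above treats a server_ready_pattern that begins with "^" as a regular expression and anything else as a plain substring match against the buffered server output. A stand-alone sketch:

import re

def is_ready(buffer: bytes, ready_pattern: str) -> bool:
    # mirrors DebugServerProcess._check_ready_by_pattern()
    if ready_pattern.startswith("^"):
        return bool(re.match(ready_pattern, buffer.decode("utf-8", "ignore")))
    return ready_pattern.encode() in buffer

print(is_ready(b"SEGGER J-Link GDB Server\nWaiting for GDB connection...\n",
               "Waiting for GDB connection"))  # True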


@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import hashlib
import io
import json
@@ -24,7 +25,7 @@ import sys
import click
from platformio import exception
from platformio.compat import WINDOWS, glob_escape, glob_recursive
from platformio.compat import IS_WINDOWS
class cd(object):
@@ -51,7 +52,7 @@ def get_source_dir():
def load_json(file_path):
try:
with open(file_path, "r") as f:
with open(file_path, mode="r", encoding="utf8") as f:
return json.load(f)
except ValueError:
raise exception.InvalidJSONFile(file_path)
@@ -101,7 +102,7 @@ def ensure_udev_rules():
def _rules_to_set(rules_path):
result = set()
with open(rules_path) as fp:
with open(rules_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if not line or line.startswith("#"):
@@ -158,7 +159,9 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
items = set()
for item in glob_recursive(os.path.join(glob_escape(src_dir), pattern)):
for item in glob.glob(
os.path.join(glob.escape(src_dir), pattern), recursive=True
):
if os.path.isdir(item):
for root, _, files in os.walk(item, followlinks=followlinks):
for f in files:
@@ -173,7 +176,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
def to_unix_path(path):
if not WINDOWS or not path:
if not IS_WINDOWS or not path:
return path
return re.sub(r"[\\]+", "/", path)
@@ -182,7 +185,7 @@ def expanduser(path):
"""
Be compatible with Python 3.8, on Windows skip HOME and check for USERPROFILE
"""
if not WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
if not IS_WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
return os.path.expanduser(path)
return os.environ["USERPROFILE"] + path[1:]
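
The change above swaps the old compat wrappers for the stdlib glob equivalents; roughly the same call looks like this (directory and pattern are hypothetical):

import glob
import os

src_dir = "src [v2]"   # glob.escape() keeps the brackets from acting as a character class
pattern = "**/*.c"
matches = glob.glob(os.path.join(glob.escape(src_dir), pattern), recursive=True)
print(matches)  # [] unless the directory exists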


@@ -81,6 +81,7 @@ class ProjectGenerator(object):
"src_files": self.get_src_files(),
"project_src_dir": self.config.get_optional_dir("src"),
"project_lib_dir": self.config.get_optional_dir("lib"),
"project_test_dir": self.config.get_optional_dir("test"),
"project_libdeps_dir": os.path.join(
self.config.get_optional_dir("libdeps"), self.env_name
),


@@ -75,7 +75,7 @@ set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
% for define in defines:
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
add_definitions(-D{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}})
% end
% for include in filter_includes(includes):
@@ -99,7 +99,7 @@ endif()
% for env, data in ide_data.items():
if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
% for define in data["defines"]:
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
add_definitions(-D{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}})
% end
% for include in filter_includes(data["includes"]):
@@ -115,7 +115,7 @@ endif()
% end
FILE(GLOB_RECURSE SRC_LIST
% for path in (project_src_dir, project_lib_dir):
% for path in (project_src_dir, project_lib_dir, project_test_dir):
{{ _normalize_path(path) + "/*.*" }}
% end
)
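
The template change drops the apostrophes around the -D definitions (the quoting that crashed CLion 2021.3) while keeping the backslash escaping of quotes, parentheses, and "#". A quick check of that escaping regex in Python:

import re

define = 'VERSION="1.2.3"'  # example build-flag define
print(re.sub(r"([\"\(\)#])", r"\\\1", define))  # VERSION=\"1.2.3\"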


@@ -1,22 +1,12 @@
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
%
clang
% if cc_stds:
{{"%c"}} -std=c{{ cc_stds[-1] }}
% end
% if cxx_stds:
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
% end
{{"%c"}} {{ !cc_flags }}
{{"%cpp"}} {{ !cxx_flags }}
% for include in filter_includes(includes):
-I{{ include }}
-I{{ !include }}
% end
% for define in defines:
-D{{ define }}
-D{{ !define }}
% end


@@ -1,6 +0,0 @@
% for include in filter_includes(includes):
-I{{include}}
% end
% for define in defines:
-D{{!define}}
% end


@@ -1,3 +1,13 @@
% import re
%
% cpp_standards_remap = {
% "0x": "11",
% "1y": "14",
% "1z": "17",
% "2a": "20",
% "2b": "23"
% }
win32 {
HOMEDIR += $$(USERPROFILE)
}
@@ -27,3 +37,9 @@ HEADERS += {{file}}
SOURCES += {{file}}
% end
% end
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% if cxx_stds:
CONFIG += c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }}
% end
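
The added block maps compiler -std= suffixes such as "1z" or "2a" to the numeric values qmake expects in CONFIG += c++NN. A small Python sketch of the same extraction and remap (flags are examples):

import re

STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
cpp_standards_remap = {"0x": "11", "1y": "14", "1z": "17", "2a": "20", "2b": "23"}

def qmake_std(cxx_flags):
    stds = STD_RE.findall(cxx_flags)
    if not stds:
        return None
    return "c++" + cpp_standards_remap.get(stds[-1], stds[-1])

print(qmake_std("-O2 -std=gnu++1z"))  # c++17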

Some files were not shown because too many files have changed in this diff.