Compare commits

1231 Commits

Author SHA1 Message Date
Ivan Kravets
a76e445ed9 Merge branch 'release/v6.0.1' 2022-05-17 19:23:31 +03:00
Ivan Kravets
cb7148d018 Bump version to 6.0.1 2022-05-17 19:23:00 +03:00
Ivan Kravets
38afa07dbe Use Marshmallow v3.14.1 for Python 3.6 2022-05-17 19:10:54 +03:00
Ivan Kravets
92073a4ccd Deprecate "pio update", "pio lib", and "pio platform" commands 2022-05-17 18:57:40 +03:00
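
For reference, the deprecated commands map onto the unified "pio pkg" interface roughly as follows (flag spellings per the PlatformIO Core 6 docs; treat as illustrative):

    pio update              ->  pio pkg update
    pio lib install ...     ->  pio pkg install --library ...
    pio platform install .. ->  pio pkg install --platform ...
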
Ivan Kravets
abf6304818 Fixed an issue when using "Interpolation of Values" and merging str+int options // Resolve #4271 2022-05-17 16:03:33 +03:00
Ivan Kravets
9a86175701 Bump version to 6.0.1b1 2022-05-17 13:34:03 +03:00
Ivan Kravets
b764a2220f Improved support for the renamed configuration options // Resolve #4270 2022-05-17 13:33:25 +03:00
Ivan Kravets
3776233233 Rename "shared" module to the "public" 2022-05-16 16:56:01 +03:00
Ivan Kravets
0d92e8fc17 Bump version to 6.0.0a1 2022-05-16 14:46:52 +03:00
Ivan Kravets
40422eac2e Fixed an issue when calling built-in pio device monitor filter 2022-05-16 14:46:37 +03:00
Ivan Kravets
0fb4b1e109 Merge tag 'v6.0.0' into develop
Bump version to 6.0.0
2022-05-16 14:22:08 +03:00
Ivan Kravets
44ecc7c666 Merge branch 'release/v6.0.0' 2022-05-16 14:22:07 +03:00
Ivan Kravets
26d659c433 Bump version to 6.0.0 2022-05-16 14:21:57 +03:00
Ivan Kravets
58c4145809 Refactor library management docs 2022-05-16 14:18:45 +03:00
Ivan Kravets
fe08ce7795 Implement shared API 2022-05-16 11:39:18 +03:00
Ivan Kravets
9163e9e67d Rename pio project data to the pio project metadata command 2022-05-15 16:57:27 +03:00
Ivan Kravets
7acae6461e Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2022-05-15 15:35:07 +03:00
John Belmonte
e7a172b8dd qtcreator: add project-update makefile target (#4267)
* qtcreator: add project-update makefile target

* add prompt and delete .pio/

* formatting

* forced rm

* remove workaround of deleting .pio/
2022-05-15 15:34:57 +03:00
Ivan Kravets
b90e89a791 no message 2022-05-15 14:54:07 +03:00
John Belmonte
db11244f49 qtcreator IDE gitignore tweaks (#4266)
* add .gitignore to project files
  * exclude qtc_clangd
  * don't exclude user project config file
2022-05-15 13:52:59 +03:00
Ivan Kravets
54f0748201 Cache a build metadata only for debugging // Resolve #4267 2022-05-15 13:52:11 +03:00
Ivan Kravets
575f0ae300 Bump version to 6.0.0rc3 2022-05-15 13:47:32 +03:00
Ivan Kravets
7a100fb0b0 Use device finder for automatic detection of upload port 2022-05-15 13:46:44 +03:00
Ivan Kravets
d01d314f47 Pick the last USB device port 2022-05-15 13:13:45 +03:00
Ivan Kravets
e5e2210768 Improved automatic detection of a testing serial port // Resolve #4076 2022-05-14 23:30:36 +03:00
Ivan Kravets
d22b479bd3 Regroup device command 2022-05-14 18:21:44 +03:00
Ivan Kravets
19853b0b66 Implement config.get_default_env() 2022-05-14 17:55:36 +03:00
Ivan Kravets
ce62514a17 Resolve project dependencies with pio project init command 2022-05-14 16:31:08 +03:00
Ivan Kravets
4a4ba5594b Rename "load_project_ide_data" to the "load_build_metadata" 2022-05-14 16:30:20 +03:00
Ivan Kravets
af5a820862 Rename "load_project_ide_data" to the "load_build_metadata" 2022-05-14 16:29:41 +03:00
Ivan Kravets
40e4e38e0c Do not override CWD when executing a package command 2022-05-14 16:23:36 +03:00
Ivan Kravets
cb1c825747 Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2022-05-14 15:27:13 +03:00
John Belmonte
8c27754045 qtcreator IDE template now generates a "generic" Qt project (#4262)
* Create qtcreator-generic IDE template.

* Fix case of #define in qtcreator-generic template .config file.

* follow directory move

* * fix includes output
  * fixup -mlong-calls for clang
  * add Makefile to files output

* fix escaping in config output

* Makefile improvements:
  * support any platformio run target
  * remove platformio deprecated -f option
  * remove explicit default target (first is always default)

* replace qtcreator rather than making another IDE target

Co-authored-by: Donna Whisnant <dewhisna@users.noreply.github.com>
2022-05-14 15:26:04 +03:00
Ivan Kravets
3247e661e9 Regroup "pio project" command 2022-05-14 13:41:20 +03:00
Ivan Kravets
7c93167d52 Docs: Document double hyphen for "pio debug" // Resolve #4260 2022-05-13 21:04:44 +03:00
Ivan Kravets
79b2bfdefe Fix an issue with multiple symbol definitions when framework uses own Unity // Resolve #4259 2022-05-12 15:34:50 +03:00
Ivan Kravets
de7d710943 Look for custom "unity_config.h" only in the "test" dir 2022-05-12 14:17:45 +03:00
Ivan Kravets
b88a29e652 Bump version to 6.0.0rc2 2022-05-12 13:41:45 +03:00
Ivan Kravets
ed0b12dcf9 Improve project config parser to resolve renamed options // Issue #4259 2022-05-12 13:24:27 +03:00
Ivan Kravets
280bede0e9 Bump version to 6.0.0rc1 2022-05-10 20:22:36 +03:00
Ivan Kravets
e6938f8f39 List available project tests with a new "pio test --list-tests" option 2022-05-10 20:21:49 +03:00
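
A minimal usage sketch of the new option (the "native" environment name is a placeholder):

    pio test --list-tests              # discover and print test suites without running them
    pio test --list-tests -e native    # restrict the listing to one environment
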
Ivan Kravets
6d705172f5 Docs: Extend migration guide with Unit Testing solution 2022-05-10 19:18:36 +03:00
Ivan Kravets
8fff7084db Rename pio test --output-{format} options to --{format}-output 2022-05-10 18:25:26 +03:00
Ivan Kravets
e75bf27b5f Add "-pthread" to the LINKFLAGS 2022-05-10 17:23:03 +03:00
Ivan Kravets
2c99607d3d Pass "-pthread" flag to GoogleTest only on Unix OS 2022-05-10 16:46:48 +03:00
Ivan Kravets
c09af13b7f Add "-pthread" flag for GoogleTest 2022-05-10 16:13:30 +03:00
Ivan Kravets
ee6b498ca9 Optimize unit testing report CLI 2022-05-10 15:25:30 +03:00
Ivan Kravets
65f2f02d93 Add support for GoogleTest testing and mocking framework // Resolve #3572 2022-05-10 14:30:02 +03:00
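
GoogleTest is selected per environment through the "test_framework" option; a minimal sketch, assuming a host-side "native" environment:

    [env:native]
    platform = native
    test_framework = googletest
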
Ivan Kravets
960edb5611 Use full testing program path on Windows 2022-05-10 11:59:59 +03:00
Ivan Kravets
cda7a97e67 Do not automatically generate JSON report 2022-05-09 22:32:16 +03:00
Ivan Kravets
c520700276 Export testcase file & line to JUnit XML 2022-05-09 19:20:33 +03:00
Ivan Kravets
a7654a6098 Move Unity code parts to the Unity runner 2022-05-09 18:58:43 +03:00
Ivan Kravets
814679522a Do not override embedded std flag 2022-05-09 18:49:15 +03:00
Ivan Kravets
4249349c2b Add hint about verbose output 2022-05-09 18:40:46 +03:00
Ivan Kravets
d065646d3e Update SPDX license list to v3.17 2022-05-09 10:08:08 +03:00
Ivan Kravets
0cf7aeeec9 Fix test on Github Actions 2022-05-08 14:42:07 +03:00
Ivan Kravets
277ccdafb6 Bump version to 6.0.0b1 2022-05-07 17:58:42 +03:00
Ivan Kravets
5b00f6fb95 Skip "test_doctest_framework" from Github Actions / Windows 2022-05-07 17:55:32 +03:00
Ivan Kravets
3f46a97b6b Fix LDF lib resolving 2022-05-07 16:44:11 +03:00
Ivan Kravets
3989979ca3 Pass extra arguments to the native program with a new "pio run --program-arg" option // Resolve #4246 2022-05-07 16:22:05 +03:00
Ivan Kravets
50eda82e27 Fix test 2022-05-07 14:09:11 +03:00
Ivan Kravets
daa3481862 Pass extra arguments to the testing program with a new "pio test --program-arg" option // Resolve #3132 2022-05-07 13:31:19 +03:00
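
Illustrative usage of the new option (environment name and forwarded argument are placeholders; arguments are passed through to the test executable):

    pio test -e native --program-arg "--verbose"
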
Ivan Kravets
2d94000dd5 Rename source.file to source.file name and report project folder 2022-05-07 13:24:27 +03:00
Ivan Kravets
e3eb155d76 Improve doctest results parser 2022-05-07 13:23:03 +03:00
Ivan Kravets
f95e23118c Fix test 2022-05-06 21:57:39 +03:00
Ivan Kravets
82778473fe New: "doctest" testing framework // Resolve #4240 2022-05-06 20:00:23 +03:00
Ivan Kravets
dae3b9665b Implement TestCase.humanize 2022-05-06 19:56:39 +03:00
Ivan Kravets
f19058df65 Try to resolve paths if the common part is not found 2022-05-06 19:40:00 +03:00
Ivan Kravets
3c7bec7c61 Exclude SVG files by default 2022-05-06 19:39:21 +03:00
Ivan Kravets
c4388a6904 Fixed an issue when LDF ignores build_src_flags in the “deep+” mode // Resolve #4253 2022-05-06 10:31:34 +03:00
Ivan Kravets
6d1e637518 Add support for Semihosting and Unit Testing // Resolve #3516 2022-05-05 17:36:15 +03:00
Ivan Kravets
bbd56d6eb0 Document using QEMU, Renode, SimAVR simulators with Unit Testing // Resolve #4238 2022-05-05 15:33:39 +03:00
Ivan Kravets
0b317ef04b Implement buffering for the testing output 2022-05-05 13:02:27 +03:00
Ivan Kravets
c0cfbe2ce0 Using hardware Simulators for Unit Testing // Issue #4238 2022-05-04 23:20:37 +03:00
Ivan Kravets
3ed5d41df5 Strip ANSI codes from Unity output 2022-05-04 18:56:57 +03:00
Ivan Kravets
517ee6532f Move "strip_ansi_codes" to the util 2022-05-04 18:55:34 +03:00
Ivan Kravets
653f22f85b Fix issue with nested interpolation 2022-05-04 14:52:11 +03:00
Ivan Kravets
38906478d3 Professional collaborative platform for safety-critical and declarative embedded development 2022-05-03 22:09:25 +03:00
Ivan Kravets
e81d83b8c2 Added support for a Custom Unity Library // Resolve #3980 2022-05-03 21:47:20 +03:00
Ivan Kravets
b12d9f62b9 Show list of failed tests in the summary // Resolve #4251 2022-05-03 19:30:15 +03:00
Ivan Kravets
0849e5faad Rename "src_filter" and "src_build_flags" options // Resolve #4245 2022-05-03 18:39:49 +03:00
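
The renamed options keep their old semantics; an illustrative platformio.ini fragment (filter values are examples only):

    [env:uno]
    ; formerly "src_filter" and "src_build_flags"
    build_src_filter = +<*> -<test/>
    build_src_flags = -DSRC_ONLY_FLAG
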
Ivan Kravets
1a4419059d Added support for "socket://" and "rfc2217://" protocols using "test_port" option // Resolve #4229 2022-05-03 18:11:23 +03:00
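
An illustrative "test_port" configuration using the new network protocols (host addresses and ports are placeholders):

    [env:remote_board]
    test_port = socket://192.168.0.50:4444
    ; or a telnet/RFC 2217 serial bridge:
    ; test_port = rfc2217://192.168.0.50:4000
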
Ivan Kravets
4ef1333abc Refactor test runner mixins to the test output readers 2022-05-03 15:21:53 +03:00
Ivan Kravets
2b11f64ef1 New Custom Testing Framework 2022-05-03 14:30:15 +03:00
Ivan Kravets
5b98f432f2 Update deps 2022-05-03 14:25:29 +03:00
Ivan Kravets
76779e6af4 Sync docs 2022-05-01 23:00:25 +03:00
Ivan Kravets
738d537266 Docs: Sync Intel MCS51 dev-platform 2022-05-01 20:10:25 +03:00
Ivan Kravets
327d5990d6 Docs: Minor improvements 2022-04-29 21:51:35 +03:00
Ivan Kravets
16021d0df7 Added support for "Test Hierarchies" // Issue #4135 2022-04-29 20:46:43 +03:00
Ivan Kravets
b37a74dfd9 Refactor Unit Testing documentation 2022-04-29 20:46:04 +03:00
Ivan Kravets
d02f02731f Rename the "test_build_project_src" project configuration option to "test_build_src" 2022-04-29 20:44:28 +03:00
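
Only the option name changes; a sketch of the new spelling:

    [env:native]
    ; formerly "test_build_project_src"
    test_build_src = yes
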
Ivan Kravets
4295c54c67 Sync docs and examples 2022-04-29 14:50:15 +03:00
Ivan Kravets
fb1e4fa02b Add "--filter" option to the pio remote test command 2022-04-28 22:02:16 +03:00
Sebastian Guarino
62b8a63b80 Add --filter to remote test (#4244) 2022-04-28 18:25:43 +03:00
Valerii Koval
ab3c832f5e Pylint fix 2022-04-27 21:15:08 +03:00
Valerii Koval
d380e7ea01 Update Cppcheck and PVS-Studio tools to the latest available 2022-04-27 20:47:13 +03:00
Valerii Koval
e69fd5e682 Minor improvements to check tools
- Better handling of unusual macro for PVS-Studio
- Fail the analysis if Cppcheck exited with an internal error
2022-04-27 20:45:21 +03:00
Valerii Koval
285f19e132 Properly handle cases when path to a file with a defect is unknown
Resolves #4237
2022-04-27 20:40:55 +03:00
Ivan Kravets
4151f53e14 Rename unit testing module to "test" 2022-04-26 15:09:51 +03:00
Ivan Kravets
5895fb9faf Bump version to 6.0.0a2 2022-04-25 22:11:50 +03:00
Ivan Kravets
19e22d74f3 Fix unit testing case 2022-04-25 15:30:54 +03:00
Ivan Kravets
26ed6a5548 Implement required setUp/tearDown functions for the latest Unity testing framework 2022-04-25 13:23:33 +03:00
Ivan Kravets
05dd7dd811 Revert back showing test cases status before 2022-04-24 21:08:49 +03:00
Ivan Kravets
8b694f3734 Unity: show test case status before stdout 2022-04-24 11:28:07 +03:00
Ivan Kravets
c9026a1b9c Generate reports in JUnit and JSON formats // Resolve #2891 2022-04-23 19:19:25 +03:00
Ivan Kravets
9b221a06c8 Unity: Avoid "weak" attributes on Windows 2022-04-23 11:05:28 +03:00
Ivan Kravets
f88904e246 Export "ConfigureDebugFlags" to build env (backward compatibility with Zephyr build script) 2022-04-22 18:14:28 +03:00
Ivan Kravets
e3533dcb01 Added support for test hierarchies (nested test suites) // Resolve #4135 2022-04-22 15:19:12 +03:00
Ivan Kravets
8edb5ffe20 Use unsigned long for unityOutputStart 2022-04-22 10:55:59 +03:00
Ivan Kravets
90e6cd7b46 Fixed an issue when command line parameters do not override values // Resolve #3845 2022-04-21 20:23:30 +03:00
Ivan Kravets
1fa73fb632 Typo fixes 2022-04-21 20:22:57 +03:00
Ivan Kravets
a615af233a Provide more information when the native program crashed on a host (errored with a negative return code) // Resolve #3429 2022-04-21 19:32:12 +03:00
Ivan Kravets
4817e13823 PyLint fixes 2022-04-21 19:30:55 +03:00
Ivan Kravets
ee43b86742 Introduce a new PlatformIO Unit Testing engine 2022-04-21 18:11:49 +03:00
Ivan Kravets
93bfc57dea Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2022-04-21 17:12:31 +03:00
Ivan Kravets
a568a5c356 Keep recursive for the glob 2022-04-21 17:10:38 +03:00
Valerii Koval
0b21977e48 Sync docs 2022-04-21 17:07:21 +03:00
Ivan Kravets
2f7668aef5 Improve src matcher for the symbolic links 2022-04-21 16:31:40 +03:00
Ivan Kravets
72fa6eebba Switch to FS JSON loader 2022-04-21 16:30:55 +03:00
Ivan Kravets
2f6a417168 Move test 2022-04-20 18:54:40 +03:00
Ivan Kravets
faa63727ab Revert back to title() 2022-04-20 18:48:26 +03:00
Ivan Kravets
a2b1a0a0a7 Use capitalize instead of title 2022-04-20 18:36:28 +03:00
Ivan Kravets
0d7bc09c49 Cache DL requests 2022-04-20 18:33:46 +03:00
Ivan Kravets
f57ca747a9 Add support for DL mirrors 2022-04-20 18:03:55 +03:00
Ivan Kravets
624421e4b0 Memoize dev-platform instance across the cloned build envs 2022-04-19 13:51:43 +03:00
Ivan Kravets
943c6bc59c Move INO converter to a separate tool 2022-04-19 11:36:05 +03:00
Ivan Kravets
9ce0b0e25b Use builtin "title()" 2022-04-19 11:33:56 +03:00
Ivan Kravets
df3a13fc61 Move MISSING to the compat 2022-04-19 11:32:36 +03:00
Ivan Kravets
5a0a215bfc Use PY3 super() zero-argument syntax 2022-04-15 14:44:30 +03:00
Ivan Kravets
eaff7f307c Avoid RecursionError for circular_dependencies // Resolve #4228 2022-04-15 14:17:21 +03:00
Ivan Kravets
8d63591ce8 Extend "library.json" with an example for passing flags to library dependencies // Resolve #1941 2022-04-13 18:55:44 +03:00
Ivan Kravets
0e3aa29689 Introduce PlatformIO Core 6.0 2022-04-13 15:32:05 +03:00
Ivan Kravets
a56b19ff65 Improve pio exec command on Windows 2022-04-13 13:58:31 +03:00
Ivan Kravets
62b7ec271f Keep PY2 for backward compatibility with ESP8266/ESP32 // Resolve #4226 2022-04-13 12:51:13 +03:00
Ivan Kravets
5515bef3d7 Add backward compatibility with ESP-IDF build script // Resolve #4225 2022-04-13 12:47:17 +03:00
Ivan Kravets
092f5de231 Fix removing temporary debugging data on Windows 2022-04-12 18:17:38 +03:00
Ivan Kravets
81fdd75aac Report problematic file before publishing package to the registry 2022-04-12 12:30:49 +03:00
Ivan Kravets
f63b2f79e0 Fixed an issue when GCC preprocessor was applied to the ".s" assembly files on case-sensitive OS such as Windows // Resolve #3917 2022-04-10 19:21:03 +03:00
Ivan Kravets
0501d55c8f Fixed an issue with calling an extra script located outside a project // Resolve #4220 2022-04-10 19:09:29 +03:00
Ivan Kravets
fe6f51369e Autoinstall dev-platform for the "clean" target 2022-04-10 13:56:44 +03:00
Ivan Kravets
8f454c7e9c Bump version to 5.3.0b5 2022-04-09 20:31:40 +03:00
Ivan Kravets
965feccfdc Extended Interpolation of Values with "${this}" pattern // Resolve #3953 2022-04-09 20:31:06 +03:00
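
A minimal sketch of the "${this}" pattern, which lets an option reference sibling options in the same section (board/env names are placeholders; "${this.__env__}" expands to the current environment name per the docs):

    [env:uno_release]
    board = uno
    build_flags = -D THIS_BOARD=\"${this.board}\" -D THIS_ENV=\"${this.__env__}\"
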
Ivan Kravets
5e18f9bbda Finally removed all traces of Python 2.7 2022-04-09 17:46:21 +03:00
Ivan Kravets
541fcbf015 Added a new build variable (COMPILATIONDB_INCLUDE_TOOLCHAIN) to include toolchain paths in the compilation database // Resolve #3735 2022-04-09 12:53:22 +03:00
Ivan Kravets
16f5374474 Typo fix 2022-04-08 21:58:29 +03:00
Ivan Kravets
b414745aa1 Fixed an issue when LDF ignores the project "lib_deps" while resolving library dependencies // Resolve #3598 2022-04-08 18:37:16 +03:00
Ivan Kravets
696d95bf1b Black formatter 2022-04-08 18:36:43 +03:00
Ivan Kravets
1269ce064a Improved detection of a package type from the tarball archive // Resolve #3828 2022-04-08 13:58:40 +03:00
Max Prokhorov
9097d455db Avoid working with detached / non-existent git branches when checking for updates (#4217)
* Avoid working with detached / non-existent git branches when checking for updates

b/c we can't use `pull` anyway in that situation
Otherwise, ask for the specific branch via `refs/heads/{branch}` and
also fail when it is not available

* Update vcsclient.py

Co-authored-by: Ivan Kravets <me@ikravets.com>
2022-04-08 13:15:35 +03:00
Ivan Kravets
1615159014 Fix test 2022-04-08 12:03:31 +03:00
Ivan Kravets
e4e1e72c30 Bump version to 5.3.0b4 2022-04-07 23:10:35 +03:00
Ivan Kravets
43329b7748 Minor improvements for symlink support // Issue #3348 2022-04-07 23:03:40 +03:00
Ivan Kravets
2280865936 Resolve symlink based on the saved cwd 2022-04-05 09:11:10 +03:00
Ivan Kravets
fb2f3c8836 Resolve symlink based on the saved cwd 2022-04-05 09:07:44 +03:00
Ivan Kravets
e2f21212b7 Added support for symbolic links allowing pointing the local source folder to the Package Manager // Resolve #3348 2022-04-04 23:14:19 +03:00
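
With this change a local folder can be wired in through the "symlink://" scheme instead of being copied; an illustrative dependency declaration (the path is a placeholder):

    [env:native]
    lib_deps = symlink://../shared/MyLocalLib
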
Ivan Kravets
d7597d0992 Cache downloads cleanup 2022-04-04 22:45:25 +03:00
Ivan Kravets
c21876ebe3 Typo fix in class name 2022-04-04 22:22:22 +03:00
Ivan Kravets
76bea5b7a7 Cache downloads cleanup 2022-04-04 22:21:06 +03:00
Ivan Kravets
a03d82ff1a Replace package meta URL with URI 2022-04-04 14:18:11 +03:00
Ivan Kravets
f555656c92 Bump version to 5.3.0b3 2022-04-03 23:18:01 +03:00
Ivan Kravets
f289ebd1f3 Revert back lib deps tree to ascii chars 2022-04-03 23:17:29 +03:00
Ivan Kravets
41b3646012 Bump version to 5.3.0b2 2022-04-03 19:54:03 +03:00
Ivan Kravets
8de5db4b48 Added support for “scripts” in package manifest // Resolve #485 2022-04-03 19:53:34 +03:00
Ivan Kravets
d8be12dcdd PyLint fix 2022-04-03 10:54:23 +03:00
Ivan Kravets
71f9401e23 Fixed an issue when manually removed dependencies were not uninstalled from the storage // Resolve #3076 2022-04-02 22:30:35 +03:00
Ivan Kravets
cdd63dec65 Do not process package that was installed into the "env" storage // Resolve #2910 2022-04-02 16:38:54 +03:00
Ivan Kravets
279fdfc47a Show project dependency licenses when building in the verbose mode 2022-04-02 16:28:40 +03:00
Ivan Kravets
feda42f18f Added support for multi-licensed packages in library.json using SPDX Expressions // Resolve #4037 2022-04-02 14:19:24 +03:00
Ivan Kravets
d86f7fc25e Added ability to override a tool version using the "platform_packages" option // Resolve #3798 2022-04-01 22:05:30 +03:00
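
An illustrative "platform_packages" override pinning a specific tool version (package name and version are examples only):

    [env:disco_f407vg]
    platform = ststm32
    platform_packages =
        platformio/tool-openocd @ ~2.1100.0
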
Ivan Kravets
e4fb675d5f Install only missed dependencies for the private libraries // Resolve #2910 2022-04-01 17:25:40 +03:00
Ivan Kravets
25e786e6a5 Docs: Sync with dev-platforms 2022-04-01 14:29:38 +03:00
Ivan Kravets
fd01e98cb1 Fix an issue with automatic installation of debug dependencies 2022-04-01 13:47:07 +03:00
Ivan Kravets
2a88cdb8df Bump version to 5.3.0b1 2022-03-31 19:26:21 +03:00
Ivan Kravets
be8f842061 Automatically install dependencies of the local (private) libraries // Resolve #2910 2022-03-31 19:25:44 +03:00
Ivan Kravets
fcb81ae074 Update docs with the new Package Specifications // Resolve #3373 2022-03-31 15:44:16 +03:00
Ivan Kravets
7d9c018b44 Implement Click logging handler for package manager 2022-03-30 21:40:59 +03:00
Ivan Kravets
a6e12532f8 Implement pio pkg search command // Issue #3373 2022-03-30 17:32:05 +03:00
Ivan Kravets
bd202f55ce Rename search "filters" to "qualifiers" 2022-03-30 14:43:02 +03:00
Ivan Kravets
f7b5a7bed8 Added support for the custom Clang-Tidy configuration file // issue #4186 2022-03-30 12:01:17 +03:00
Zach Zodkoy
6123d6f9bf Don't append --checks=* when the --config or --config-file flags are set (#4210)
Appending --checks=* causes clang-tidy to ignore the flags --config
and --config-file, which breaks the ability to use a clang-tidy file
2022-03-30 11:47:14 +03:00
Ivan Kravets
6c8173d1aa Implement pio pkg show command // Issue #3373 2022-03-29 16:39:48 +03:00
Ivan Kravets
d2f857d176 Lock "click" dependency for Python 3.6 2022-03-28 20:56:23 +03:00
Ivan Kravets
1e2afafbc4 Use parse_datetime API 2022-03-28 18:18:51 +03:00
Ivan Kravets
927c5c5e36 Do not install any dependencies on the "clean" target 2022-03-28 00:05:20 +03:00
Ivan Kravets
b2ea96b4a7 Resolve package path 2022-03-27 22:34:43 +03:00
Ivan Kravets
6afb53dd7d PyLint fixes 2022-03-27 22:34:22 +03:00
Ivan Kravets
d7477833d6 PyLint fixes 2022-03-24 14:29:32 +02:00
Ivan Kravets
7624645626 Implement pio pkg list command // Issue #3373 2022-03-24 14:17:18 +02:00
Ivan Kravets
53753c0127 Do not install dependencies that are built-in libraries 2022-03-23 18:01:23 +02:00
Ivan Kravets
95604ff66a Minor enhancements 2022-03-23 18:00:31 +02:00
Ivan Kravets
99e0d1071a Add package METAVAR for CLI 2022-03-23 17:57:18 +02:00
Ivan Kravets
13aacbcc05 Dump only required toolchains 2022-03-23 17:56:15 +02:00
Ivan Kravets
b137b25169 Enhance library dependency tree 2022-03-23 17:55:27 +02:00
Ivan Kravets
b44fb101c4 Remove deprecated code 2022-03-21 18:38:36 +02:00
Ivan Kravets
accc8ac254 Add test for "pio pkg outdated" command 2022-03-21 16:00:29 +02:00
Ivan Kravets
435a526140 Implement pio pkg update command // Issue #3373 2022-03-20 15:40:44 +02:00
Ivan Kravets
346580d955 Do not warn about unknown packages if they are built-in libraries 2022-03-19 18:13:29 +02:00
Ivan Kravets
81f343dbe8 Cleanup dev-platform package installer 2022-03-19 18:12:36 +02:00
Ivan Kravets
fa443f2e5f Strict PackageItem comparison 2022-03-19 18:08:34 +02:00
Ivan Kravets
a25a86e42f Init dev-platform with autoinstallation 2022-03-19 18:07:19 +02:00
Ivan Kravets
1ffa924483 Fix test 2022-03-16 18:17:21 +02:00
Ivan Kravets
463a16a68f Implement "pio pkg uninstall" command // Issue #3373 2022-03-16 16:23:09 +02:00
Ivan Kravets
d2adca8d68 Minor improvements 2022-03-16 16:18:59 +02:00
Ivan Kravets
057bf89894 Sync "asrmicro650x" dev-platform 2022-03-16 12:36:22 +02:00
Ivan Kravets
c9037982d7 Save tool deps into the "platformio.ini" // Issue #3373 2022-03-14 13:37:47 +02:00
Ivan Kravets
ce1264564f Ensure default libs are saved 2022-03-14 12:31:48 +02:00
Ivan Kravets
61ffab376d Split code 2022-03-14 12:18:05 +02:00
Ivan Kravets
f3bcaae4e4 Update deps 2022-03-13 17:54:13 +02:00
Ivan Kravets
2201214717 Allow to skip saving of package dependencies to the "platformio.ini" // Issue #3373 2022-03-09 19:07:11 +02:00
Ivan Kravets
eba4231cdc Move test 2022-03-09 19:01:37 +02:00
Ivan Kravets
de0a810fcf Update "wsproto" dependencies to the "1.1.*" 2022-03-09 14:18:09 +02:00
Ivan Kravets
644fc36c32 Revert back to using TOX tmp dir for PyTest 2022-03-08 18:29:54 +02:00
Ivan Kravets
41144bffeb Reset custom project config per command 2022-03-08 18:00:10 +02:00
Ivan Kravets
c84709dd9d Switch to the new "pio pkg install" command 2022-03-08 15:57:25 +02:00
Ivan Kravets
f28651eaf7 Ensure package dependencies are installed // Resolve #2573 2022-03-08 14:59:12 +02:00
Ivan Kravets
9e40eb992e Implement unified "pio pkg install" CLI // Issue #3373 2022-03-08 14:58:01 +02:00
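
Illustrative invocations of the unified installer (package specs are examples only):

    pio pkg install                                        # everything declared in platformio.ini
    pio pkg install --library "bblanchon/ArduinoJson@^6"
    pio pkg install --platform espressif32
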
Ivan Kravets
f445cb7895 Ignore Python3 "__pycache__" binaries 2022-03-06 16:00:01 +02:00
Volodymyr Shymanskyy
dfc0ecdf69 #StandWithUkraine (#4195) 2022-03-06 13:20:54 +02:00
Ivan Kravets
6f11f812f8 Ignore files according to the patterns declared in ".gitignore" when using pio package pack // Resolve #4188 2022-02-23 18:46:53 +02:00
Ivan Kravets
4191a9bc3c Fixed issue linked to package refactoring // Resolve #4189 2022-02-23 13:37:02 +02:00
Ivan Kravets
f2fbdafe64 Use the latest PIO Remote dependencies on non-ARM platforms // Issue #3865 2022-02-22 13:36:11 +02:00
Ivan Kravets
22a037b213 Better handling of the failed tests using "Unit Testing" solution 2022-02-22 13:02:10 +02:00
Ivan Kravets
dbe3ab6c97 Docs: Fix platformio.ini contents for Zephyr and Nordic nRF52-DK tutorial 2022-02-21 19:27:05 +02:00
Ivan Kravets
6bed610af3 Check for invalid version with leading zeros 2022-02-21 18:02:56 +02:00
Ivan Kravets
4d9547066b Show package size before publishing to the registry 2022-02-21 15:00:13 +02:00
Ivan Kravets
54c18ae0c6 Fix test on Win 2022-02-19 21:10:57 +02:00
Ivan Kravets
e49fb9f0d0 Minor Py.Test fixes 2022-02-19 20:45:37 +02:00
Ivan Kravets
33da2af31e Improve pio pkg exec test 2022-02-19 19:22:40 +02:00
Ivan Kravets
bcb3678055 Add test for pio pkg exec command 2022-02-18 21:03:12 +02:00
Ivan Kravets
28da2d245b Handle "BlockingIOError" when locking file resource 2022-02-18 18:51:03 +02:00
Ivan Kravets
e6864adfb6 Minor improvements 2022-02-18 18:34:50 +02:00
Ivan Kravets
8562319638 Do not handle built-in libraries when using package manager 2022-02-18 18:34:24 +02:00
Ivan Kravets
6be17cec37 Added support for dependencies declared in a "tool" type package 2022-02-18 17:51:07 +02:00
Ivan Kravets
f34e6e9c4c Port package management "print_message" to the Python logging system 2022-02-18 12:57:30 +02:00
Ivan Kravets
e8051838a3 Dropped support for "pythonPackages" field in "platform.json" manifest in favor of "Extra Python Dependencies" 2022-02-17 17:25:21 +02:00
Ivan Kravets
f1f5497d8d Fix test 2022-02-16 22:33:16 +02:00
Ivan Kravets
1b44ba4ce0 Dropped automatic updates of global libraries and development platforms // Resolve #4179 2022-02-16 21:53:18 +02:00
Ivan Kravets
a4d2dc856c Do not check for "system prune" for newest PlatformIO Core installation 2022-02-16 21:08:13 +02:00
Ivan Kravets
7964d1c2bf Docs: Add community book "Developing IoT Projects with ESP32" 2022-02-15 20:49:26 +02:00
Ivan Kravets
5df5dd155f Bump version to 5.3.0a3 2022-02-12 23:14:16 +02:00
Ivan Kravets
89cce21161 Move "pio exec" command to "pio pkg exec" // Issue #4163 2022-02-12 23:13:17 +02:00
Ivan Kravets
0bdef36e2a pio pkg outdated - check for project outdated packages // Issue #3373 2022-02-12 23:06:10 +02:00
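
Usage is a single command run from the project directory:

    pio pkg outdated    # list project platforms, tools and libraries that have newer versions available
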
Ivan Kravets
e549a07901 Typo fix 2022-02-12 23:01:20 +02:00
Ivan Kravets
98603dad66 Configure platform instance with project packages using "configure_project_packages" API 2022-02-12 21:59:27 +02:00
Ivan Kravets
c37fbda7a8 Bump version to 5.3.0a2 2022-02-11 22:42:50 +02:00
Ivan Kravets
34ea4d8f41 Move "debug" command to its main module 2022-02-11 22:42:02 +02:00
Ivan Kravets
452a76105f Update command titles 2022-02-11 22:33:33 +02:00
Ivan Kravets
4982676ca8 Rename "package" command to "pkg" 2022-02-11 22:24:37 +02:00
Ivan Kravets
83d115acca Ensure that platform directory path is string or bytes 2022-02-11 22:22:20 +02:00
Ivan Kravets
86bd0f7c37 Show current working directory, not a path to platformio.ini 2022-02-11 22:21:44 +02:00
Ivan Kravets
83fe00a0cf Revert "Run library extra script only at a build process" (breaks mbed framework) // Issue #3915 2022-02-11 17:00:33 +02:00
Ivan Kravets
526abc6a9f Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBone, etc) // Resolve #3865 2022-02-11 14:42:17 +02:00
Ivan Kravets
63feda6efc Simplify dependency on "zeroconf" package // Resolve #4177 2022-02-11 12:15:47 +02:00
Ivan Kravets
c9b3dedbb0 Merge tag 'v5.2.5' into develop
Bump version to 5.2.5

# Conflicts:
#	HISTORY.rst
#	docs
#	platformio/__init__.py
2022-02-10 21:02:47 +02:00
Ivan Kravets
dae8dfe1fc Merge branch 'release/v5.2.5' 2022-02-10 20:59:25 +02:00
Ivan Kravets
100def7609 Bump version to 5.2.5 2022-02-10 20:59:16 +02:00
Ivan Kravets
8594012fa1 Update deps 2022-02-10 20:55:38 +02:00
Ivan Kravets
27400f66a9 Strip the path to userhome dir on Linux // Resolve #4173 Issue #4158 2022-02-10 20:55:31 +02:00
Ivan Kravets
bb1e590222 Update SPDX License List to 3.16 2022-02-10 20:55:18 +02:00
Kalle Bracht
a4b414010d Removing inconsistent dot at README.rst, HISTORY.rst and CONTRIBUTING.md (#4172)
* Removing inconsistent dot at README list

* Removing inconsistent dot at HISTORY file

* Removing inconsistent dot at CONTRIBUTING file
2022-02-10 20:55:08 +02:00
Ivan Kravets
1d72a96654 Merge tag 'v5.2.5' into develop
Bump version to 5.2.5

# Conflicts:
#	docs
#	platformio/__init__.py
2022-02-10 20:52:16 +02:00
Maciej Augustyniak
9b85ed86a9 fix: Added udev rule for FireBeetle-ESP32. (#4168) 2022-02-10 20:50:55 +02:00
Ivan Kravets
e36066a9a2 Move package's related commands to "package" sub-folder 2022-02-10 15:22:20 +02:00
Ivan Kravets
8082158a16 Update deps 2022-02-08 17:40:50 +02:00
Ivan Kravets
1a8567a6da Sync docs 2022-02-08 17:33:58 +02:00
Ivan Kravets
b17cbe30e2 Strip the path to userhome dir on Linux // Resolve #4173 Issue #4158 2022-02-08 17:21:13 +02:00
Ivan Kravets
8aadc88dd5 Update SPDX License List to 3.16 2022-02-07 13:46:47 +02:00
Kalle Bracht
f3d26fae64 Removing inconsistent dot at README.rst, HISTORY.rst and CONTRIBUTING.md (#4172)
* Removing inconsistent dot at README list

* Removing inconsistent dot at HISTORY file

* Removing inconsistent dot at CONTRIBUTING file
2022-02-07 13:45:56 +02:00
Ivan Kravets
828d6f5baf Fixed a "module 'asyncio' has no attribute 'run'" error when launching PIO Home using Python 3.6 // Resolve #4169 2022-02-05 20:00:37 +02:00
Maciej Augustyniak
2003806481 fix: Added udev rule for FireBeetle-ESP32. (#4168) 2022-02-05 13:13:43 +02:00
Ivan Kravets
362823c1e1 Bump version to 5.2.5b1 2022-02-04 19:15:55 +02:00
Ivan Kravets
9c10e00234 Run command from a PlatformIO package with a new pio exec command // Resolve #4163 2022-02-04 19:15:31 +02:00
Ivan Kravets
a4cef2fbd8 Bump version to 5.2.5a7 2022-02-03 15:33:30 +02:00
Ivan Kravets
e5fca99b52 Run library extra script only at a build process // Resolve #3915 2022-02-03 15:33:03 +02:00
Ivan Kravets
f4c692eed2 Bump PIO Home to 3.4.1 2022-02-02 17:42:28 +02:00
Ivan Kravets
2e0688db5f Fix test 2022-02-02 12:42:31 +02:00
Ivan Kravets
ac2b358f87 Docs: generate docs from the registry 2022-02-01 21:56:53 +02:00
Ivan Kravets
251a2c9fa4 Docs: link packages with the registry 2022-02-01 15:38:15 +02:00
Ivan Kravets
0064d4b2c5 Docs: remove deprecated links to "boards" page 2022-02-01 15:01:58 +02:00
Ivan Kravets
ebbac6b483 Use "black" profile 2022-02-01 15:00:47 +02:00
Ivan Kravets
d5373a62f4 Docs: Sync dev-platforms 2022-01-28 14:24:25 +02:00
Ivan Kravets
681b91a6a4 Update deps 2022-01-23 14:17:22 +02:00
CommanderRedYT
8c66352994 Fixed wrong path (#4158)
* Fixed wrong path

On linux, "Documents" doesn't have to be the right folder. It depends on the language selected when installing the operating system.

* Refactor code

* Update HISTORY.rst

Co-authored-by: Ivan Kravets <me@ikravets.com>
2022-01-20 12:19:30 +02:00
Ivan Kravets
4e1ec1215a Bump version to 5.2.5a6 2022-01-19 17:16:44 +02:00
Ivan Kravets
6981894060 Minor updates 2022-01-19 17:16:23 +02:00
Ivan Kravets
57c92e877c Respect disabling debugging server from platformio.ini 2022-01-19 16:53:31 +02:00
Ivan Kravets
e8c0b8504a Ignore annoying "ms-vscode.cpptools-extension-pack" for VSCode and C/C++ files 2022-01-15 22:27:30 +02:00
Ivan Kravets
93bbe8f2a3 Update deps 2022-01-15 15:00:55 +02:00
Ivan Kravets
c78bb1f572 Docs: Remove icons from navbar 2022-01-11 14:11:32 +02:00
Scott Lahteine
7256102785 Unix line-endings for extensions.json (#4153) 2022-01-09 13:58:39 +02:00
Ivan Kravets
fc907c568d Improved checking of available Internet connection for IPv6-only workstations // Issue #4151 2022-01-08 15:08:39 +02:00
Ivan Kravets
9e078ff4d7 Sync docs 2022-01-08 15:00:35 +02:00
Alexey Vazhnov
5658e7f718 _internet_on: try IPv4, if not acceptable — try IPv6 (#4151)
* _internet_on: try IPv4, if not acceptable — try IPv6

* _internet_on: replace IPv4 `socket.socket` + IPv6 `socket.socket` with one universal `socket.create_connection`
2022-01-08 14:59:47 +02:00
Ivan Kravets
111eb55a9f Docs: Update "platformio.ini" examples 2022-01-05 15:00:41 +02:00
Ivan Kravets
0630ec5503 Bump version to 5.2.5a5 2022-01-04 17:18:14 +02:00
Ivan Kravets
38cc493eb7 Minor improvements 2022-01-04 17:17:51 +02:00
Ivan Kravets
254507c3a3 Escape custom request arguments 2022-01-04 15:02:48 +02:00
Ivan Kravets
7cdcc9099b Escape custom request arguments 2022-01-04 14:53:34 +02:00
Ivan Kravets
fb046c43ea Require authorization for package downloading 2022-01-04 14:46:51 +02:00
Ivan Kravets
73ddf80fc1 Refactor authentication part for clients 2022-01-04 14:45:14 +02:00
Ivan Kravets
a5a224ac6f Sync docs 2022-01-03 13:05:53 +02:00
Ivan Kravets
c56dfda833 Minor fixes 2022-01-02 23:08:21 +02:00
Ivan Kravets
6081f9ff1b Switch to the universal Twisted 2022-01-02 23:08:12 +02:00
Ivan Kravets
f3c7d71b3b Sync docs 2022-01-02 19:46:52 +02:00
Ivan Kravets
5748bf9549 Extend packing filters 2021-12-29 15:03:43 +02:00
Ivan Kravets
84a0a6a418 Update deps 2021-12-24 18:14:29 +02:00
Ivan Kravets
1ee9f183cc Fix test 2021-12-24 18:14:18 +02:00
Ivan Kravets
55e8523925 Improve docs for "dependencies" field of library.json 2021-12-24 15:04:54 +02:00
Ivan Kravets
c9efe24959 Switch to the new registry 2021-12-22 22:36:32 +02:00
Ivan Kravets
69aff39205 Warn about package publishing time 2021-12-20 20:57:18 +02:00
Ivan Kravets
f6e9e15253 Bump version to 5.2.5a4 2021-12-20 19:28:28 +02:00
Ivan Kravets
b7f685ed62 Fix a bug with expired account session 2021-12-20 19:27:56 +02:00
Ivan Kravets
6e03eff303 Handle base AccountError 2021-12-20 19:05:12 +02:00
Ivan Kravets
3e0b95e1e1 Fix tests 2021-12-18 14:17:22 +02:00
Ivan Kravets
a32997ceba Bump version to 5.2.5a3 2021-12-18 13:54:09 +02:00
Ivan Kravets
63674d85e8 Ignore private packages if user not authorized 2021-12-18 13:53:54 +02:00
Ivan Kravets
56848ece7a Bump version to 5.2.5a2 2021-12-18 13:45:51 +02:00
Ivan Kravets
449722f08c Improved support for private packages in PlatformIO Registry 2021-12-18 13:45:26 +02:00
Ivan Kravets
949b4562c7 Packaging: exclude extras from Arduino libraries 2021-12-15 13:46:30 +02:00
Ivan Kravets
75f68c8be1 Bump version to 5.2.5a1 2021-12-15 12:46:28 +02:00
Ivan Kravets
1b117712cf Merge branch 'release/v5.2.4' 2021-12-15 12:19:59 +02:00
Ivan Kravets
11356af502 Merge tag 'v5.2.4' into develop
Bump version to 5.2.4
2021-12-15 12:19:59 +02:00
Ivan Kravets
9dbdf7fc8d Bump version to 5.2.4 2021-12-15 12:19:51 +02:00
Ivan Kravets
dec38273b6 Cleanup code 2021-12-15 11:59:19 +02:00
Ivan Kravets
5098f5f420 Minor improvements // Issue #3865 2021-12-14 22:55:48 +02:00
Ivan Kravets
d32fd72d13 Bump version to 5.2.4rc1 2021-12-14 22:38:57 +02:00
Ivan Kravets
a4692d5457 Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBone, etc) // Resolve #3865 2021-12-14 22:38:31 +02:00
Ivan Kravets
24ea7aaede Update title to PlatformIO Core 2021-12-14 22:37:42 +02:00
Ivan Kravets
b7f10982c3 Update PIO Remote deps // Issue #3865 2021-12-14 21:14:11 +02:00
Ivan Kravets
8f28d1ad43 Update uvicorn to 0.16 2021-12-08 18:45:43 +02:00
Ivan Kravets
d5db2f0eb7 Apply formatting 2021-12-08 18:45:29 +02:00
Ivan Kravets
fe69f3de04 Bump version to 5.2.4b4 2021-12-08 18:40:37 +02:00
Ivan Kravets
5534394b06 Fixed an issue with wrong detection of the Windows architecture when 32-bit Python is used // Resolve #4134 2021-12-08 18:40:07 +02:00
Ivan Kravets
24fc2f7e14 Sync docs 2021-12-08 18:38:16 +02:00
valeros
5b23c9a294 Add basic test for CLion integration 2021-12-08 13:48:35 +02:00
Ivan Kravets
7338a02b48 Do not pack Python bytecode by default 2021-12-07 15:05:42 +02:00
Ivan Kravets
8555e83cb1 Sync docs 2021-12-07 15:05:25 +02:00
Ivan Kravets
39494d18bf Revert "Revert "Lock "cryptography" to RUST-less 3.3.2 version""
This reverts commit 24e63e7a02.
2021-12-06 20:59:31 +02:00
Ivan Kravets
aab42c3cff Skip library.properties "paragraph" if total len >= 1000 2021-12-03 20:05:37 +02:00
Ivan Kravets
f5a23c3817 Bump version to 5.2.4b3 2021-12-03 17:02:05 +02:00
Ivan Kravets
b3eb81c3b4 Typo fix 2021-12-03 17:01:42 +02:00
Ivan Kravets
4f4c88aca9 Use SCons vars for deprecated variables 2021-12-02 22:16:37 +02:00
valeros
c3ad3ebb57 Properly replace Home Directory in CLion template on Windows
Issue #4071
2021-12-02 20:56:18 +02:00
valeros
f13734dda4 Convert Home Directory path into a cmake-style path on Windows
Resolve #4071
2021-12-02 20:05:35 +02:00
Ivan Kravets
24e63e7a02 Revert "Lock "cryptography" to RUST-less 3.3.2 version"
This reverts commit 3828e6d15e.
2021-12-02 19:30:19 +02:00
Ivan Kravets
a163048396 Bump version to 5.2.4b2 2021-12-02 16:34:35 +02:00
Ivan Kravets
55f8471aff Improved tab completion support for Bash, ZSH, and Fish shells // Resolve #4114 2021-12-02 16:34:05 +02:00
Ivan Kravets
04e9f38e0e Check for default core dir in run-time (solves issue with tests) 2021-12-02 15:06:58 +02:00
Ivan Kravets
90972e9ce0 Sync docs 2021-12-02 14:55:48 +02:00
Ivan Kravets
6e8f60a27a Bump version to 5.2.4b1 2021-12-02 14:20:46 +02:00
Ivan Kravets
014090c407 Fixed an issue when referencing "*_dir" option from a custom project configuration environment // Resolve #4110 2021-12-02 14:19:54 +02:00
Ivan Kravets
e40b251c06 Fixed a bug when the system environment variable does not override a project configuration option // Resolve #4125 2021-12-02 13:13:07 +02:00
Ivan Kravets
414a194c9d Do not claim that library.properties packages are compatible with any dev-platform if the "architectures" field is not defined 2021-11-29 20:02:53 +02:00
Ivan Kravets
7bffe3993d Update deps 2021-11-29 20:01:27 +02:00
Ivan Kravets
3828e6d15e Lock "cryptography" to RUST-less 3.3.2 version 2021-11-29 14:31:38 +02:00
Ivan Kravets
85c582bc93 Use "/v3//search" endpoint when searching for packages in registry 2021-11-27 15:00:10 +02:00
Ivan Kravets
ea1c9dec12 Typo fix 2021-11-26 14:21:06 +02:00
Ivan Kravets
6753121a6a Better cleanup package manifest fields 2021-11-26 14:13:06 +02:00
Ivan Kravets
f63d899c42 Ignore duplicated manifest values 2021-11-25 22:35:44 +02:00
Ivan Kravets
7219c9f806 Ignore duplicated manifest values 2021-11-25 22:19:47 +02:00
Ivan Kravets
df2f1d10fd Sync docs 2021-11-25 22:19:01 +02:00
Ivan Kravets
3f71067b67 Update zeroconf deps to 0.37.* 2021-11-22 22:08:57 +02:00
Ivan Kravets
8dc68a01fd Do not print empty errors 2021-11-22 22:08:10 +02:00
Ivan Kravets
9e0ded958c Bump version to 5.2.4a4 2021-11-18 17:56:18 +02:00
Ivan Kravets
68243aa95b Added support for a new "headers" field in "library.json" 2021-11-18 17:55:35 +02:00
Ivan Kravets
507df1f507 Extend platform manifest test with a package owner 2021-11-18 13:31:49 +02:00
Ivan Kravets
1800c29b44 Upgraded build engine to the SCons 4.3 2021-11-18 13:17:26 +02:00
Ivan Kravets
0343548f6e Sync docs 2021-11-18 13:14:55 +02:00
valeros
5cb5c9713e Wrap the path to PlatformIO core in the NetBeans project template
This fixes a possible issue when the path to PlatformIO contains a whitespace

Resolve #4096
2021-11-15 19:22:41 +02:00
Ivan Kravets
5e2c5c793f SPDX License List v3.15 2021-11-15 11:28:57 +02:00
Ivan Kravets
3022cb6955 Bump version to 5.2.4a3 2021-11-12 15:17:55 +02:00
Ivan Kravets
4687665ff3 Improved support for projects located on a network share // Resolve #3417 , Resolve #3926 , Resolve #4102 2021-11-12 15:17:25 +02:00
Ivan Kravets
001f075a49 Bump version to 5.2.4a2 2021-11-09 22:49:21 +02:00
Ivan Kravets
7d78e4a60a Fixed an issue with the CLion project generator when a macro contains a space // Resolve #4102 2021-11-09 22:49:00 +02:00
Pedro Barreto
2786bfbeb8 Escape spaces in CLion CMakeListsPrivate template - FIXES #4085 (#4105)
This fix adds spaces to the regex substitutions on CMakeListsPrivate.txt add_definitions.

Fixes #4102
2021-11-09 22:45:12 +02:00
Ivan Kravets
d3049a8d62 Fix test 2021-11-08 20:08:18 +02:00
Ivan Kravets
831a2582ed Sync docs 2021-11-08 19:31:49 +02:00
Ivan Kravets
0919019123 Bump version to 5.2.4a1 2021-11-05 23:19:22 +02:00
Ivan Kravets
7dd9c99c91 Merge tag 'v5.2.3' into develop
Bump version to 5.2.3
2021-11-05 17:31:41 +02:00
Ivan Kravets
326c24911a Merge branch 'release/v5.2.3' 2021-11-05 17:31:40 +02:00
Ivan Kravets
133fa1495b Bump version to 5.2.3 2021-11-05 17:31:23 +02:00
Ivan Kravets
7c040ed99f Normalize Windows path with Python's pathlib 2021-11-05 17:21:15 +02:00
Ivan Kravets
f88a2de8a9 Filter duplicated recent projects on Windows 2021-11-05 17:05:30 +02:00
Ivan Kravets
a24ec8b07a Grammar fixes 2021-11-05 16:57:44 +02:00
Ivan Kravets
d6ad6f96e8 Bump version to 5.2.3rc1 2021-11-05 16:29:18 +02:00
Ivan Kravets
411764854b Add support for custom device monitor filters located in package folders // Issue #3924 2021-11-05 16:28:49 +02:00
Ivan Kravets
973f77012f Fixed an issue when VSCode's debugger does not honor default environment // Resolve #4098 2021-11-05 14:46:57 +02:00
Maximilian Gerhardt
1d80da2559 Add "inc" as sign that it's the root of the library (#4093)
* Add "inc" as sign that it's the root of the library

* Add "inc" and "Inc"

Co-authored-by: Ivan Kravets <me@ikravets.com>
2021-11-05 14:16:36 +02:00
Ivan Kravets
00d298935a Bump version to 5.2.3b5 2021-11-05 12:58:12 +02:00
Ivan Kravets
4a9a478243 Refactor PIO Home IDE RPC 2021-11-05 12:57:09 +02:00
Ivan Kravets
9040bbb75a Update deps 2021-11-05 12:56:39 +02:00
Ivan Kravets
abcc4c0a12 Bump version to 5.2.3b4 2021-11-02 20:06:08 +02:00
Ivan Kravets
ceb3a19b81 Automatically synchronize active projects between IDE and PlatformIO Home 2021-11-02 20:05:40 +02:00
Ivan Kravets
2a2f7825cc Sync docs 2021-11-01 16:21:47 +02:00
Ivan Kravets
a0e9f6a92d Docs: Sync dev-platforms 2021-11-01 15:57:17 +02:00
Ivan Kravets
dbc73f5086 Use Rust-less "cryptography" dependency for PIO Remote 2021-10-30 14:30:30 +03:00
Ivan Kravets
78a67b754e Docs: Extend a project configuration example with the common "[env]" section 2021-10-26 16:01:50 +03:00
Ivan Kravets
de4b02eaf1 Remove unused module 2021-10-26 15:52:16 +03:00
Ivan Kravets
751c82fd29 Bump version to 5.2.3b3 2021-10-26 15:42:05 +03:00
Ivan Kravets
8c8a94fc71 Run config option validation even in raw mode 2021-10-26 15:41:41 +03:00
Ivan Kravets
1174958e8b Add project.helpers.get_project_all_lib_dirs API (used by platformio-node-helpers) 2021-10-26 14:36:18 +03:00
Ivan Kravets
6399de7a66 Removed deprecated project.helpers API 2021-10-26 14:35:28 +03:00
Ivan Kravets
c0f2275b61 Restore ProjectConfig.get_optional_dir API, "platformio-node-helpers" depends on it 2021-10-26 14:34:32 +03:00
Ivan Kravets
256a9ee45d Revert "Pass system STDIN stream to SCons subprocess"
This reverts commit d7b7d2de6e.
2021-10-26 13:54:49 +03:00
Ivan Kravets
c835ce780a Fixed "UnicodeEncodeError" when a build output contains non-ASCII characters // Resolve #3971 2021-10-25 22:01:11 +03:00
Ivan Kravets
d7b7d2de6e Pass system STDIN stream to SCons subprocess 2021-10-25 21:12:29 +03:00
Ivan Kravets
1dd0635e5e Use secured bitly 2021-10-25 20:25:23 +03:00
Ivan Kravets
67506511c3 Update token for docs/deploy 2021-10-25 19:45:47 +03:00
Ivan Kravets
3fbb4cde36 Bump version to 5.2.3b2 2021-10-25 18:45:04 +03:00
Ivan Kravets
9aaa80a213 Cast Python warnings to errors when running "pytest" 2021-10-25 18:36:10 +03:00
Ivan Kravets
acb6cbffa0 Add "arduplot" to the "Community Filters" // Resolve #4058 2021-10-25 15:54:06 +03:00
Ivan Kravets
6a70ab74bc Update history 2021-10-25 15:24:24 +03:00
Ivan Kravets
852c252302 Added support for custom device monitor filters // Resolve #3924 2021-10-25 15:18:18 +03:00
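
Filters (built-in or custom) are enabled through the "monitor_filters" option; an illustrative configuration where "my_filter" stands for a user-supplied filter module:

    [env:uno]
    monitor_filters = time, log2file, my_filter
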
Ilia Motornyi
3a670b55b6 Update CMakeLists.txt.tpl (#4089) 2021-10-25 14:56:12 +03:00
Ivan Kravets
d01435f4f2 Bump version to 5.2.3b1 2021-10-25 13:28:57 +03:00
Ivan Kravets
f1638c9cd7 Fixed an issue when PIO Remote device monitor crashes on the first keypress // Resolve #3832 2021-10-25 13:24:36 +03:00
Ivan Kravets
4943504898 Bump version to 5.2.3a3 2021-10-24 23:17:30 +03:00
Ivan Kravets
7d7480c120 Show human-readable message when infinite recursion is detected while processing "Interpolation of Values" // Resolve #3883 2021-10-24 22:21:15 +03:00
Ivan Kravets
78182fea0a Disabled resolving of SCons variables when preprocessing "Interpolation of Values" // Resolve #3933 2021-10-24 21:27:25 +03:00
Ivan Kravets
947e57b5b4 Bump version to 5.2.3a2 2021-10-24 20:00:30 +03:00
Ivan Kravets
e0e4a594e9 Fix conf tests on Windows 2021-10-24 19:59:52 +03:00
Ivan Kravets
4839fe37a3 Improved PlatformIO directory interpolation (${platformio.***_dir}) in “platformio.ini” configuration file // Resolve #3934 2021-10-24 18:19:40 +03:00
Phill Price
9914b7ea38 Typo (#4087)
showed > shown
2021-10-23 13:01:48 +03:00
Ivan Kravets
f86ed97820 Bump version to 5.2.3a1 2021-10-22 19:14:17 +03:00
Ivan Kravets
8d8b0807e2 Fixed an issue when the "$PROJECT_DIR" gets the full path to "platformio.ini", not the directory name // Resolve #4086 2021-10-22 19:13:24 +03:00
Ivan Kravets
e3c6237430 Remove unused files 2021-10-20 23:29:34 +03:00
Ivan Kravets
e964c7fa5c Merge branch 'release/v5.2.2' 2021-10-20 18:44:28 +03:00
Ivan Kravets
f1e84e145c Merge tag 'v5.2.2' into develop
Bump version to 5.2.2
2021-10-20 18:44:28 +03:00
Ivan Kravets
2e2773fa6b Bump version to 5.2.2 2021-10-20 18:44:20 +03:00
Ilia Motornyi
a9c7a27d47 Fix CLion 2021.3 support (#4085)
New CMake behavior crashes CLion with apostrophe symbols in `add_definitions` clause
see https://youtrack.jetbrains.com/issue/CPP-26719
2021-10-20 18:08:22 +03:00
Ivan Kravets
e41ecb19cf Resolve an issue with interrupting a running program 2021-10-20 16:21:48 +03:00
Ivan Kravets
5b091b602f Fixed a “TypeError” issue when extending configuration option in “platformio.ini” with the multi-line default value // Resolve #4082 2021-10-20 15:35:01 +03:00
Ivan Kravets
768681c4f2 Remove debugging code // Resolve #4083 2021-10-19 19:27:20 +03:00
Ivan Kravets
2e4e5c1873 Temporary disable CI for Windows+Python 3.10 2021-10-19 19:26:13 +03:00
Ivan Kravets
4a61806e60 Quote Python versions 2021-10-19 18:52:30 +03:00
Ivan Kravets
883187f9ac Bump version to 5.2.2a1 2021-10-19 18:21:28 +03:00
Ivan Kravets
2d9a5031e9 Test PlatformIO Core on Python 3.10 2021-10-19 18:21:21 +03:00
Ivan Kravets
39c93f6512 Override debugging firmware loading mode using the `--load-mode` option for the `pio debug` command 2021-10-19 18:20:01 +03:00
Ivan Kravets
a7905b373e Skip CI for macOS & Py 3.6 2021-10-11 16:00:09 +03:00
Ivan Kravets
a7c82ff9b9 Merge branch 'release/v5.2.1' 2021-10-11 15:07:19 +03:00
Ivan Kravets
5b4b4a4051 Merge tag 'v5.2.1' into develop
Bump version to 5.2.1
2021-10-11 15:07:19 +03:00
Ivan Kravets
c348fec609 Bump version to 5.2.1 2021-10-11 15:07:04 +03:00
Ivan Kravets
4af17356f3 Handle ".hpp" files when looking for a library root 2021-10-11 15:01:42 +03:00
Ivan Kravets
384e5052bc Bump version to 5.2.1rc2 2021-10-10 14:09:59 +03:00
Ivan Kravets
a5adae1491 Skip broken Click 8.0.2 release // Resolve #4078 2021-10-10 14:09:17 +03:00
Ivan Kravets
fe62b810db Bump version to 5.2.1rc1 2021-10-08 19:03:12 +03:00
Ivan Kravets
ee78496058 Clean a build environment and installed library dependencies using a new `cleanall` target // Resolve #4062 2021-10-08 19:02:45 +03:00
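
The new target complements the existing "clean" target; illustrative invocations:

    pio run -t clean       # remove build artifacts only
    pio run -t cleanall    # also remove installed library dependencies (e.g. .pio/libdeps)
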
Ivan Kravets
8afe4bae87 Typo fix 2021-10-08 15:31:26 +03:00
Ivan Kravets
b04bb2b740 Fix Click's "DeprecationWarning: 'resultcallback' has been renamed to 'result_callback'" // Resolve #4075 2021-10-08 15:18:34 +03:00
Ivan Kravets
3d46f0d72f Drop support for Click < 7.1.2 2021-10-08 15:18:19 +03:00
Ivan Kravets
a65d973660 Extend library root signs with "include" and "src" dirs // Resolve #4073 2021-10-08 15:00:05 +03:00
Ivan Kravets
df83d90c06 Handle upper-cased "Include" & "Src" folders 2021-10-08 14:58:41 +03:00
Ivan Kravets
a1d55f2529 Ignore telemetry on "idedata" target 2021-10-08 14:40:23 +03:00
valeros
aa097f3fd6 Update Cppcheck to v2.6.0 // Resolve #3942 2021-10-07 16:43:06 +03:00
Ivan Kravets
e0b72202fd Bump version to 5.2.1b4 2021-09-29 19:21:55 +03:00
Ivan Kravets
e8769fff7d Improved handling of a library root based on "Conan" or "CMake" build systems // Resolve #3887 2021-09-29 19:21:31 +03:00
Ivan Kravets
ed33652534 Handle "test" folder as a part of CLion project // Resolve #4005 2021-09-29 15:44:52 +03:00
cpavot
d1c1f972a6 Propagate agent option to remote device monitor command (#4065)
Signed-off-by: Christophe PAVOT <christophe.pavot@wiifor.com>
2021-09-29 14:47:11 +03:00
Valerii Koval
6008275aae Properly handle in-progress C++ standards when invoking Cppcheck // Resolve #3944 (#4070) 2021-09-29 14:46:02 +03:00
Ivan Kravets
edf8bb3945 Bump version to 5.2.1b3 2021-09-27 22:59:58 +03:00
Ivan Kravets
dd7d133263 Dump "embedded_result.output" 2021-09-27 22:59:36 +03:00
Ivan Kravets
b6f783674b Allowed to override a default library builder via a new `builder` field in the `build` group of `library.json` // Resolve #3957 2021-09-26 15:27:41 +03:00
Valerii Koval
eab70fae3b Properly handle "--keep-build-dir" option in platformio ci command (#4061)
This fixes #4011 and possible "FileExists" errors in the "platformio ci"
command by safely copying sources to the build folder
2021-09-23 23:26:42 +03:00
Ivan Kravets
fed40ef104 Add debug information when a test fails on Win/Py3.8 2021-09-17 21:06:08 +03:00
Ivan Kravets
6d087f5a38 Bump version to 5.2.1b2 2021-09-16 22:07:01 +03:00
Ivan Kravets
0edcf33547 Use "ubuntu-18.04" for project examples (CI) 2021-09-16 22:06:45 +03:00
Ivan Kravets
443417b0f4 PyLint fix 2021-09-16 21:56:09 +03:00
Ivan Kravets
369e994b0d Check for "build.mcu" and "build.cpu" when looking for precompiled library // Issue #405 2021-09-16 21:51:53 +03:00
Ivan Kravets
55469327c6 Bump version to 5.2.1b1 2021-09-16 21:16:21 +03:00
Ivan Kravets
27f326673c Fixed a "KeyError: Invalid board option 'build.cpu'" when using a precompiled library with a board that does not have a CPU field in the manifest // Resolve #405 2021-09-16 21:13:54 +03:00
Ivan Kravets
e6fd766fff Bump version to 5.2.1a1 2021-09-14 13:03:47 +03:00
Ivan Kravets
7da3ccfacb Merge tag 'v5.2.0' into develop
Bump version to 5.2.0
2021-09-13 19:00:10 +03:00
Ivan Kravets
624d6b3b0b Merge branch 'release/v5.2.0' 2021-09-13 19:00:09 +03:00
Ivan Kravets
9528083a66 Bump version to 5.2.0 2021-09-13 18:59:53 +03:00
Ivan Kravets
55408f6ccb Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files // Resolve #4019 2021-09-13 14:56:24 +03:00
Ivan Kravets
dce5a39b10 Process "precompiled" and "ldflags" properties of the "library.properties" manifest // Resolve #3994 2021-09-13 14:48:48 +03:00
Ivan Kravets
03a23876a7 Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files // Resolve #4019 2021-09-13 14:04:33 +03:00
Ivan Kravets
775357dd94 Better error handling if git is not installed // Resolve #4013 2021-09-13 13:31:53 +03:00
Dmitry Antyneskul
d10cbb2823 Fix link to clang-tidy (#4049) 2021-09-13 12:36:56 +03:00
valeros
63a2465bac Update check tools to the latest available // Resolve #4041 2021-09-10 18:11:48 +03:00
Ivan Kravets
d97ed52e91 Sync docs 2021-09-07 15:17:59 +03:00
Ivan Kravets
e1dc12c14d Docs: Document "platformio-ide.pioHomeServerHttpHost" setting for VSCode 2021-09-02 12:47:17 +03:00
Ivan Kravets
7c755d4e2d Sync docs 2021-08-31 16:23:24 +03:00
Ivan Kravets
55b786d9f0 Use byte-mode for writing binary file 2021-08-28 13:21:46 +03:00
Ivan Kravets
131f4be4ea Fix PyLint's "use-dict-literal" and "use-list-literal" 2021-08-28 13:14:40 +03:00
Ivan Kravets
d819617d2b Specify encoding for "open()" functions 2021-08-28 13:10:07 +03:00
Ivan Kravets
b9219a2b62 Update "zeroconf" deps to 0.36 2021-08-28 12:31:02 +03:00
Ivan Kravets
554e378dd6 Sync docs 2021-08-28 12:30:38 +03:00
Ivan Kravets
cc11402bc9 Sync docs 2021-08-14 15:41:44 +03:00
Ivan Kravets
40220f92c1 Sync docs 2021-08-14 15:25:25 +03:00
Ivan Kravets
8c4d9021c2 Update deps 2021-08-14 12:53:49 +03:00
Ivan Kravets
efefb02d86 Sync docs 2021-08-14 12:53:30 +03:00
Ivan Kravets
3ee281aaf9 Update SPDX License List to 3.14 2021-08-09 17:46:56 +03:00
Ivan Kravets
097b6d5097 PyLint fixes 2021-08-05 18:13:22 +03:00
Ivan Kravets
6cdaf05f98 Sync docs 2021-08-05 18:13:00 +03:00
Ivan Kravets
3be0f58c30 Sync docs 2021-08-04 14:58:54 +03:00
Ivan Kravets
f3489a3b01 Sync docs 2021-08-02 13:52:06 +03:00
Ivan Kravets
173dbeb24a Bump version to 5.2.0b1 2021-08-02 13:11:23 +03:00
Ivan Kravets
0607b86818 Upgraded build engine to the SCons 4.2 2021-08-02 13:10:37 +03:00
Valerii Koval
1282a65bcb Update Arduino udev rule to include latest Portenta board
Resolves #4014
2021-08-02 12:12:52 +03:00
Ivan Kravets
45d3207dfe Docs: Sync dev-platforms 2021-07-31 18:48:08 +03:00
Ivan Kravets
76b46f59e9 Fix lib test 2021-07-30 20:13:53 +03:00
Ivan Kravets
19fa108f61 Docs: Add "Copy" button to CODE blocks 2021-07-30 17:32:22 +03:00
Ivan Kravets
2372d06591 Sync docs 2021-07-26 19:26:33 +03:00
Ivan Kravets
7015375892 Docs: Revert "html_favicon" path 2021-07-23 15:32:02 +03:00
Ivan Kravets
e9bf2b361f Update deps and sync docs 2021-07-23 15:05:01 +03:00
Ivan Kravets
51b790b767 Bump version to 5.2.0a9 2021-07-12 15:06:42 +03:00
Ivan Kravets
ac84431361 Take into account package's "system" when checking for duplicates 2021-07-12 15:06:06 +03:00
Mikhail
7dc8463da9 Fix charmap error (#3998)
* Fix charmap error

Fix charmap error on cyrilic in platformio.ini file #3493

* Update config.py

Co-authored-by: Ivan Kravets <me@ikravets.com>
2021-07-07 18:25:55 +03:00
Ivan Kravets
71ae579bc0 PyLint fix 2021-07-05 16:06:02 +03:00
Ivan Kravets
5036d25b60 Enable Python version auto-detection for Black formatter 2021-07-05 13:31:23 +03:00
Ivan Kravets
ff6d169862 Fix PyLint for v2.9.3 2021-07-05 13:30:37 +03:00
Otto Winter
dde8898aae Bump zeroconf to 0.32.* (#3991) 2021-07-05 12:57:30 +03:00
Ivan Kravets
72cc23ef46 Fix PyLint warning with "No exception type(s) specified (bare-except)" 2021-06-29 18:25:20 +03:00
Ivan Kravets
5390b4ed42 Add Github token for Slack notification 2021-06-29 18:24:47 +03:00
Ivan Kravets
17c7d90d52 Sync docs 2021-06-29 18:11:08 +03:00
Ivan Kravets
5c3b5be613 Fix TypeError: 'NoneType' object is not callable 2021-06-29 18:07:45 +03:00
Ivan Kravets
5ab7769745 Bump version to 5.2.0a8 2021-06-24 16:43:00 +03:00
Ivan Kravets
05374d1145 Match buffered data from debugging server 2021-06-24 16:42:45 +03:00
Ivan Kravets
311e10f91e Ensure all patterns are replaced in debug init script 2021-06-24 16:00:13 +03:00
Ivan Kravets
2b94791387 Bump version to 5.2.0a7 2021-06-22 14:28:40 +03:00
Ivan Kravets
fbcae11cd0 Fix project generator 2021-06-22 14:28:04 +03:00
Ivan Kravets
0d6eff2a9a Sync docs 2021-06-22 14:27:33 +03:00
Ivan Kravets
6a9b7fdb6d Update SPDX License List to 3.13 2021-06-03 16:32:53 +03:00
Ivan Kravets
e8f703648a Docs: Use Python 3 for CI integration 2021-06-01 18:24:17 +03:00
Ivan Kravets
710f82de0f Up uvicorn to 0.14 & click to 8.0 2021-06-01 17:59:18 +03:00
Ivan Kravets
bee35acfa6 Sync docs 2021-06-01 17:56:55 +03:00
Ivan Kravets
90fdaf80e4 Sync docs 2021-05-31 18:25:54 +03:00
Ivan Kravets
27feb1ddd7 Added support for Click 8.0; updated other deps 2021-05-19 19:43:41 +03:00
Ivan Kravets
2be7e0f7e6 Docs: Promote PlatformIO Labs blog posts 2021-05-13 15:28:09 +03:00
Valerii Koval
186ab70bf9 Add udev rule for Raspberry Pi Pico boards 2021-05-10 11:38:05 +03:00
valeros
0fa9006e45 Sync docs: CircleCI updates 2021-05-03 22:34:43 +03:00
Ivan Kravets
60c83bae93 Docs: Sync dev-platforms 2021-05-01 13:44:28 +03:00
Ivan Kravets
553c398c8e Show package "system" info before publishing 2021-04-30 18:06:35 +03:00
Ivan Kravets
1c90bb383f Sync docs 2021-04-29 19:46:17 +03:00
Ivan Kravets
4281225b02 Sync docs 2021-04-29 19:24:44 +03:00
Ivan Kravets
14dc9c6c43 Sync docs 2021-04-29 18:38:44 +03:00
Ivan Kravets
c9e10b1a3e Fix issue with broken redirect 2021-04-29 14:43:27 +03:00
Ivan Kravets
915c850760 Docs: Fix JS redirect URL 2021-04-29 12:47:57 +03:00
valeros
2c3f430203 Tidy up Docs CI 2021-04-28 20:59:01 +03:00
valeros
1a152ed7fa Add deploy step to CI configuration 2021-04-28 20:18:23 +03:00
Ivan Kravets
5953480807 Docs: Fix broken link for RTD page 2021-04-28 20:16:01 +03:00
Ivan Kravets
b5c1a195be Fix PyLint issues: consider-using-with 2021-04-28 19:59:37 +03:00
Ivan Kravets
310cc086c6 Docs: Minor fixes to "redirect" page generator 2021-04-28 19:59:12 +03:00
Ivan Kravets
61d6cd3c18 Apply black formatter 2021-04-28 19:58:50 +03:00
Ivan Kravets
cccabf5330 Add missed "sphinx-notfound-page" package for docs 2021-04-28 13:19:49 +03:00
Ivan Kravets
6f33460afd Remove debugging code 2021-04-28 13:17:22 +03:00
Ivan Kravets
603d524aaf Refactor docs to be deployed as a static content 2021-04-28 13:10:19 +03:00
Ivan Kravets
eb2cd001b6 Use private "_idedata" target when fetching data for debugging 2021-04-24 18:01:35 +03:00
Ivan Kravets
b5b57790be Validate package manifest when packing archive or publishing a package 2021-04-23 22:02:07 +03:00
Ivan Kravets
286f4ef961 Bump version to 5.2.0a6 2021-04-21 20:52:27 +03:00
Ivan Kravets
ad28d1906c Improve a package publishing process 2021-04-21 20:51:54 +03:00
Ivan Kravets
dfdccac67d Remove unnecessary "ensure_python3()" blocks 2021-04-20 20:28:49 +03:00
Ivan Kravets
b8c2752237 Docs: Add information on how to avoid extra script running when IDE fetches metadata 2021-04-16 13:36:53 +03:00
Ivan Kravets
834c7b0def Bump version to 5.2.0a5 2021-04-12 22:38:56 +03:00
Ivan Kravets
5bfe70142e Switch to project directory before starting debugging process 2021-04-12 22:38:21 +03:00
Ivan Kravets
b35c5a22bb Fix a broken support for custom configuration file for pio debug command // Resolve #3922 2021-04-11 22:21:01 +03:00
Ivan Kravets
eecc825c90 PyLint 2021-04-11 22:20:09 +03:00
valeros
3823c22dad Update Release Notes 2021-04-07 21:30:06 +03:00
Valerii Koval
551bd3dbfe Explicitly specify PROGSUFFIX when compiling final binary (#3918)
Resolves #3906
2021-04-02 17:09:38 +03:00
Ivan Kravets
7e9956963a Remove a note with using pio ci for uploading // Resolve #3903 2021-04-02 15:23:34 +03:00
Ivan Kravets
80c24a1993 Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms // Resolve #3872 2021-04-02 15:19:18 +03:00
Ivan Kravets
66091bae24 Disable GDB "startup-with-shell" only on Unix platform 2021-04-02 14:44:38 +03:00
Ivan Kravets
73d4f10f4b Bump version to 5.2.0a4 2021-04-01 21:16:42 +03:00
Ivan Kravets
ee7ea77fc3 Fixed an error "Unknown development platform" when running unit tests on a clean machine // Resolve #3901 2021-04-01 21:15:14 +03:00
Ivan Kravets
32e1cbe2a3 Provide solution for issue #3417 2021-03-31 18:28:06 +03:00
Ivan Kravets
3539724843 Update "zeroconf" dependency to 0.29 2021-03-31 17:33:26 +03:00
Ivan Kravets
940b25f158 Sync docs & examples 2021-03-31 17:32:57 +03:00
Ivan Kravets
37e601e5b5 Ensure that a serial port is ready before running unit tests on a remote target // Resolve #3742 2021-03-24 19:07:40 +02:00
Ivan Kravets
0230374709 Document new VSCode settings: activateProjectOnTextEditorChange & autoOpenPlatformIOIniFile 2021-03-24 13:04:20 +02:00
valeros
86db237e5d Update Cppcheck and PVS-Studio packages // Resolve #3898 2021-03-23 21:17:32 +02:00
Ivan Kravets
1542b1cebb Bump version to 5.2.0a3 2021-03-20 10:32:14 +02:00
Ivan Kravets
990071af5c Fix issue with missed compat.path_to_unicode // Resolve #3894 2021-03-20 10:31:55 +02:00
Ivan Kravets
f543e00307 Bump version to 5.2.0a2 2021-03-19 20:26:26 +02:00
Ivan Kravets
34b4f8265a Debug unit tests created with PlatformIO Unit Testing solution // Resolve #948 2021-03-19 20:25:30 +02:00
Ivan Kravets
a366d1af2a Use "target remote" for mspdebug 2021-03-19 18:26:09 +02:00
Ivan Kravets
ebe5785a91 Allow overriding default debugging flags from dev-platform 2021-03-19 17:11:25 +02:00
Ivan Kravets
887d46725b Debug native (desktop) application on a host machine // Resolve #980 2021-03-19 17:02:11 +02:00
Ivan Kravets
a326b718f2 Handle legacy $LOAD_CMD "init_cmds" 2021-03-19 16:09:38 +02:00
Ivan Kravets
c14b298cb9 Fixed an issue with silent hanging when a custom debug server is not found // Resolve #3756 2021-03-19 15:55:42 +02:00
Ivan Kravets
9cca8f3f55 Split debugging client to base and GDB // Resolve #3757 2021-03-19 15:47:20 +02:00
Ivan Kravets
f5cee56740 Fix issue when disabling "debug_init_break" did not work 2021-03-19 14:09:43 +02:00
Ivan Kravets
972d183d85 Use a cached build configuration 2021-03-19 13:46:54 +02:00
Ivan Kravets
eebdf04357 Load "idedata" configuration from a dumped file 2021-03-19 13:46:27 +02:00
Ivan Kravets
9ede20a367 Disable checking for "__PLATFORMIO_BUILD_DEBUG__" that is not available in g2 mode 2021-03-19 13:10:29 +02:00
Ivan Kravets
b0c3e22a52 Configure a custom pattern to determine when debugging server is started with a new debug_server_ready_pattern option 2021-03-19 12:30:16 +02:00
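For context, a minimal sketch of how the new `debug_server_ready_pattern` option is used in `platformio.ini` (the board, server command, and pattern text below are illustrative, not taken from this commit):

```ini
[env:custom_debug]
platform = ststm32
board = nucleo_f401re
debug_tool = custom
debug_server =
    openocd
    -f board/st_nucleo_f4.cfg
debug_server_ready_pattern = Listening on port 3333
```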
Ivan Kravets
a78db17784 Drop support for Python 2 2021-03-19 00:21:44 +02:00
Ivan Kravets
dbb9998f69 Refactor debugging configuration, add support for server_ready_pattern // Resolve #3401 2021-03-18 23:42:54 +02:00
Ivan Kravets
2745dbd124 PyLint fix 2021-03-17 23:14:22 +02:00
Ivan Kravets
c0357daf01 Remove Python 2 code 2021-03-17 21:08:06 +02:00
Ivan Kravets
064fa6027d Bump version to 5.2.0a1 2021-03-17 20:07:26 +02:00
Ivan Kravets
779e02a05e Use "connect_read_pipe" on Unix 2021-03-17 20:06:52 +02:00
Ivan Kravets
e222d0356a Merge branch 'feature/debug-async' into develop 2021-03-17 18:25:47 +02:00
Ivan Kravets
d2ae333bb8 Merge branch 'release/v5.1.1' 2021-03-17 18:17:46 +02:00
Ivan Kravets
764c42a810 Merge tag 'v5.1.1' into develop
Bump version to 5.1.1
2021-03-17 18:17:46 +02:00
Ivan Kravets
18b18f1c3d Bump version to 5.1.1 2021-03-17 18:17:40 +02:00
Ivan Kravets
b54a8b40a4 Refactor Unified Debugger to native Python Asynchronous I/O stack // Resolve #3793 , Resolve #3595 2021-03-17 17:42:11 +02:00
Ivan Kravets
edf724d20d Sync docs 2021-03-15 17:01:44 +02:00
Ivan Kravets
622a190a61 Avoid "rustup" when building cryptography for contrib-pysite // Resolve #3865 2021-03-15 17:00:16 +02:00
Ivan Kravets
5b4a78ba20 Bump version to 5.1.1b1 2021-03-11 14:49:20 +02:00
valeros
44b85f6e4b Switch Cppcheck to analyze project per file // Issue #3797
Cppcheck doesn't provide a proper report when one of the files in the check list is broken.
If we run the analysis on a per-file basis, then Cppcheck will be able to report at least the defects
from valid source files.
2021-03-11 13:49:27 +02:00
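A rough illustration of the per-file approach described above, written as a standalone Python sketch (this is not PlatformIO's actual implementation; the Cppcheck flags are generic examples):

```python
import subprocess

def check_per_file(source_files, cppcheck="cppcheck"):
    """Run Cppcheck once per source file so a single broken file
    cannot suppress defect reports for the remaining valid sources."""
    reports = []
    for path in source_files:
        proc = subprocess.run(
            [cppcheck, "--enable=warning,style", "--template=gcc", path],
            capture_output=True, text=True, check=False,
        )
        # With the "gcc" template Cppcheck prints defects to stderr
        reports.append((path, proc.returncode, proc.stderr.strip()))
    return reports
```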
Valerii Koval
7f1f760645 Preserve user-specified debug configurations in VSCode integration (#3878)
* Preserve user-specified debug configurations in VSCode integration

Issue #3824

* Tidy up Python code
2021-03-10 14:54:52 +02:00
Ivan Kravets
54d8c96c30 Update SPDX license list to 3.12 2021-03-09 22:01:58 +02:00
Ivan Kravets
c6ab7827e7 Fixed incorrect size of unnecessary data // Resolve #3830 2021-03-09 19:26:22 +02:00
Ivan Kravets
ae26079e2e Fixed an issue when code inspection fails with "Bad JSON" // Resolve #3790 2021-03-09 19:20:30 +02:00
valeros
3e993156f2 Suppress printing unnecessary info in silent mode // Resolve #3837 2021-03-08 12:16:53 +02:00
valeros
3b2fafd789 Add new test for check command and project with whitespace 2021-03-04 22:27:00 +02:00
GovorovViva64
72ebaddcb8 Handle possible whitespaces in project path for PVS-Studio (#3849) 2021-03-04 22:22:09 +02:00
Ivan Kravets
5a9950cc19 Sync docs 2021-03-04 18:52:12 +02:00
Ivan Kravets
cf29d7e400 Typo fix 2021-03-04 18:52:02 +02:00
Ivan Kravets
244dba3614 JFrog shuts down Bintray 2021-03-03 21:31:42 +02:00
Ivan Kravets
21886517e1 Bump version to 5.1.1a3 2021-03-01 17:59:58 +02:00
Ivan Kravets
3996236729 Report detailed server error to PIO Home frontend 2021-03-01 17:59:40 +02:00
Ivan Kravets
560cb3ac82 Sync docs 2021-02-27 19:57:40 +02:00
Ivan Kravets
81c7e23ae9 Bump version to 5.1.1a2 2021-02-27 19:44:11 +02:00
Ivan Kravets
0b8bd6d4fc Migrate to Async JSON-RPC package 2021-02-27 19:43:43 +02:00
Ivan Kravets
7c271c8207 Better detecting of native dev-platform for unit testing // Resolve #3851 2021-02-27 18:53:26 +02:00
Ivan Kravets
58947d91a6 PyLint fixes 2021-02-27 17:13:30 +02:00
Ivan Kravets
20096be990 Sync docs 2021-02-26 13:39:13 +02:00
Ivan Kravets
7c8508b651 Fixed an issue with device monitor when the “send_on_enter” filter didn’t send EOL chars // Resolve #3787 2021-02-10 14:43:50 +02:00
Ivan Kravets
b56d0fdd9b Sync docs & examples 2021-02-10 14:43:12 +02:00
Ivan Kravets
d0cc06f766 Move isort settings to "tox.ini" 2021-02-06 16:56:44 +02:00
Ivan Kravets
d8d2b215d1 Minor improvement 2021-02-03 23:11:47 +02:00
Ivan Kravets
c478d383b4 Sync docs 2021-02-03 23:10:01 +02:00
Ivan Kravets
e01cd1c037 Bump version to 5.1.1a1 2021-02-01 13:01:31 +02:00
Ivan Kravets
e63019c469 Fixed a "The command line is too long" issue with a linking process on Windows // Resolve #3827 2021-02-01 12:52:00 +02:00
Ivan Kravets
90a325a1b2 Merge branch 'release/v5.1.0' 2021-01-28 19:23:14 +02:00
Ivan Kravets
698594525f Merge tag 'v5.1.0' into develop
Bump version to 5.1.0
2021-01-28 19:23:14 +02:00
Ivan Kravets
fd540148f3 Bump version to 5.1.0 2021-01-28 19:23:06 +02:00
Ivan Kravets
078a024931 Configure default debug_speed 2021-01-28 13:52:11 +02:00
Ivan Kravets
f8193b2419 Bump version to 5.1.0rc3 2021-01-27 23:06:42 +02:00
Ivan Kravets
808ba603c5 Fixed an issue when "pio device monitor --eol" and the "send_on_enter" filter did not work properly // Resolve #3787 2021-01-27 23:06:18 +02:00
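For reference, the options involved are combined on the command line roughly as follows (the port name is illustrative):

```
pio device monitor --port /dev/ttyUSB0 --eol CRLF --filter send_on_enter
```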
Ivan Kravets
61d70fa688 Include Unity framework for IDE data only if there are tests in project 2021-01-27 22:40:19 +02:00
Ivan Kravets
493a33e754 Drop support for Python 2 2021-01-27 22:25:42 +02:00
Ivan Kravets
bd75c3e559 Bump version to 5.1.0rc2 2021-01-27 20:58:13 +02:00
Ivan Kravets
cb9e72a879 Dump build flags using SCons.Subst.SUBST_CMD 2021-01-27 20:57:53 +02:00
Ivan Kravets
9d2fd4982f Cleanup code 2021-01-27 20:40:25 +02:00
Ivan Kravets
eed9a0e376 Merge branch 'feature/3792-maxleng-cmd' into develop 2021-01-27 20:30:39 +02:00
Ivan Kravets
d77dbb2cca Use "TEMPFILEARGESCFUNC" for GCC workaround on Windows 2021-01-27 20:30:28 +02:00
Ivan Kravets
7810946484 Use project build folder for tempfile workaround with command maxlen 2021-01-27 18:47:54 +02:00
Ivan Kravets
e2906e3be5 Refactored a workaround for a maximum command line character limitation // Resolve #3792 2021-01-27 16:10:13 +02:00
Ivan Kravets
0a8b66ee95 Configure a custom debug adapter speed using a new debug_speed option // Resolve #3799 2021-01-26 21:21:41 +02:00
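A minimal `platformio.ini` sketch for the new `debug_speed` option (platform, board, and the speed value are illustrative):

```ini
[env:jlink_debug]
platform = nordicnrf52
board = nrf52_dk
debug_tool = jlink
debug_speed = 500
```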
Ivan Kravets
8ff270c5f7 Skip non-existing packages when checking for updates // Resolve #3818 2021-01-26 17:05:37 +02:00
Ivan Kravets
4012a86cac Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source // Resolve #3816 2021-01-26 16:15:11 +02:00
Ivan Kravets
dd4fff3a79 Bump version to 5.1.0rc1 2021-01-25 23:50:41 +02:00
Ivan Kravets
0ed99b7687 Added a new `--session-id` option to `pio home` // Resolve #3397 2021-01-25 23:44:26 +02:00
Ivan Kravets
2c389ae11e Added new check_prune_system_threshold setting 2021-01-24 17:21:22 +02:00
Ivan Kravets
15ff8f9d2a Bump version to 5.0.5b5 2021-01-24 15:58:07 +02:00
Ivan Kravets
bd4d3b914b Revert "lib_compat_mode" changes // Resolve #3811 Resolve #3806 2021-01-24 15:49:56 +02:00
Ivan Kravets
59b02120b6 New options for system prune command: remove unnecessary core and development platform packages // Resolve #923 2021-01-23 23:20:53 +02:00
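In CLI terms this looks roughly as follows; treat the `--dry-run` flag as an assumption about the option set added here:

```
pio system prune --dry-run   # preview what would be removed
pio system prune             # remove unused data and unnecessary packages
```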
Ivan Kravets
92655c30c1 Disabled automatic removal of unnecessary development platform packages // Resolve #3708, Resolve #3770 2021-01-23 22:34:48 +02:00
Ivan Kravets
484567f242 Project's "lib_compat_mode" has higher priority than "library.json" 2021-01-23 15:54:52 +02:00
Ivan Kravets
ef6e70a38b Fixed an issue when unnecessary packages were removed in `update --dry-run` mode // Resolve #3809 2021-01-23 15:24:32 +02:00
Ivan Kravets
e695e30a9b Fixed an issue with compiler driver for ".ccls" language server // Resolve #3808 2021-01-23 14:44:53 +02:00
Ivan Kravets
65e67b64bd Remove unnecessary dependencies from contrib-pysite 2021-01-22 22:55:45 +02:00
Ivan Kravets
ddbe339541 Update to iSort 5.0 2021-01-22 22:55:02 +02:00
Ivan Kravets
b2c0e6a8c2 Sync docs 2021-01-22 22:46:09 +02:00
Ivan Kravets
f9384ded27 Fixed an issue when “strict” compatibility mode was not used for a library with custom “platforms” field in library.json manifest // Resolve #3806 2021-01-22 22:45:36 +02:00
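For reference, the compatibility mode in question is configured per environment in `platformio.ini` (the environment values are illustrative):

```ini
[env:uno]
platform = atmelavr
board = uno
framework = arduino
; possible values: off, soft, strict
lib_compat_mode = strict
```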
Ivan Kravets
4488f25ce0 Bump version to 5.0.5b4 2021-01-20 23:26:22 +02:00
Ivan Kravets
52b22b5784 Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" // Resolve #3804 , Resolve #3417 2021-01-20 20:45:23 +02:00
Ivan Kravets
5a356140d6 Sync examples and docs 2021-01-20 20:44:43 +02:00
Ivan Kravets
e79de0108c Upgraded build engine to the SCons 4.1 2021-01-20 16:15:05 +02:00
Ivan Kravets
985f31877c Automatically install tool-unity when there are tests and "idedata" target is called 2021-01-20 15:14:45 +02:00
Ivan Kravets
11a71b7fbb Bump version to 5.0.5b3 2021-01-20 14:37:19 +02:00
Ivan Kravets
7f26c11c9d Fix an issue with "'coroutine' object has no attribute 'addCallback'" 2021-01-20 14:36:45 +02:00
Ivan Kravets
9b93fcd947 Do not install tool-unity even for non-test projects 2021-01-20 14:27:03 +02:00
Ivan Kravets
733ca5174b Bump version to 5.0.5b2 2021-01-18 21:19:57 +02:00
Ivan Kravets
bd897d780b Implement "__shutdown__" endpoint for PIO Home server 2021-01-18 21:19:15 +02:00
Ivan Kravets
429065d2b9 Legacy support for PIO Home "__shutdown__" query request 2021-01-18 20:53:19 +02:00
Ivan Kravets
b90734f1e2 List multicast DNS services only when PY3 2021-01-18 20:51:50 +02:00
Ivan Kravets
db97a7d9d3 Bump version to 5.0.5b1 2021-01-18 18:21:27 +02:00
Ivan Kravets
6ff67aeadf Significantly speedup PlatformIO Home loading time by migrating to native Python 3 Asynchronous I/O 2021-01-18 18:20:26 +02:00
Ivan Kravets
dd7d282d17 Improved listing of multicast DNS services 2021-01-18 18:17:10 +02:00
Ivan Kravets
4e637ae58a Drop Python 2 from PIO Core test 2021-01-18 18:15:15 +02:00
sephalon
1ec2e55322 Add udev rule for Atmel AVR Dragon (#3786) 2021-01-04 13:46:09 +02:00
Ivan Kravets
556eb3f8c1 Docs: Update "Wiring Connections" section for ST-Link debugging probe 2020-12-31 13:47:05 +02:00
Ivan Kravets
76b49ebc95 Increase timeout to 60sec when starting debug server and "ready_pattern" is used 2020-12-30 14:38:18 +02:00
Ivan Kravets
e82443a302 Bump version to 5.0.5a1 2020-12-30 14:29:41 +02:00
Ivan Kravets
5de86a6416 Check for debug server's "ready_pattern" in "stderr" 2020-12-30 14:29:19 +02:00
Ivan Kravets
3f3c8cabb8 Merge branch 'release/v5.0.4' 2020-12-30 13:23:11 +02:00
Ivan Kravets
cd59aa9afb Merge tag 'v5.0.4' into develop
Bump version to 5.0.4
2020-12-30 13:23:11 +02:00
Ivan Kravets
34e12e575b Bump version to 5.0.4 2020-12-30 13:23:04 +02:00
Ivan Kravets
4c8c261ab4 Raise an exception when trying to pack a package from tar.gz on Windows // Resolve #3776 2020-12-28 20:12:53 +02:00
Ivan Kravets
099bb3b9ff Sync dev-platforms: docs + examples 2020-12-28 13:51:34 +02:00
Ivan Kravets
c623a6aacc Fixed an issue with package publishing on Windows when Unix permissions are not preserved // Resolve #3776 2020-12-28 13:08:12 +02:00
Ivan Kravets
ce7356794d Test examples from the official dev-platforms 2020-12-26 21:43:41 +02:00
Ivan Kravets
523494f9cf Ignore CI tests from tokisaki dev-platform 2020-12-26 20:18:15 +02:00
Ivan Kravets
0edc867d45 Bump version to 5.0.4rc1 2020-12-26 16:10:44 +02:00
Ivan Kravets
ce4c45a075 Show a warning message about deprecated support for Python 2 and Python 3.5 2020-12-26 16:10:07 +02:00
valeros
e29941e3eb Update release notes with check tools updates 2020-12-22 21:30:01 +02:00
valeros
86ce3595f6 Update check tools packages // Resolve #3758
Updated tools: Cppcheck v2.3, PVS-Studio v7.11
2020-12-22 00:44:09 +02:00
valeros
6e958b8415 Handle possible issues when check tool cannot be executed // Resolve #3753
Now, each tool individually decides under what conditions the check is considered failed.
2020-12-22 00:21:32 +02:00
Ivan Kravets
d485703768 Use "Updating to X.Y.Z" instead of "Outdated" when doing a real package updating 2020-12-11 17:53:48 +02:00
Ivan Kravets
109e2107d1 Sync docs 2020-12-11 16:14:08 +02:00
Ivan Kravets
3469905365 Decode subprocess output only for byte-strings 2020-12-02 15:15:17 +02:00
Ivan Kravets
75b3846f8f Sync docs & examples 2020-12-02 15:15:02 +02:00
Ivan Kravets
a9ec38208c Bump version to 5.0.4b1 2020-11-30 20:24:45 +02:00
Ivan Kravets
c38b9a4144 Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS // Resolve #3740 2020-11-30 20:23:30 +02:00
Ivan Kravets
b6128aeaa1 Apply formatting 2020-11-22 22:32:03 +02:00
Ivan Kravets
881782be05 Allow spaces and dots in an example's name (package manifest) 2020-11-22 21:42:25 +02:00
Ivan Kravets
0c05930501 Sync docs 2020-11-22 21:41:47 +02:00
Ivan Kravets
b96f2a19b5 Bump version to 5.0.4a2 2020-11-14 20:10:45 +02:00
Ivan Kravets
c1906714ee Give a constant "PlatformIO" name for the C/C++ configuration 2020-11-14 20:10:22 +02:00
Ivan Kravets
32181d1bd2 Improved `.ccls` configuration file for Emacs, Vim, and Sublime Text integrations // Issue #3735 2020-11-14 19:55:24 +02:00
Ivan Kravets
7dfb413d87 Typo fix 2020-11-12 21:42:53 +02:00
Ivan Kravets
7934a96ad1 Added "Core" suffix when showing PlatformIO Core version using `pio --version` command 2020-11-12 20:42:27 +02:00
Ivan Kravets
abddbf9c7d Bump version to 5.0.4a1 2020-11-12 18:56:55 +02:00
Ivan Kravets
77e66241f7 Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension 2020-11-12 18:56:34 +02:00
Ivan Kravets
4b3f2e19a4 Merge branch 'release/v5.0.3' 2020-11-12 17:57:30 +02:00
Ivan Kravets
b29c6485a8 Merge tag 'v5.0.3' into develop
Bump version to 5.0.3
2020-11-12 17:57:30 +02:00
Ivan Kravets
f4dba7a68c Bump version to 5.0.3 2020-11-12 17:56:12 +02:00
Ivan Kravets
2817408db3 Fixed an issue when pio package pack ignores some folders // Resolve #3730 2020-11-12 16:06:54 +02:00
Ivan Kravets
9ff3c758eb Fix tests 2020-11-12 15:35:37 +02:00
Ivan Kravets
3dcc189740 Use custom Pre-Debug task only for multi-env project 2020-11-12 15:35:19 +02:00
Ivan Kravets
4a12d1954e Fixed an issue when the package manager tries to install a built-in library from the registry // Resolve #3662 2020-11-12 15:27:34 +02:00
Ivan Kravets
e4d645110a Merge branch 'develop' of https://github.com/platformio/platformio-core into develop
# Conflicts:
#	HISTORY.rst
2020-11-12 15:25:51 +02:00
valeros
01a32067d5 Print ignored environments and test suites only in verbose mode
Resolve #3726
2020-11-12 15:22:47 +02:00
Ivan Kravets
fc5ce4739c Added an error selector for Sublime Text build runner // Resolve #3733 2020-11-12 15:05:01 +02:00
Ivan Kravets
ae7b8f9ecf Fix tests 2020-11-11 20:52:23 +02:00
Ivan Kravets
0f5d2d6821 Sync docs 2020-11-11 19:44:39 +02:00
Ivan Kravets
48eca22a00 Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used 2020-11-11 14:19:58 +02:00
Ivan Kravets
5e164493a8 Sync docs 2020-11-09 11:39:26 +02:00
Ivan Kravets
ead99208f2 Increase the maximum example name length in the manifest to 255 chars 2020-11-09 11:38:46 +02:00
Ivan Kravets
4f5ad05792 Docs: Document "Introducing Strict SSL/TLS" in migration 2020-11-04 14:07:40 +02:00
Ivan Kravets
bc52e72605 Bump version to 5.0.3a2 2020-11-03 15:11:52 +02:00
Ivan Kravets
038674835a Workaround for a broken locale 2020-11-02 12:27:17 +02:00
Ivan Kravets
00f21c17ca Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-11-01 21:06:47 +02:00
Ivan Kravets
818a1508a0 Docs: Use native ProjectConfig in the advanced scripting examples 2020-11-01 21:06:23 +02:00
Ivan Kravets
2d9480a6a7 Support for GitPod environment 2020-11-01 21:05:03 +02:00
valeros
0bec4e25c8 Add support for C++ language standard in QtCreator template
Resolve #3719
2020-11-01 19:03:14 +02:00
Ivan Kravets
950a540df4 Bump version to 5.0.3a1 2020-10-31 19:07:45 +02:00
Ivan Kravets
2e66c5f807 Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode 2020-10-31 19:07:04 +02:00
Ivan Kravets
7033c2616b Docs: Add info how to access PlatformIO Core CLI in VSCode 2020-10-31 12:44:37 +02:00
Ivan Kravets
7292024ee6 Merge branch 'release/v5.0.2' 2020-10-30 18:10:54 +02:00
Ivan Kravets
8d4cde4534 Merge tag 'v5.0.2' into develop
Bump version to 5.0.2
2020-10-30 18:10:54 +02:00
Ivan Kravets
d6df6cbb5d Bump version to 5.0.2 2020-10-30 18:10:47 +02:00
Ivan Kravets
344e94d8a1 Bump version to 5.0.2rc2 2020-10-30 17:51:02 +02:00
Ivan Kravets
5cf73a9165 Remove all hooks when dumping data to JSON and Python 3 is used 2020-10-30 17:50:43 +02:00
Ivan Kravets
96b1a1c79c Fixed an issue with a "wrong" timestamp in device monitor output using "time" filter // Resolve #3712 2020-10-30 14:11:27 +02:00
Ivan Kravets
0bbe7f8c73 Sync docs 2020-10-29 23:48:44 +02:00
Ivan Kravets
e333bb1cca Tests: skip dev-platforms without examples 2020-10-29 23:42:15 +02:00
Ivan Kravets
454cd8d784 Bump version to 5.0.2rc1 2020-10-29 23:18:39 +02:00
Ivan Kravets
743a43ae17 Fixed an issue when multiple pio lib install commands with the same local library resulted in duplicates in `lib_deps` // Resolve #3715 2020-10-29 23:17:47 +02:00
Ivan Kravets
5a1b0e19b2 Initialize a new project or update an existing one, passing a working environment name and its options // Resolve #3686 2020-10-29 22:59:48 +02:00
Ivan Kravets
da6cde5cbd Sync docs 2020-10-29 18:09:08 +02:00
Ivan Kravets
5ea864da39 Add py39 env 2020-10-29 18:08:58 +02:00
Ivan Kravets
175448deda Fix tests on PY2 2020-10-29 14:37:50 +02:00
valeros
16f90dd821 Ignore possible empty defines when exporting IDE data // Resolve #3690 2020-10-29 12:22:26 +02:00
Ivan Kravets
9efac669e6 Bump version to 5.0.2b5 2020-10-28 22:53:29 +02:00
Ivan Kravets
adf9ba29df Fixed an issue when "pio package publish" command removes original archive after submitting to the registry // Resolve #3716 2020-10-28 22:52:48 +02:00
Ivan Kravets
cacddb9abb Support package packing on Python 3+ only 2020-10-28 22:33:24 +02:00
Ivan Kravets
edbe213410 Sync docs 2020-10-28 22:32:48 +02:00
Ivan Kravets
891f78be37 Use "ensure_python3" util 2020-10-28 22:32:27 +02:00
Ivan Kravets
175be346a8 Extend package filters 2020-10-28 20:57:26 +02:00
Ivan Kravets
9ae981614f Add pack/sdist target 2020-10-28 20:56:53 +02:00
Ivan Kravets
16f5f3ef46 Do not pack binary files and docs to the package source archive 2020-10-28 14:18:09 +02:00
Ivan Kravets
2cd19b0273 Bump version to 5.0.2b4 2020-10-27 23:00:33 +02:00
Ivan Kravets
e158e54a26 Fix issue with data decoding when calling PIO Core via PIO Home 2020-10-27 22:57:51 +02:00
Ivan Kravets
63a6fe9133 Improved "core.call" RPC for PlatformIO Home // Resolve #3671 2020-10-27 21:07:02 +02:00
Ivan Kravets
779eaee310 Bump version to 5.0.2b3 2020-10-26 22:25:47 +02:00
Ivan Kravets
0ecfe8105f Docs: Unify CLI commands to use "pio" short version command 2020-10-26 22:24:05 +02:00
Ivan Kravets
b8cc867ba4 Allow a dev-platform to provide extra debug configuration using the BasePlatform::configure_debug_options API 2020-10-26 18:24:46 +02:00
Ivan Kravets
7230556d1b Move extra IDE data to "extra" section 2020-10-26 18:23:28 +02:00
Ivan Kravets
afd79f4655 Improve initiating manifest parser from a package archive 2020-10-22 19:08:20 +03:00
Ivan Kravets
5d87fb8757 Add "Articles" section to Zephyr RTOS 2020-10-22 19:07:44 +03:00
Ivan Kravets
23e9596506 Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missed in the registry // Resolve #3700 2020-10-20 21:06:53 +03:00
Ivan Kravets
428f46fafe Typo test 2020-10-16 17:05:19 +03:00
Ivan Kravets
ee847e03a6 Fix an issue with "'NoneType' object has no attribute 'status_code'" 2020-10-16 14:23:10 +03:00
Ivan Kravets
a870981266 Docs: Fix custom "platform_packages" for ESP8266/32 2020-10-14 23:21:59 +03:00
Ivan Kravets
411bf1107d Disable "lattice_ice40" examples for macOS 2020-10-14 22:33:04 +03:00
Ivan Kravets
5b74c8a942 Minor fixes 2020-10-14 19:53:30 +03:00
Ivan Kravets
a24bab0a27 Fix badge 2020-10-14 17:55:45 +03:00
Ivan Kravets
1cb7764b0e Highlight PlatformIO Labs Technology 2020-10-14 17:53:41 +03:00
Ivan Kravets
d835f52a18 Sync docs 2020-10-10 20:48:56 +03:00
Michele Campeotto
9c20ab81cb fix quoting of defines in ccls template (#3692) 2020-10-02 13:28:02 +03:00
Ivan Kravets
14de3e79c5 Sync docs 2020-09-26 22:52:43 +03:00
Ivan Kravets
21c12030d5 Bump version to 5.0.2b2 2020-09-19 19:30:40 +03:00
Ivan Kravets
2370e16f1b Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses // Resolve #3677 2020-09-19 19:29:51 +03:00
Ivan Kravets
a384411a28 Bump version to 5.0.2b1 2020-09-17 20:41:10 +03:00
Ivan Kravets
1e0ca8f79c Fixed an issue with GCC linker when "native" dev-platform is used in pair with library dependencies // Resolve #3669 2020-09-17 20:40:11 +03:00
Ivan Kravets
2b5e590819 Docs: Explain how to install custom Python packages // Resolve #3673 2020-09-17 19:21:12 +03:00
Ivan Kravets
bf57b777bf Docs: Update docs for PlatformIO IDE 2.0 for VSCode 2020-09-16 19:33:55 +03:00
valeros
f656d19ed5 Docs: Added new section about Arduino STM32L0 core 2020-09-14 22:31:11 +03:00
Ivan Kravets
eb09af06ed Bump version to 5.0.2a2 2020-09-12 23:21:33 +03:00
Ivan Kravets
687c339f20 Fixed a "PermissionError: [WinError 5]" on Windows when external repository is used with lib_deps option // Resolve #3664 2020-09-12 23:20:46 +03:00
Ivan Kravets
7bc170a53e Fixed an issue with "KeyError: 'versions'" when dependency does not exist in the registry // Resolve #3666 2020-09-11 21:16:18 +03:00
Ivan Kravets
65297c24d4 Merge branch 'release/v5.0.1' 2020-09-10 17:46:56 +03:00
Ivan Kravets
ea21f3fba0 Merge tag 'v5.0.1' into develop
Bump version to 5.0.1
2020-09-10 17:46:56 +03:00
Ivan Kravets
b515a004d3 Bump version to 5.0.1 2020-09-10 17:46:49 +03:00
Ivan Kravets
7d3fc1ec1a Catch exception if folder already exists 2020-09-09 18:38:55 +03:00
Ivan Kravets
6987d6c1c6 Fixed an issue when it was not possible to update or remove an external dev-platform using PlatformIO Home // Resolve #3663 2020-09-09 17:53:04 +03:00
Ivan Kravets
de2b5ea905 Bump version to 5.0.1b1 2020-09-09 16:27:47 +03:00
Ivan Kravets
f946a0bc08 Reformat code with black==20.8b1 2020-09-09 16:27:36 +03:00
Ivan Kravets
4f47ca5742 Fixed an issue with "Invalid simple block (semantic_version)" from library dependency that refs to an external source (repository, ZIP/Tar archives) // Resolve #3658 2020-09-09 16:13:39 +03:00
Ivan Kravets
54b51fc2fd Merge branch 'develop' of https://github.com/platformio/platformio-core into develop
# Conflicts:
#	HISTORY.rst
2020-09-09 14:38:29 +03:00
Ivan Kravets
1f284e853d Fixed an issue when the package manager tries to install a built-in library from the registry // Resolve #3662 2020-09-09 14:36:01 +03:00
valeros
2a30ad0fdf Allow in-progress language standards in IDE templates // Resolve #3653
Note: VS Code only supports finalized names
2020-09-09 13:56:00 +03:00
Ivan Kravets
c454ae336d Added support for the "owner" requirement when declaring dependencies using `library.json` 2020-09-09 13:10:42 +03:00
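The owner-based dependency form in `library.json` looks roughly like this (the library name and versions are illustrative):

```json
{
  "name": "ExampleLib",
  "version": "1.0.0",
  "dependencies": [
    {
      "owner": "bblanchon",
      "name": "ArduinoJson",
      "version": "^6.16.1"
    }
  ]
}
```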
Ivan Kravets
cd59c829e0 Fixed an issue when pio package unpublish command crashes // Resolve #3660 2020-09-09 12:17:09 +03:00
Ivan Kravets
429f416b38 Generate current date for a custom contrib-pysite package 2020-09-07 16:38:20 +03:00
Ivan Kravets
0a881d582d Docs: Add info on how to list published packages 2020-09-07 16:37:37 +03:00
Ivan Kravets
65b1029216 Host SPDX licenses on Bintray; GitHub is blocked in multiple countries 2020-09-07 13:16:08 +03:00
Ivan Kravets
c7758fd30e Docs: Minor fixes 2020-09-06 21:06:16 +03:00
Ivan Kravets
46f300d62f Docs: Add "Publishing" section to the instruction on how to create own dev-platform 2020-09-06 21:00:45 +03:00
Ivan Kravets
4234dfb6f9 Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using Remote Development // Resolve #3652 2020-09-06 18:37:27 +03:00
Ivan Kravets
9695720343 Bump version to 5.0.1a1 2020-09-04 22:48:21 +03:00
Ivan Kravets
1f28056459 Fixed an issue when using a custom git/ssh package with platform_packages // Resolve #3624 2020-09-04 22:47:49 +03:00
Jake
7dacceef04 Exclude tests from python package (#3650) 2020-09-04 18:55:30 +03:00
Ivan Kravets
39883e8d68 Docs: Document "--without-testing" option for pio test command 2020-09-04 18:39:52 +03:00
Ivan Kravets
949ef2c48a Merge tag 'v5.0.0' into develop
Bump version to 5.0.0
2020-09-03 14:43:11 +03:00
Ivan Kravets
ada3f8b270 Merge branch 'release/v5.0.0' 2020-09-03 14:43:10 +03:00
Ivan Kravets
cf4b835b0c Bump version to 5.0.0 2020-09-03 14:42:59 +03:00
Ivan Kravets
fec4569ada Docs: Update docs with new owner-based dependency form 2020-09-03 14:37:24 +03:00
Ivan Kravets
083edc4c76 Refactor to os.path 2020-09-02 20:52:11 +03:00
Ivan Kravets
fe4112a2a3 Follow SemVer-compliant version constraints when checking library updates // Resolve #1281 2020-09-02 20:36:56 +03:00
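As a concrete illustration, a SemVer-style constraint in `lib_deps` (the library and version are examples, not from this commit):

```ini
[env:uno]
; "^6.17.0" allows any compatible 6.x release at or above 6.17.0
lib_deps =
    bblanchon/ArduinoJson @ ^6.17.0
```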
Dirk Mueller
c8ea64edab Fix link to FAQ sections (#3642)
* Fix link to FAQ sections

Use consistently the same host and url and fix one unmatched anchor.

* Update HISTORY.rst

Co-authored-by: Ivan Kravets <me@ikravets.com>
2020-09-02 19:13:20 +03:00
Ivan Kravets
6e5198f373 Minor improvements 2020-09-02 18:49:00 +03:00
Ivan Kravets
44c2b65372 Show ignored project environments only in the verbose mode // Resolve #3641 2020-09-02 17:31:32 +03:00
Ivan Kravets
5cc21511ad Show owner name for packages 2020-09-02 16:07:16 +03:00
valeros
2edd7ae649 Update PVS-Studio to the latest v7.09 2020-08-31 15:40:25 +03:00
Ivan Kravets
7a49a74135 Bump version to 5.0.0b3 2020-08-28 21:55:55 +03:00
Ivan Kravets
be487019f5 Fix broken handling of multi-configuration projects // Resolve #3615 2020-08-28 21:54:47 +03:00
Ivan Kravets
5dee0a31e6 Do not test for package owner if resource is external 2020-08-28 21:40:17 +03:00
Ivan Kravets
9f2c134e44 Do not detach a new package even if it comes from external source 2020-08-28 21:24:48 +03:00
Ivan Kravets
cdbb837948 Minor fixes 2020-08-28 18:45:52 +03:00
Ivan Kravets
80c1774a19 Docs: PlatformIO Core 5.0: new commands, migration guide, other improvements 2020-08-28 14:08:26 +03:00
valeros
1aaa9b6707 Update changelog with static analysis section 2020-08-26 17:44:01 +03:00
Ivan Kravets
4a7f578649 Sync docs and history 2020-08-26 15:40:24 +03:00
Ivan Kravets
d59416431d Parse npm-like "repository" data from a package manifest // Resolve #3637 2020-08-26 15:40:03 +03:00
Ivan Kravets
8625fdc571 Minor improvements 2020-08-26 14:51:53 +03:00
Ivan Kravets
3c91e3c1e1 Move build dir to the disk root (should fix the issue with long paths for Zephyr RTOS on Windows) 2020-08-26 14:51:01 +03:00
Ivan Kravets
1560fb724c Bump version to 5.0.0b2 2020-08-26 06:40:46 +03:00
Ivan Kravets
0db39ccfbd Automatically accept PIO Core 4.0-compatible dev-platforms 2020-08-26 06:40:22 +03:00
Ivan Kravets
5086b96ede Bump version to 5.0.0b1 2020-08-25 22:22:35 +03:00
Ivan Kravets
210cd76042 Rename "idedata" sub-command to "data" 2020-08-25 22:01:22 +03:00
Ivan Kravets
f77978a295 Apply formatting 2020-08-25 22:01:08 +03:00
Valerii Koval
3e72f098fe Updates for PIO Check (#3640)
* Update check tools to the latest versions

* Use language standard when exporting defines to check tools

* Buffer Cppcheck output to detect multiline messages

* Add new test for PIO Check

* Pass include paths to Clang-Tidy as individual compiler arguments

Clang-tidy doesn't support response files which can exceed command
length limitations on Windows

* Simplify tests for PIO Check

* Update history

* Sync changelog
2020-08-25 21:19:21 +03:00
Ivan Kravets
b9fe493336 Sync docs 2020-08-25 19:18:26 +03:00
Ivan Kravets
79bfac29ba Update history and sync docs 2020-08-25 18:57:20 +03:00
Ivan Kravets
2ea80d91f8 Minor fixes 2020-08-25 15:55:17 +03:00
Ivan Kravets
fa90251714 Fixed an issue when Unit Testing engine fails with a custom project configuration file // Resolve #3583 2020-08-25 14:35:01 +03:00
Ivan Kravets
ff19109787 Fix test 2020-08-25 14:34:03 +03:00
Ivan Kravets
091ba4346d Bump version to 4.4.0b6 2020-08-24 23:11:43 +03:00
Ivan Kravets
e43176e33a Typo fix 2020-08-24 23:11:24 +03:00
Ivan Kravets
655e2856d1 Bump version to 4.4.0b5 2020-08-24 23:05:01 +03:00
Ivan Kravets
c6a37ef880 Get real path of just installed core-package 2020-08-24 23:04:17 +03:00
Ivan Kravets
6af2bad123 Make PIO Core 4.0 automatically compatible with dev-platforms for PIO Core 2.0 & 3.0 // Resolve #3638 2020-08-24 22:56:31 +03:00
Ivan Kravets
3e7e9e2b3d Remove unused data using a new `pio system prune` command // Resolve #3522 2020-08-24 15:22:05 +03:00
Ivan Kravets
13db51a556 Install/Uninstall dependencies only for library-type packages // Resolve #3637 2020-08-24 15:10:38 +03:00
Ivan Kravets
d6d95e05e8 Rename "fs.format_filesize" to "fs.humanize_file_size" 2020-08-24 15:09:37 +03:00
Ivan Kravets
b44bc80bd1 PyLint fix for PY2 2020-08-23 21:41:53 +03:00
Ivan Kravets
f39c9fb597 Bump version to 4.4.0b4 2020-08-23 21:07:40 +03:00
Ivan Kravets
24f85a337f Fix "AttributeError: module 'platformio.exception' has no attribute 'InternetIsOffline'" 2020-08-23 21:07:14 +03:00
Ivan Kravets
a069bae1fb Fix a bug with package updating when version is not in SemVer format // Resolve #3635 2020-08-23 15:26:58 +03:00
Ivan Kravets
1c8aca2f6a Check ALL possible versions for the first matched package 2020-08-23 15:25:03 +03:00
Ivan Kravets
620241e067 Move package "version" related things to "platformio.package.version" module 2020-08-23 15:24:31 +03:00
Ivan Kravets
da179cb33f Enhance configuration variables 2020-08-23 14:29:31 +03:00
Ivan Kravets
8ea10a18d3 Bump version to 4.4.0b3 2020-08-23 13:22:38 +03:00
Ivan Kravets
e2bb81bae4 Restore legacy util.cd API 2020-08-23 13:22:11 +03:00
Ivan Kravets
dcf91c49ac Remove debug code 2020-08-22 22:56:26 +03:00
Ivan Kravets
c2caf8b839 Bump version to 4.4.0b2 2020-08-22 22:53:41 +03:00
Ivan Kravets
95151062f5 Implement mirroring for HTTP client 2020-08-22 22:52:29 +03:00
Ivan Kravets
7e4bfb1959 Move CacheContent API to "cache.py" module 2020-08-22 20:05:14 +03:00
Ivan Kravets
abae9c7e77 Cache base registry requests 2020-08-22 17:52:12 +03:00
Ivan Kravets
102aa5f22b Port legacy API requests to the new registry client 2020-08-22 17:49:29 +03:00
Ivan Kravets
d92c1d3442 Refactor HTTP related operations 2020-08-22 17:48:49 +03:00
Ivan Kravets
aa186382a8 Upgraded to SCons 4.0 2020-08-22 14:22:37 +03:00
Ivan Kravets
70366d34b9 Sync docs 2020-08-22 13:57:18 +03:00
Ivan Kravets
49b70f44ca Ignore legacy tmp pkg folders 2020-08-22 13:56:57 +03:00
Ivan Kravets
f79fb4190e Sync docs 2020-08-21 14:25:59 +03:00
Ivan Kravets
d980194600 Bump version to 4.4.0b1 2020-08-17 15:34:02 +03:00
Ivan Kravets
fb6e1fd33c PyLint fixes 2020-08-17 15:33:08 +03:00
Ivan Kravets
6f7fc638c7 Fix PyLint errors in tests 2020-08-17 12:56:57 +03:00
Ivan Kravets
2459e85c1d Fix a bug with the custom platform packages // Resolve #3628 2020-08-17 12:13:25 +03:00
Ivan Kravets
74e27a2edc Enable "cyclic reference" for GCC linker only for the embedded dev-platforms // Resolve #3570 2020-08-16 20:26:59 +03:00
Ivan Kravets
808852f4cc Set default timeout for http requests // Resolve #3623 2020-08-16 20:21:30 +03:00
Ivan Kravets
67e6d177b4 Minor fixes for dev-platform factory 2020-08-16 18:48:05 +03:00
Ivan Kravets
04694b4126 Switch the legacy platform manager to the new one 2020-08-15 23:11:01 +03:00
Ivan Kravets
bb6fb3fdf8 Fix bug with parsing detached packages 2020-08-15 15:24:35 +03:00
Ivan Kravets
4ec64f8980 Fix a test for examples 2020-08-14 17:00:18 +03:00
Ivan Kravets
332874cd4b Fix relative import of platform module on Py27 2020-08-14 16:48:12 +03:00
Ivan Kravets
276ca61cde Refactor dev-platform API 2020-08-14 16:39:15 +03:00
Ivan Kravets
5f3ad70190 Rename meta.PackageSourceItem to PackageItem 2020-08-14 16:38:46 +03:00
Ivan Kravets
ff8ec43a28 Ensure tool-type package is compatible with a host system 2020-08-13 21:46:46 +03:00
Ivan Kravets
ecc369c2f8 Minor fixes 2020-08-13 20:19:27 +03:00
Ivan Kravets
26fdd0a62c Bump version to 4.4.0a8 2020-08-13 18:30:33 +03:00
Ivan Kravets
64ff6a0ff5 Switch the legacy core package manager to the new one 2020-08-13 18:30:04 +03:00
Ivan Kravets
fd7dba1d74 Package Manifest: increase the package author.name field to 100 chars 2020-08-13 17:50:44 +03:00
Ivan Kravets
38ec517200 Update history 2020-08-12 21:09:42 +03:00
Ivan Kravets
20a74d1654 Merge branch 'feature/pkg-next' into develop 2020-08-12 20:09:18 +03:00
Ivan Kravets
d5451756fd Minor improvements 2020-08-12 20:09:10 +03:00
Ivan Kravets
893ca1b328 Switch library manager to the new package manager 2020-08-12 13:27:05 +03:00
Ivan Kravets
2dd69e21c0 Implement package removing with dependencies 2020-08-01 20:17:07 +03:00
Ivan Kravets
a01b3a2473 Do not raise exception when package is not found (404), return None 2020-08-01 19:58:59 +03:00
Ivan Kravets
6ac538fba4 Remove unused import 2020-08-01 15:49:10 +03:00
Ivan Kravets
41c2d64ef0 Fix "PermissionError: [WinError 32] The process cannot access the file" on Windows 2020-08-01 15:36:28 +03:00
Ivan Kravets
a1970bbfe3 Allow a forced package installation with removing existing package 2020-08-01 14:38:28 +03:00
Ivan Kravets
d329aef876 Initial version of a new package manager 2020-07-31 15:42:26 +03:00
Valerii Koval
abc0489ac6 Update changelog 2020-07-28 15:59:02 +03:00
Valerii Koval
2bc47f4e97 PyLint fix 2020-07-28 15:55:25 +03:00
Valerii Koval
933a09f981 Update unit testing support for mbed framework
- Take into account Mbed OS6 API changes
- RawSerial is used with Mbed OS 5 since Serial doesn't support putc with baremetal profile
2020-07-28 15:22:36 +03:00
Valerii Koval
adc2d5fe7c Update VSCode template
Starting with cpptools v0.29, escaped paths in the compilerArgs field don't work on Windows.
2020-07-28 15:10:52 +03:00
Ivan Kravets
def149a29e Use updated registry API 2020-07-25 17:13:05 +03:00
Ivan Kravets
39cb23813f Allow ignoring "platforms" and "frameworks" fields in "library.json" and treat a library as compatible with all 2020-07-25 11:51:47 +03:00
Ivan Kravets
85f5a6a84a Bump version to 4.4.0a7 2020-07-24 21:00:58 +03:00
Ivan Kravets
6ace5668b8 Update the registry publish endpoints 2020-07-24 20:57:18 +03:00
Ivan Kravets
c193a4ceb7 Handle proxy environment variables in lower case // Resolve #3606 2020-07-23 19:07:29 +03:00
Ivan Kravets
1abc110f8a Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-07-23 17:57:00 +03:00
Ivan Kravets
73740aea89 Sync docs and examples 2020-07-23 17:56:41 +03:00
Valerii Koval
83110975fa Docs: Sync 2020-07-23 17:42:46 +03:00
Ivan Kravets
881c5ea308 Remove unused code 2020-07-23 17:37:23 +03:00
Ivan Kravets
22f1b94062 Bump version to 4.4.0a6 2020-07-21 12:42:26 +03:00
Ivan Kravets
ea30d94324 Automatically enable LDF dependency chain+ mode (evaluates C/C++ preprocessor conditional syntax) for an Arduino library when "library.properties" has a "depends" field // Resolve #3607 2020-07-21 12:41:38 +03:00
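For context, `library.properties` is the Arduino library manifest; a `depends` field like the sketch below (names are illustrative) is what triggers the deeper LDF mode, which can also be set manually with `lib_ldf_mode = chain+` in `platformio.ini`:

```properties
name=ExampleSensorLib
version=1.2.0
depends=Adafruit GFX Library, Adafruit BusIO
```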
Ivan Kravets
1ed462a29a PyLint fix 2020-07-16 01:00:38 +03:00
Ivan Kravets
a2efd7f7c5 Bump version to 4.4.0a5 2020-07-15 23:18:07 +03:00
Ivan Kravets
ca33058637 New commands for the registry package management (pack, publish, unpublish) 2020-07-15 23:16:46 +03:00
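Typical usage of the new commands looks roughly like this; the paths, package name, and version spec are placeholders, and the exact flags may differ between releases:

```
pio package pack ./ExampleLib                  # produce ExampleLib-<version>.tar.gz
pio package publish ExampleLib-1.0.0.tar.gz    # push the archive to the registry
pio package unpublish ExampleLib@1.0.0         # remove a published version
```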
Ivan Kravets
a6f143d1ca Dump data intended for IDE extensions/plugins using a new platformio project idedata command 2020-07-15 14:20:29 +03:00
Ivan Kravets
1368fa4c3b Implement new fields (id, ownername, url, requirements) for PackageSpec API 2020-07-14 21:07:09 +03:00
Ivan Kravets
cca3099d13 Ensure that module.json keywords are lowercased 2020-07-14 18:55:29 +03:00
Ivan Kravets
368c66727b Fix issue with package packing when a re-map is used and the manifest is missing from "include" (copy it now) 2020-07-12 22:39:32 +03:00
Ivan Kravets
a688edbdf1 Fix an issue with manifest parser when "new_from_archive" API is used 2020-07-09 21:53:46 +03:00
Ivan Kravets
e570aadd72 Docs: Sync 2020-07-09 17:17:34 +03:00
Ivan Kravets
f85cf61d68 Revert back max length of author name to 50 chars 2020-07-08 23:23:14 +03:00
Ivan Kravets
f27c71a0d4 Increase author name length to 100 chars for manifest 2020-07-08 22:56:14 +03:00
Ivan Kravets
940682255d Lock Python's isort package to isort<5 2020-07-08 22:16:52 +03:00
Ivan Kravets
a00722bef4 Ignore maintainer's broken email in library.properties manifest 2020-07-08 21:53:28 +03:00
Ivan Kravets
84132d9459 Fix tests 2020-07-08 21:52:34 +03:00
Ivan Kravets
42fd284560 Improve parsing of the "author" field of the library.properties manifest 2020-07-08 20:21:10 +03:00
Ivan Kravets
40d6847c96 Add option to pass a custom path where to save package archive 2020-07-08 13:46:36 +03:00
Ivan Kravets
abd3f8b3b5 Docs: Remove legacy library dependency syntax for github 2020-07-07 22:53:01 +03:00
Ivan Kravets
3c986ed681 Remove recursively .pio folders when packing a package 2020-07-07 16:28:51 +03:00
Ivan Kravets
8b24b0f657 Sync docs & examples 2020-07-06 23:37:28 +03:00
Ivan Kravets
0f8042eeb4 Implement PackagePacker.get_archive_name API 2020-07-06 15:57:49 +03:00
Ivan Kravets
f97632202b Fix issue with KeyError 2020-07-06 15:57:10 +03:00
Ivan Kravets
a79e933c37 Ignore author's broken email in a package manifest 2020-07-06 14:22:35 +03:00
Ivan Kravets
ef53bcf601 Ignore empty fields in library.properties manifest 2020-07-06 14:17:00 +03:00
Ivan Kravets
08a87f3a21 Do not allow [;.<>] chars for a package name 2020-07-03 19:14:58 +03:00
Ivan Kravets
b3dabb221d Allow "+" in a package name 2020-07-03 16:07:36 +03:00
Rosen Stoyanov
899a6734ee Add .ccls to .gitignore (vim and emacs) (#3576)
* Add .ccls to .gitignore (vim)

* Add .ccls to .gitignore (emacs)
2020-06-30 21:48:44 +03:00
Ivan Kravets
7f48c8c14e Fix PyLint for PY 2.7 2020-06-30 15:06:40 +03:00
Ivan Kravets
2c24e9eff6 Fall back to latin-1 encoding when UTF-8 fails while parsing a manifest 2020-06-30 14:28:37 +03:00
Ivan Kravets
5cdca9d490 Optimize tests 2020-06-29 21:14:34 +03:00
Valerii Koval
1ac6c50334 Update multi-environment test for PIO test command 2020-06-29 20:52:15 +03:00
Valerii Koval
4cbad399f7 Remove mbed framework from several tests 2020-06-29 19:22:22 +03:00
Ivan Kravets
2b8aebbdf9 Extend test for parsing package manifest when "system" is used as a list 2020-06-29 15:06:21 +03:00
Ivan Kravets
e9a15b4e9b Parse package.json manifest keywords 2020-06-27 21:42:13 +03:00
Ivan Kravets
dd18abcac3 Fix tests 2020-06-27 12:59:12 +03:00
Ivan Kravets
b046f21e0d Fix "RuntimeError: dictionary keys changed during iteration" when parsing "library.json" dependencies 2020-06-27 12:46:04 +03:00
Ivan Kravets
29fb803be1 Enable PIO Core tests on Python 3.8 2020-06-27 12:36:57 +03:00
Ivan Kravets
bc2eb0d79f Parse dev-platform keywords 2020-06-26 19:49:25 +03:00
Ivan Kravets
0bec1f1585 Extend system info with "file system" and "locale" encodings 2020-06-26 18:38:17 +03:00
Ivan Kravets
a1ec3e0a22 Remove "vendor_url" and "docs_url" from Platform API 2020-06-25 23:23:55 +03:00
Ivan Kravets
7bc22353cc Docs: Sync dev-platforms 2020-06-25 18:04:04 +03:00
Ivan Kravets
efc2242046 Remove empty data from board information 2020-06-25 14:51:53 +03:00
Ivan Kravets
5dadb8749e Change slogan to "PlatformIO is a professional collaborative platform for embedded development" 2020-06-23 12:33:00 +03:00
Ivan Kravets
82735dd571 Fixed an issue with improper processing of source files added via multiple Build Middlewares // Resolve #3531 2020-06-23 11:46:00 +03:00
Ivan Kravets
9fb4cde2a5 Do not generate ".travis.yml" for a new project, let the user have a choice 2020-06-23 11:26:22 +03:00
Ivan Kravets
164ae2bcbc Extend system info with Python and PIO Core executables // Issue #3521 2020-06-23 11:20:29 +03:00
Ivan Kravets
a172a17c81 Bump version to 4.4.0a4 2020-06-22 23:09:28 +03:00
Ivan Kravets
5ee90f4e61 Display system-wide information using platformio system info command // Resolve #3521 2020-06-22 23:04:36 +03:00
Ivan Kravets
3aae791bee Change slogan to "collaborative platform" 2020-06-22 20:02:43 +03:00
Ivan Kravets
9f05519ccd List available project targets with a new "platformio run --list-targets" command // Resolve #3544 2020-06-22 19:53:31 +03:00
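Example usage (the environment name is illustrative):

```
pio run --list-targets           # targets for all environments
pio run -e uno --list-targets    # targets for a single environment
```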
Ivan Kravets
f19491f909 Docs: Sync articles 2020-06-22 17:55:02 +03:00
Ivan Kravets
967a856061 Do not allow ":" and "/" chars in a package name 2020-06-22 15:25:02 +03:00
Valerii Koval
87d5997b46 Add a test that ensures setUp and tearDown functions can be compiled 2020-06-22 14:42:45 +03:00
Valerii Koval
c20a1f24cd Don't print relative paths with double-dot 2020-06-18 20:36:59 +03:00
Shahrustam
260c36727c fix pio access urn format 2020-06-17 23:56:22 +03:00
ShahRustam
03d9965758 Replace urn with prn (#3565)
* Replace urn with prn

* fix

* fix text
2020-06-17 23:46:50 +03:00
ShahRustam
e853d61e16 Add orgname filter for access list (#3564)
* add orgname filter for access list

* fix

* fix namings
2020-06-17 18:55:40 +03:00
ShahRustam
42e8ea29ff CLI to manage access level on PlatformIO resources. Resolve #3534 (#3563) 2020-06-17 13:53:53 +03:00
ShahRustam
1e90c821dc Disable package upload test (#3562) 2020-06-17 00:24:55 +03:00
Ivan Kravets
cad0ae0113 Update slogan to "No more vendor lock-in!" 2020-06-16 15:06:04 +03:00
Valerii Koval
21f3dd11f4 Fix printing relative paths on Windows // Resolve #3542
Fixes "ValueError" when running "clean" target if "build_dir"
points to a folder on a different logical drive
2020-06-16 12:27:49 +03:00
Ivan Kravets
a9c13aa20e Implement "ManifestParserFactory.new_from_archive" API 2020-06-15 22:05:59 +03:00
Ivan Kravets
d3fd115743 Black format 2020-06-15 22:05:28 +03:00
Valerii Koval
df0e6016bb Handle possible NodeList in source files when processing Middlewares // Resolve #3531
env.Object() returns a list of objects that breaks the processing of
subsequent middlewares since we only expected File nodes.
2020-06-15 21:25:24 +03:00
Ivan Kravets
cb70e51016 Update changelog for Custom Targets 2020-06-13 16:21:15 +03:00
Ivan Kravets
cf2fa37e56 Bump version to 4.4.0a3 2020-06-13 13:18:54 +03:00
Ivan Kravets
28d9f25f9a Added a new "-e, --environment" option to "platformio project init" command 2020-06-12 23:47:12 +03:00
Valerii Koval
fdb83c24be Clean autogenerated files before running tests // Resolve #3523
Fixes possible conflicts between auxiliary test transport files when
project contains multiple environments with different platforms
2020-06-11 23:53:52 +03:00
Ivan Kravets
660b57cdd3 Update PIO Home front-end to 3.2.3 2020-06-11 21:16:06 +03:00
ShahRustam
405dcda824 Feature/update account tests (#3556)
* update account tests

* change second user

* refactoring

* clean

* fix tests email receiving

* fix
2020-06-11 16:02:38 +03:00
Valerii Koval
266612bbdf Run CI on pull requests 2020-06-11 15:27:51 +03:00
Ivan Kravets
2722e27415 Sync docs 2020-06-11 15:15:46 +03:00
Valerii Koval
f571ad9d47 Sync docs 2020-06-11 11:03:48 +03:00
Ivan Kravets
ef8a9835b0 Bump version to 4.4.0a2 2020-06-10 14:26:48 +03:00
Ivan Kravets
b71b939307 Rename "AddSystemTarget" to "AddPlatformTarget" 2020-06-10 14:25:53 +03:00
Shahrustam
9e3ba11e8a skip account tests 2020-06-10 12:36:07 +03:00
Shahrustam
91e9406304 cleaning 2020-06-10 12:22:28 +03:00
Shahrustam
0d8272890c merge account, org and team tests into one file 2020-06-10 12:02:34 +03:00
ShahRustam
a182cca5e9 tests fix (#3555)
* replace timestamp with randint in tests

* replace pop3 with imap
2020-06-10 11:07:19 +03:00
Valerii Koval
e6fbd6acf1 Remove debug code 2020-06-09 23:26:49 +03:00
Ivan Kravets
062a82c89e Sync docs 2020-06-09 20:59:23 +03:00
Ivan Kravets
89cc6f9bf3 Bump version to 4.4.0a1 2020-06-09 18:44:49 +03:00
Ivan Kravets
3c8e0b17a7 Added support for custom targets 2020-06-09 18:43:50 +03:00
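Custom targets are registered from a script referenced by `extra_scripts` in `platformio.ini`; a minimal sketch (the target name, action, and script file name are illustrative):

```python
# extra_script.py, referenced via: extra_scripts = extra_script.py
Import("env")  # SCons construction environment provided by PlatformIO

# Registers a target runnable with: pio run -t hello
env.AddCustomTarget(
    name="hello",
    dependencies=None,
    actions=['echo "Hello from a custom target"'],
    title="Hello",
    description="Print a greeting from the build environment",
)
```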
Shahrustam
e0023bb908 increase tests email receiving time 2020-06-09 17:05:11 +03:00
ShahRustam
a5547491ed Add account and org destroy commands. Fix tests (#3552)
* Add account and org destroy commands. Fix tests

* fix tests

* fix

* fix texts
2020-06-09 15:50:37 +03:00
Ivan Kravets
78546e9246 Docs: Add "TensorFlow, Meet The ESP32" to articles list 2020-06-08 19:26:48 +03:00
Ivan Kravets
7457ef043b Docs: Sync ASR Micro dev-platform 2020-06-08 12:00:19 +03:00
Ivan Kravets
e0e97a3629 Cache the latest news in PIO Home for 180 days 2020-06-05 18:29:11 +03:00
Ivan Kravets
f5e6820903 Bump version to 4.3.5a2 2020-06-05 14:18:24 +03:00
Ivan Kravets
27fd3b0b14 Improve detecting if PlatformIO Core is run in container 2020-06-05 14:17:19 +03:00
Valerii Koval
ced244d30a Sync docs 2020-06-05 11:30:15 +03:00
Ivan Kravets
6fa7cb4af5 Add new dev-platform "ASR Microelectronics ASR605x" 2020-06-04 22:59:05 +03:00
ShahRustam
94cb808285 CLI to manage teams. Resolve #3533 (#3547)
* CLI to manage teams.Minor fixes. Resolve #3533

* fix teams tests

* disable org and team tests

* minor fixes. fix error texts

* fix split compatibility
2020-06-04 19:31:30 +03:00
Ivan Kravets
42df3c9c3f Sync docs 2020-06-04 15:27:46 +03:00
Ivan Kravets
0c4c113b0a Fix the account show command when PLATFORMIO_AUTH_TOKEN is used 2020-06-04 14:09:42 +03:00
Ivan Kravets
3c1b08daab Ignore empty PLATFORMIO_AUTH_TOKEN 2020-06-04 13:57:56 +03:00
Ivan Kravets
d7f4eb5955 Minor grammar fix 2020-06-03 22:40:37 +03:00
Ivan Kravets
87b5fbd237 More cosmetic changes to Org CLI 2020-06-03 22:34:37 +03:00
Ivan Kravets
6c97cc6192 Cosmetic changes to Org CLI 2020-06-03 22:22:13 +03:00
Ivan Kravets
cbcd3f7c4d Fix cmd.org test 2020-06-03 21:40:03 +03:00
Ivan Kravets
f7dceb782c Fix PY2.7 when PermissionError is not available 2020-06-03 21:24:01 +03:00
ShahRustam
140fff9c23 CLI to manage organizations. Resolve #3532 (#3540)
* CLI to manage organizations. Resolve #3532

* fix tests

* fix test

* add org owner test

* fix org test

* fix invalid username/orgname error text

* refactor auth request in clients

* fix

* fix send auth request

* fix regexp

* remove duplicated code. minor fixes.

* Remove space

Co-authored-by: Ivan Kravets <me@ikravets.com>
2020-06-03 17:41:30 +03:00
Ivan Kravets
8c586dc360 Sync docs 2020-06-03 17:16:59 +03:00
Ivan Kravets
fe52f60389 Bypass PermissionError when cleaning the cache 2020-06-03 14:33:53 +03:00
Ivan Kravets
9064fcbc77 Sync docs 2020-06-03 14:33:03 +03:00
Ivan Kravets
9a1d2970cc Sync docs 2020-05-30 01:10:04 +03:00
Ivan Kravets
26ba6e4756 Add a new option to the package publishing CLI which allows disabling email notification 2020-05-28 17:06:36 +03:00
Ivan Kravets
ae58cc74bd Rename checksum header to X-PIO-Content-SHA256 2020-05-28 16:08:37 +03:00
Ivan Kravets
37e795d539 Send package checksum when publishing 2020-05-28 16:07:20 +03:00
Ivan Kravets
49960b257d Implement fs.calculate_file_hashsum 2020-05-28 16:07:02 +03:00
Shahrustam
25a421402b fix package type detector 2020-05-28 12:49:32 +03:00
Shahrustam
8e72c48319 fix datetime validation in package publish command 2020-05-27 22:30:16 +03:00
Ivan Kravets
c1965b607b Add binary stream to package publishing request 2020-05-27 17:27:05 +03:00
Ivan Kravets
d38f5aca5c Fix metavar for package CLI 2020-05-27 16:20:02 +03:00
Ivan Kravets
c06859aa9f Add package type to unpublish command 2020-05-27 14:30:27 +03:00
ShahRustam
e706a2cfe2 Refactor pio account client. Resolve #3525 (#3529) 2020-05-27 13:39:58 +03:00
Ivan Kravets
0c301b2f5d Fix order of arguments 2020-05-27 01:14:07 +03:00
Ivan Kravets
deb12972fb Implement "package unpublish" CLI 2020-05-27 01:10:35 +03:00
Ivan Kravets
8346b9822d Implement "package pack" command 2020-05-26 22:17:55 +03:00
Ivan Kravets
19cdc7d34a Initial support for package publishing in to the registry 2020-05-26 22:01:32 +03:00
Ivan Kravets
49cc5d606b Sync docs 2020-05-26 21:58:58 +03:00
Ivan Kravets
58470e8911 PY2 lint fix 2020-05-26 14:30:43 +03:00
Ivan Kravets
38699cca8f Bump version to 4.3.5a1 2020-05-26 14:26:42 +03:00
Ivan Kravets
0eb8895959 Add support for “globstar/**” (recursive) pattern 2020-05-26 14:25:28 +03:00
Ivan Kravets
99d4e0c390 Merge branch 'release/v4.3.4' 2020-05-23 20:35:59 +03:00
Ivan Kravets
6d32aeb310 Merge tag 'v4.3.4' into develop
Bump version to 4.3.4
2020-05-23 20:35:59 +03:00
Ivan Kravets
8184308755 Bump version to 4.3.4 2020-05-23 20:33:13 +03:00
Ivan Kravets
b68953b733 Bump version to 4.3.4b1 2020-05-23 20:01:25 +03:00
Ivan Kravets
7dce494ad6 Rename "misc" command to "system", do not append completion code for Fish shell // Resolve #3435 2020-05-23 20:00:56 +03:00
Valerii Koval
4921bf8b6a PyLint fix 2020-05-22 14:22:41 +03:00
Valerii Koval
32cb0d6e4d Handle possible issue on Python 2.x when writing to thread buffer
The problem happens when the value has type "unicode", which shouldn't be decoded
2020-05-22 14:17:17 +03:00
Ivan Kravets
e2c5a3c498 Add Python 3.8 for Tox 2020-05-22 14:12:27 +03:00
Ivan Kravets
ec34a65cff Bump version to 4.3.4a5 2020-05-21 15:40:38 +03:00
Ivan Kravets
9296615dbf Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-05-21 15:39:59 +03:00
Ivan Kravets
56795940b9 Sync teensy dev-platform 2020-05-21 15:39:24 +03:00
Valerii Koval
09a5952248 Add new record to history log
Mention the permission-error issue on Windows when cloning a package from a Git repository
2020-05-20 21:51:13 +03:00
Valerii Koval
735435306d Copy and remove cloned package instead of moving // Resolve #2844, Resolve #3328
On Windows, it’s not possible to move a file which is used by another
process (e.g. Git extension in VSCode)
2020-05-20 21:32:55 +03:00
Ivan Kravets
bdd57bf356 Ensure that copytree preserves symlinks 2020-05-20 20:57:55 +03:00
Valerii Koval
8840b28968 Handle possible issue on Python 2.x when writing to thread buffer
The problem happens when the value has type "unicode", which shouldn't be decoded
2020-05-20 17:04:50 +03:00
Valerii Koval
e31591a35e Print warning about an issue with mapped network drives on Windows // Issue #3417
Starting with Python 3.8 paths to mapped network drives are resolved
to their real path in the system, e.g.: "Z:\path" becomes "\\path" which
causes weird errors in the default terminal with a message that UNC
paths are not supported
2020-05-19 22:37:05 +03:00
Ivan Kravets
457a218723 Sync docs 2020-05-19 13:27:54 +03:00
Ivan Kravets
9724660dda Update SPDX licenses to 3.9 2020-05-19 13:27:44 +03:00
ShahRustam
eac6c1c552 Handle error when the internet is offline. Resolve #3503 (#3505)
* Handle error when internet is offline.

* Fix

* minor fix
2020-05-17 22:44:27 +03:00
Ivan Kravets
54d73e834b Github Actions: Checkout submodules recursive 2020-05-14 18:08:51 +03:00
Ivan Kravets
099e3c7198 Use original MongoDB license for "compilation_db.py" 2020-05-13 00:48:11 +03:00
Ivan Kravets
96a68c6b14 Docs: Sync Atmel AVR dev-platform 2020-05-11 22:10:32 +03:00
Valerii Koval
2a0a1247e3 Revert "Add initialization config for new simavr tool"
This reverts commit 16966a4957.
2020-05-11 18:21:48 +03:00
Valerii Koval
7555d66748 Revert "Add special debug port for simavr tool"
This reverts commit 7b43444d81.
2020-05-11 18:21:26 +03:00
Ivan Kravets
c76940f7ce PyLint fix 2020-05-10 19:11:06 +03:00
Ivan Kravets
b2ed027bc3 Bump version to 4.3.4a4 2020-05-10 18:36:16 +03:00
Ivan Kravets
01a1981ca1 Added PlatformIO CLI Shell Completion for Fish, Zsh, Bash, and PowerShell // Resolve #3435 2020-05-10 18:35:50 +03:00
Ivan Kravets
03228c528e Bump version to 4.3.4a3 2020-05-09 16:36:05 +03:00
ShahRustam
ac510c1553 fix summary caching (#3500) 2020-05-09 16:35:21 +03:00
ShahRustam
a1ff5e1a4f Save summary data to local session. (#3497)
* Save summary data to local session.

* naming

* fix account summary test

* add ttl for summary cache

* refactoring get_account_info

* fix
2020-05-08 21:34:52 +03:00
Valerii Koval
7b43444d81 Add special debug port for simavr tool 2020-05-08 12:31:48 +03:00
Valerii Koval
16966a4957 Add initialization config for new simavr tool 2020-05-08 01:14:15 +03:00
ShahRustam
7e7a6d7807 Skip account tests if env variables not presented (#3494)
* added skip if env variables not presented. fix exception texts

* fix texts

* fix texts
2020-05-06 19:25:24 +03:00
Ivan Kravets
f32dbeeb6d Bump version to 4.3.4a2 2020-05-06 12:30:41 +03:00
Ivan Kravets
f78ffaded0 Remove local PIO Account session from PIO Remote when token is broken 2020-05-06 12:29:01 +03:00
Ivan Kravets
8480ebde89 Rename "authenticating" to "authorizing" wording for PIO Account 2020-05-06 12:27:29 +03:00
Ivan Kravets
44ee7d6a6b Docs: Sync ESP8266 dev-platform 2020-05-04 15:50:11 +03:00
ShahRustam
2ab47b7968 Remove account state item if refreshing token failed (#3487) 2020-05-04 13:14:52 +03:00
Ivan Kravets
7181b7632b Docs: Increase content width 2020-05-03 11:06:26 +03:00
Ivan Kravets
b82eaca45e Enable caching for PIP when building contrib-pysite 2020-05-02 15:29:24 +03:00
Ivan Kravets
fd04f31c5f Update link to CLA provider 2020-05-01 23:37:38 +03:00
Ivan Kravets
75abe8a0af Sync docs 2020-05-01 23:36:29 +03:00
Ivan Kravets
f7995ce49a PyLint fix 2020-04-29 12:56:54 +03:00
Ivan Kravets
2c2309acac Bump version to 4.3.4a1 2020-04-29 12:41:56 +03:00
Ivan Kravets
5f79ab34f5 Automatically build `contrib-pysite` package on a target machine when pre-built package is not compatible // Resolve #3482 2020-04-29 12:40:04 +03:00
Ivan Kravets
5bcbee7423 Merge branch 'release/v4.3.3' 2020-04-28 18:06:53 +03:00
Ivan Kravets
961049cf9b Merge tag 'v4.3.3' into develop
Bump version to 4.3.3
2020-04-28 18:06:53 +03:00
Ivan Kravets
72e7492a78 Bump version to 4.3.3 2020-04-28 18:06:46 +03:00
Ivan Kravets
5e4b4bbacd Fix "UnicodeDecodeError: 'utf-8' codec can't decode byte" when non-Latin chars are used in project path // Resolve #3481 2020-04-28 18:05:08 +03:00
Ivan Kravets
a64d368de2 Merge branch 'release/v4.3.2' 2020-04-28 13:27:23 +03:00
Ivan Kravets
6146b58520 Merge tag 'v4.3.2' into develop
Bump version to 4.3.2
2020-04-28 13:27:23 +03:00
Ivan Kravets
f35e6e99af Bump version to 4.3.2 2020-04-28 13:25:52 +03:00
Ivan Kravets
5d8440fdd1 PyLint fixes 2020-04-28 12:48:15 +03:00
Ivan Kravets
d1b394b20a Bump version to 4.3.2rc2 2020-04-27 23:42:22 +03:00
Ivan Kravets
520d6decac Nominate some exceptions to UserSideException 2020-04-27 23:42:02 +03:00
Ivan Kravets
4a251f0ab0 Fix JSONDecodeError when bottle.SimpleTemplate is used 2020-04-27 23:41:36 +03:00
Ivan Kravets
c215abb50c Bump version to 4.3.2rc1 2020-04-26 19:46:33 +03:00
Ivan Kravets
31ca47837d Disable build cache for Github Actions 2020-04-26 12:58:32 +03:00
Ivan Kravets
560699fc6b Apply formatting 2020-04-26 12:58:05 +03:00
Valerii Koval
51ec94f78c Add new test for PIO Check with --skip-packages option 2020-04-26 01:38:25 +03:00
Valerii Koval
ac1210fbea Add -imacros files to forcedInclude field in VSCode template 2020-04-26 00:35:22 +03:00
Valerii Koval
c03f93521b Refactor PIO Check feature (#3478)
* Add new option --skip-packages for check command

Check tools might fail if they're not able to preprocess source
files; for example, Cppcheck uses a custom preprocessor that is
not able to parse complex preprocessor code in the Zephyr framework.
Instead, the user can specify this option to skip headers included from
packages and only check project sources.

* Fix toolchain built-in include paths order
C++ and fixed directories should have higher priority

* Refactor check feature

The main purpose is to prepare a more comprehensive build environment.
It's crucial for cppcheck to be able to check complex frameworks like
zephyr, esp-idf, etc. Also detect a special case when cppcheck fails to check
the entire project (e.g. a syntax error due to custom preprocessor)

* Add new test for check feature

Tests the ststm32 platform with all tools and the main frameworks

* Update check tools to the latest available versions

* Test check tools and Zephyr framework only with Python 3

* Tidy up code

* Add history entry
2020-04-26 00:10:41 +03:00
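As a quick illustration of the option introduced in the commit above, the new flag can be passed straight to the check command. This is a hedged sketch only; the project path and environment name ("/path/to/project", "zephyr_env") are hypothetical placeholders.

# Minimal sketch: run static analysis with the new "--skip-packages" flag so
# headers pulled in from toolchain/framework packages are ignored and only
# project sources are checked.
import subprocess

subprocess.run(
    ["platformio", "check", "-d", "/path/to/project", "-e", "zephyr_env", "--skip-packages"],
    check=True,
)

Skipping package headers trades some analysis depth for robustness when a tool's preprocessor cannot handle a framework's headers.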
Ivan Kravets
62ede23b0e Bump version to 4.3.2b1 2020-04-25 15:48:55 +03:00
Ivan Kravets
60f28599d9 Echo what is typed when "send_on_enter" device monitor filter is used // Resolve #3452 2020-04-25 15:48:37 +03:00
Ivan Kravets
629f23c4f3 Bump version to 4.3.2a4 2020-04-25 13:20:56 +03:00
Ivan Kravets
27db344739 Bump version to 4.3.2a3 2020-04-25 13:15:55 +03:00
Ivan Kravets
777a47fd99 Minor improvements 2020-04-25 13:14:54 +03:00
Ivan Kravets
e913159cb4 Sync docs 2020-04-24 21:48:57 +03:00
Ivan Kravets
b285c3137a Extend remote hosts with PlatformIO when checking internet connection 2020-04-24 16:25:02 +03:00
Ivan Kravets
f0576ddcd9 Docs: Fix incorrect type for library.json "libCompatMode" field 2020-04-24 13:14:03 +03:00
Shahrustam
6e2cc333f2 disable pio account change password and username update tests 2020-04-24 11:57:17 +03:00
ShahRustam
18c7c5a9be Refactor pio account tests. (#3473) 2020-04-24 11:25:09 +03:00
Ivan Kravets
01945716d3 Sync docs 2020-04-24 00:43:32 +03:00
ShahRustam
2f5b231dc3 Disable pio account tests (#3472)
* minor fix pio account test

* disable pio account change password and username update tests
2020-04-24 00:13:46 +03:00
Shahrustam
75c1aafaef fix pio account tests 2020-04-23 20:45:51 +03:00
ShahRustam
b9714d0ac1 Add pio account tests (#3470)
* add pio account tests

* update tests
2020-04-23 16:05:00 +03:00
Ivan Kravets
5774654582 Switch to Github Actions (#3471) 2020-04-23 16:04:15 +03:00
ShahRustam
0a46b8ab6a add login with code method for account client. add new account rpc handler. (#3468) 2020-04-21 22:44:32 +03:00
Valerii Koval
a556573a4f Move env dependent directories to appropriate CMAKE_BUILD_TYPE // Issue #3460
This allows the list of sources to be populated dynamically depending on
the selected environment. At the same time, the "src" and "lib" folders remain
common for all environments.
2020-04-21 22:01:07 +03:00
Valerii Koval
fd91819b2c Fix missing include paths for check tools
Includes are now split by scopes and imported as a dictionary
2020-04-21 19:26:00 +03:00
Valerii Koval
24c04057e9 CLion: Add paths to libraries specified via lib_extra_dirs option (#3463)
* Add paths to libraries specified via lib_extra_dirs option

Besides, global folders in SRC_LIST seem a bit unnecessary
since there might be unused libraries in these folders

* Refactor processing of includes when exporting IDE/Editor projects

Split includes according to their source. That will help export includes in a more flexible way.
For example some IDEs don't need include paths from toolchains

* Add new record to history log

* Typo fix
2020-04-21 17:37:55 +03:00
Ivan Kravets
2960b73da5 Fix an issue when PIO Remote agent was not reconnected automatically 2020-04-21 12:32:03 +03:00
Ivan Kravets
c4645a9a96 Sync docs 2020-04-21 10:43:34 +03:00
Ivan Kravets
877e84ea1d Bump version to 4.3.2a2 2020-04-19 20:05:21 +03:00
Ivan Kravets
cb1058c693 New PIO Account with "username" and profile support 2020-04-19 20:03:46 +03:00
Ivan Kravets
be6bf5052e Open source PIO Remote client 2020-04-19 19:26:56 +03:00
ShahRustam
7780003d01 New Account Management System (#3443)
* add login for PIO account to account cli

* Remove PyJWT lib. Fixes.

* Add password change for account

* Refactoring. Add Account Client.

* Fixes.

* http -> https.

* adding error handling for expired session.

* Change broker requests from json to form-data.

* Add pio account register command. fixes

* Fixes.

* Fixes.

* Add username and password validation

* fixes

* Add token, forgot commands to pio account

* fix domain

* add update command for pio account

* fixes

* refactor profile update output

* lint

* Update exception text.

* Fix logout

* Add custom user-agent for pio account

* add profile show command. minor fixes.

* Fix pio account show output format.

* Move account related exceptions

* cleaning

* minor fix

* Remove try except for account command authenticated/non-authenticated errors

* fix profile update cli command

* rename first name and last name vars to 'firstname' and 'lastname'
2020-04-19 19:06:06 +03:00
Ivan Kravets
445ca937fd Sync docs 2020-04-18 22:40:27 +03:00
Ivan Kravets
1f4aff7f27 Sync docs 2020-04-16 16:07:02 +03:00
valeros
788351a0cd Fixed an incorrect node path used for pattern matching when processing middleware nodes 2020-04-13 16:41:03 +03:00
Ivan Kravets
5ba7753bfa Sync docs 2020-04-12 17:43:27 +03:00
Ivan Kravets
ae57829190 Generate user agent based on PIO Core environment 2020-04-10 17:59:58 +03:00
Ivan Kravets
030ddf4ea1 Apply black formatting 2020-04-10 17:08:16 +03:00
Ivan Kravets
ccc43633b7 Support for a new dev-platform NXP i.MX RT 2020-04-10 13:14:10 +03:00
Ivan Kravets
aba2ea9746 Temporarily disable "infineonxmc" in CI due to a broken dev-platform 2020-04-09 12:44:52 +03:00
Valerii Koval
d5ebbb99a7 Dynamically choose extension for file with unit test transports (#3454)
A C file should be used by default, as only Arduino and mbed require C++ files.
Since there may be many legacy projects, the custom transport is also set to use C++.
2020-04-09 12:02:38 +03:00
Ivan Kravets
a636a60e00 Sort examples 2020-04-08 22:34:04 +03:00
Ivan Kravets
ad7e3f83aa Fix tests/commands/test_init.py 2020-04-08 17:18:59 +03:00
valeros
baa7aab1d7 Specify C++ as the language for .ino files when preprocessing them for PVS-Studio // Resolve #3450 2020-04-07 11:35:17 +03:00
Ivan Kravets
2e320c01b3 Fix test 2020-04-06 18:19:34 +03:00
Ivan Kravets
3cd6c618a4 Docs: Sync STM32 dev-platform 2020-04-06 16:56:05 +03:00
Ivan Kravets
5ea759bc3e Document PIO Core: Integration with custom applications (extensions, plugins) 2020-04-05 19:48:50 +03:00
Ivan Kravets
11cb3a1bf7 Docs: Add info about stm32pio tool for STM32Cube framework 2020-04-04 00:45:45 +03:00
Ivan Kravets
7412cf586b Update docs for Zephyr RTOS 2.2 2020-04-02 00:59:35 +03:00
Ivan Kravets
f976cf7ae5 Docs: Extend tutorials list 2020-03-30 17:15:18 +03:00
Ivan Kravets
e92b498b68 Fixed an issue when saving libraries in new project results in error "No option 'lib_deps' in section" // Resolve #3442 2020-03-27 13:34:14 +02:00
Ivan Kravets
1b0810ec87 Docs: Fix broken link for creating dev-platform // Resolve #123 2020-03-26 22:31:15 +02:00
Ivan Kravets
45e523a468 Docs: Sync with Atmel SAM dev-platform 2020-03-25 17:01:12 +02:00
Ivan Kravets
d42481d196 Sync docs 2020-03-24 18:03:23 +02:00
Ivan Kravets
11c946bfe4 Sync Espressif 32 dev-platform 2020-03-23 19:52:57 +02:00
Ivan Kravets
589d6f9e12 Docs: Sync Espressif 32 dev-platform 2020-03-23 19:35:18 +02:00
Ivan Kravets
79b3a232fc Move debug client and server implementations to "process" folder 2020-03-21 22:00:14 +02:00
Ivan Kravets
f95230b86e Fixed UnicodeDecodeError on Windows when network drive (NAS) is used // Resolve #3417 2020-03-21 21:53:42 +02:00
Ivan Kravets
fc9a16aa81 Merge branch 'feature/issue-3417-unicodeerror-nas' into develop 2020-03-21 21:44:13 +02:00
Ivan Kravets
81a4d28918 Docs: Remove duplicate demo image of PlatformIO for CLion 2020-03-21 16:39:17 +02:00
Ivan Kravets
fd137fe054 Bump version to 4.3.2a1 2020-03-21 13:28:31 +02:00
Ivan Kravets
efd3b244e1 Force PIPE reader to UTF-8 on Windows // Issue #3417 2020-03-21 13:27:46 +02:00
Richard Coleman
dbeaaf270c fix typo in URL (#3432) 2020-03-21 00:02:50 +02:00
Ivan Kravets
32642b7ec8 Fix broken link to Renode in history 2020-03-20 17:16:49 +02:00
Ivan Kravets
096c2f6165 Typo fix in docs 2020-03-20 17:11:31 +02:00
Ivan Kravets
91ae8b4cc7 Fixed typo in history 2020-03-20 15:15:30 +02:00
Ivan Kravets
cc52890d45 Merge branch 'release/v4.3.1' 2020-03-20 15:13:46 +02:00
Ivan Kravets
5a12f1f56e Merge tag 'v4.3.1' into develop
Bump version to 4.3.1
2020-03-20 15:13:46 +02:00
Ivan Kravets
b7b9ee5a80 Bump version to 4.3.1 2020-03-20 15:13:40 +02:00
Ivan Kravets
97a0cbdd18 Skip Click 7.1 and 7.1.1 on Windows due to broken releases 2020-03-20 15:11:14 +02:00
Ivan Kravets
b8f43732fe Docs: update What's PlatformIO and PIO IDE pages 2020-03-20 14:44:24 +02:00
Ivan Kravets
658b3df123 Fixed a TypeError "super(type, obj): obj must be an instance or subtype of type" when the device monitor is used with a custom dev-platform filter // Resolve #3431 2020-03-20 13:56:30 +02:00
Ivan Kravets
d32312e738 Fixed an issue when lib_archive = no was not honored in "platformio.ini" 2020-03-20 13:34:35 +02:00
Ivan Kravets
20023f8d8a Bump version to 4.3.1a1 2020-03-20 13:02:11 +02:00
Ivan Kravets
6b2ff04bbf Fixed an error "SyntaxError: 'return' with argument inside generator" for PIO Unified Debugger when Python 2.7 is used 2020-03-20 13:01:33 +02:00
Ivan Kravets
d80a9c820d Merge branch 'release/v4.3.0' 2020-03-19 22:38:05 +02:00
Ivan Kravets
4b62af1675 Merge tag 'v4.3.0' into develop
Bump version to 4.3.0
2020-03-19 22:38:05 +02:00
Ivan Kravets
6414e1d9e3 Bump version to 4.3.0 2020-03-19 22:37:16 +02:00
Ivan Kravets
a55f04dc28 Warn when a socket for PIO Home cannot be allocated 2020-03-19 22:36:55 +02:00
Ivan Kravets
2d68e28a70 Fix auto-ready logic for debugging server 2020-03-19 21:33:23 +02:00
Ivan Kravets
4c2a157dce Bump version to 4.3.0rc1 2020-03-19 19:28:13 +02:00
Ivan Kravets
d9647dec95 Add support for debugging server "ready_pattern" 2020-03-19 19:17:54 +02:00
Ivan Kravets
15647c81f0 New standalone (1-script) PlatformIO Core Installer 2020-03-19 18:26:30 +02:00
Ivan Kravets
24a0d9123e Update history with initial support for Renode 2020-03-19 17:04:05 +02:00
Ivan Kravets
720c29350d Add docs for Renode debugging tool // Issue #3401 2020-03-19 16:58:18 +02:00
valeros
aa939b07b1 Update default init config for Renode 2020-03-19 16:17:51 +02:00
Ivan Kravets
0e3c3abf73 GDB init commands for Renode simulation framework // Issue #3401 2020-03-19 15:16:55 +02:00
Ivan Kravets
a8606f4efa Refactor debug GDB initial configurations 2020-03-19 14:49:25 +02:00
ShahRustam
475f898222 Replace installer script with a new one // Resolve #3420 (#3428)
* Replace installer script with a new one. Resolve #3420

* temp file name fix

* get-platformio.py script update.

* small fix
2020-03-19 13:26:51 +02:00
Ivan Kravets
69f5fdf8e1 Remove debug code 2020-03-19 01:05:12 +02:00
Ivan Kravets
fe1ad35cad Merge branch 'feature/issue-3401-renode-support' into develop 2020-03-19 00:50:21 +02:00
Ivan Kravets
352a0b7377 Wait for an output from debug server 2020-03-19 00:46:23 +02:00
Ivan Kravets
52689bc5e8 Wait until debug server is ready 2020-03-19 00:19:59 +02:00
Ivan Kravets
3dd3ea1c35 Show a hexadecimal representation of the data (code point of each character) with `hexlify` filter 2020-03-18 18:55:54 +02:00
Ivan Kravets
fff33d8c29 Do not send CR+NL for "send_on_enter" device monitor filter 2020-03-18 17:25:40 +02:00
Ivan Kravets
db9829a11e Sync docs 2020-03-18 00:36:07 +02:00
Ivan Kravets
9a1b5d869d Bump version to 4.3.0b2 2020-03-18 00:13:03 +02:00
Ivan Kravets
605cd36e27 Send a text to device on ENTER with `send_on_enter` filter // Resolve #926 2020-03-18 00:09:40 +02:00
Ivan Kravets
24a23b67dd Fix formatting issue 2020-03-17 23:10:06 +02:00
Ivan Kravets
0df72411a0 Device Monitor Filter API, implement "time" and "log2file" filters // Resolve #981 Resolve #670 2020-03-17 23:08:57 +02:00
Ivan Kravets
5a72033622 Fixed an issue when unknown transport is used for PIO Unit Testing // Resolve #3422 2020-03-17 17:42:54 +02:00
Ivan Kravets
4e6095ca13 Update docs and history 2020-03-17 17:39:11 +02:00
Matthew Mirvish
f81b0b2a84 Ensure all commands in compilation_commands.json use absolute paths. (#3415)
* Fix resolving of absolute path for toolchain

By placing the `where_is_program` call into this function, all references to the compiler are made absolute, instead of just the ones in the top environment. Previously, references to the compiler for user source code did not use the full path in the compilation database, which broke `clangd`'s detection of system includes.

* Linting issue
2020-03-17 16:30:28 +02:00
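The idea behind the fix above can be shown with a small standalone sketch (not PlatformIO's actual implementation): resolve the compiler executable to an absolute path before writing a compilation-database entry, so tools such as clangd can derive system include paths from it. Here shutil.which stands in for the core's where_is_program helper, and the compiler/flags/paths are illustrative.

import json
import shutil


def compiledb_entry(compiler, flags, src_file, cwd):
    # Resolve the compiler on PATH so the entry contains an absolute path;
    # fall back to the bare name if it cannot be found.
    resolved = shutil.which(compiler) or compiler
    return {
        "directory": cwd,
        "command": " ".join([resolved] + flags + ["-c", src_file]),
        "file": src_file,
    }


print(json.dumps(compiledb_entry("gcc", ["-Os", "-Wall"], "src/main.c", "/proj"), indent=2))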
Ivan Kravets
314f634e16 Docs: Improvements for CLion docs. 2020-03-15 00:41:16 +02:00
Ivan Kravets
ba040ba2ba Docs: Workaround for ReadTheDocs bug 2020-03-14 20:32:42 +02:00
Ivan Kravets
a22ed40256 Added initial support for an official "PlatformIO for CLion IDE" plugin // Resolve #2201 2020-03-14 19:31:00 +02:00
Ivan Kravets
58a4ff8246 Skip broken Click 7.1 & 7.1.1, see Click's issue #1501 2020-03-14 12:18:00 +02:00
Ivan Kravets
9a5ebfb642 Bump version to 4.3.0b1 2020-03-12 15:10:25 +02:00
Ivan Kravets
5d0faaa5a8 Refactor docs structure 2020-03-12 15:09:20 +02:00
Ivan Kravets
108b892e30 Control device monitor output with filters and text transformations 2020-03-12 14:28:54 +02:00
Ivan Kravets
0ff37c9999 Implement universal "get_object_members" helper 2020-03-12 14:24:20 +02:00
Vojtěch Boček
8c3de609ab Add ESP crash trace decoding to monitor (#3383)
* Implement mechanism for adding platform filters into miniterm

Updates platformio/platform-espressif8266#31

* DeviceMonitorFilter: fixes for Windows and Python2
2020-03-11 13:22:01 +02:00
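For context on the mechanism mentioned above: device monitor filters plug into pyserial's miniterm, whose Transform interface exposes rx/tx hooks that rewrite incoming and outgoing text. The standalone sketch below illustrates that interface only; it is not PlatformIO's own filter API, and the class name is hypothetical.

from serial.tools.miniterm import Transform


class HexDump(Transform):
    """Show every received character as its hexadecimal code point."""

    def rx(self, text):
        # transform data received from the device before it is printed
        return " ".join("%02x" % ord(ch) for ch in text) + " "

    def tx(self, text):
        # leave data typed by the user untouched
        return text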
valeros
073efef2a1 Explicitly use Python-x64 with Appveyor CI 2020-03-10 15:54:01 +02:00
Ilia Motornyi
b9fd97dae4 Changes required for CLion PlatformIO plugin (#3298) 2020-03-09 15:47:41 +02:00
Ivan Kravets
60a7af6a8c Docs: Update recent articles 2020-03-09 14:58:35 +02:00
Ivan Kravets
0f02b3b653 Improved support for Arduino "library.properties" `depends` field 2020-03-07 17:44:28 +02:00
Ivan Kravets
620335631f Bump version to 4.2.2b1 2020-03-06 22:08:38 +02:00
Ivan Kravets
3ef96cb215 Minor fixes 2020-03-06 00:43:57 +02:00
Ivan Kravets
59e1c88726 Fixed an issue when `"libArchive": false` in "library.json" does not work // Resolve #3403 2020-03-06 00:37:48 +02:00
Ivan Kravets
3a27fbc883 Fixed an issue when Python 2 does not keep encoding when converting .INO file // Resolve #3393 2020-03-05 23:52:46 +02:00
Ivan Kravets
ce6b96ea84 Use native open/io.open for file contents reading/writing 2020-03-05 23:52:13 +02:00
Ivan Kravets
3275bb59bf Fix test 2020-03-04 18:14:51 +02:00
Ivan Kravets
fbb62fa8a6 Bump version to 4.2.2a3 2020-03-03 23:10:54 +02:00
Ivan Kravets
261c46d4ef Add support for Arm Mbed "module.json" `dependencies` field // Resolve #3400 2020-03-03 23:10:19 +02:00
Ivan Kravets
0c0ceb2caa Sync docs 2020-03-03 23:03:14 +02:00
Ivan Kravets
de60f20c21 Sync docs 2020-03-03 14:59:03 +02:00
valeros
314fe7d309 Initial support for menuconfig target 2020-03-03 00:58:07 +02:00
Ivan Kravets
a271143c52 Sync docs 2020-03-02 23:25:28 +02:00
Ivan Kravets
2d4a3db250 Fixed an issue with expanding $WORKSPACE_DIR for library manager 2020-02-29 23:08:08 +02:00
Ivan Kravets
7fba6f78d6 Bump version to 4.2.2a2 2020-02-29 21:59:58 +02:00
Ivan Kravets
eee12b9b66 Fixed an issue "the JSON object must be str, not 'bytes'" when PIO Home is used with Python 3.5 // Resolve #3396 2020-02-29 21:59:10 +02:00
Ivan Kravets
d3e151feeb Sync docs 2020-02-29 18:44:37 +02:00
Ivan Kravets
dd1fe74956 PyLint fix 2020-02-21 15:44:55 +02:00
Ivan Kravets
49aed34325 Rename PIO Plus to Professional 2020-02-21 15:44:24 +02:00
Ivan Kravets
81ba2a5a74 Sync docs 2020-02-20 18:22:12 +02:00
Ivan Kravets
1c87f83463 Parse package dependencies declared as a list of strings 2020-02-18 21:55:01 +02:00
Ivan Kravets
e15f227c48 Docs: Sync Atmel SAM dev-platform 2020-02-18 14:45:54 +02:00
Ivan Kravets
ea5f2742f8 Bump version to 4.2.2a1 2020-02-18 00:05:20 +02:00
Ivan Kravets
9fd0943b75 Fixed an issue when quitting from PlatformIO IDE does not shutdown PIO Home server 2020-02-18 00:03:23 +02:00
Ivan Kravets
b8312d545c Merge tag 'v4.2.1' into develop
Bump version to 4.2.1
2020-02-17 14:25:27 +02:00
323 changed files with 26050 additions and 12735 deletions


@@ -1,29 +0,0 @@
build: off
platform:
- x64
environment:
matrix:
- TOXENV: "py27"
PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P2_{build}
- TOXENV: "py36"
PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P3_{build}
install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=C:\MinGW\bin;%PATH%
- cmd: SET PLATFORMIO_CORE_DIR=C:\.pio
- cmd: pip install --force-reinstall tox
test_script:
- cmd: tox
notifications:
- provider: Slack
incoming_webhook:
secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
on_build_success: false
on_build_failure: true
on_build_status_changed: true

.github/workflows/core.yml

@@ -0,0 +1,50 @@
name: Core
on: [push, pull_request]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
exclude:
- os: macos-latest
python-version: "3.6"
- os: windows-latest
python-version: "3.10"
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Python Lint
run: |
tox -e lint
- name: Integration Tests
env:
TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
run: |
tox -e testcore
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Core*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}

.github/workflows/docs.yml

@@ -0,0 +1,109 @@
name: Docs
on: [push, pull_request]
jobs:
build:
name: Build Docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Build docs
run: |
tox -e docs
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Docs*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}
- name: Preserve Docs
if: ${{ github.event_name == 'push' }}
run: |
tar -czvf docs.tar.gz -C docs/_build html rtdpage
- name: Save artifact
if: ${{ github.event_name == 'push' }}
uses: actions/upload-artifact@v2
with:
name: docs
path: ./docs.tar.gz
deploy:
name: Deploy Docs
needs: build
runs-on: ubuntu-latest
env:
DOCS_REPO: platformio/platformio-docs
DOCS_DIR: platformio-docs
LATEST_DOCS_DIR: latest-docs
RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
if: ${{ github.event_name == 'push' }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
with:
name: docs
- name: Unpack artifact
run: |
mkdir ./${{ env.LATEST_DOCS_DIR }}
tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
- name: Delete Artifact
uses: geekyeggo/delete-artifact@v1
with:
name: docs
- name: Select Docs type
id: get-destination-dir
run: |
if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
echo "::set-output name=dst_dir::stable"
else
echo "::set-output name=dst_dir::latest"
fi
- name: Checkout latest Docs
continue-on-error: true
uses: actions/checkout@v2
with:
repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }}
ref: gh-pages
- name: Synchronize Docs
run: |
rm -rf ${{ env.DOCS_DIR }}/.git
rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
rm -rf ${{ env.DOCS_DIR }}/page
mkdir -p ${{ env.DOCS_DIR }}/page
cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
fi
- name: Validate Docs
run: |
if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
echo "Docs folder is empty. Aborting!"
exit 1
fi
- name: Deploy to Github Pages
uses: peaceiris/actions-gh-pages@v3
with:
personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
external_repository: ${{ env.DOCS_REPO }}
publish_dir: ./${{ env.DOCS_DIR }}
commit_message: Sync Docs

.github/workflows/examples.yml

@@ -0,0 +1,68 @@
name: Examples
on: [push, pull_request]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-18.04, windows-latest, macos-latest]
python-version: [3.7]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Run on Linux
if: startsWith(matrix.os, 'ubuntu')
env:
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51"
run: |
# ChipKIT issue: install 32-bit support for GCC PIC32
sudo apt-get install libc6-i386
# Free space
sudo apt clean
docker rmi $(docker image ls -aq)
df -h
# Run
tox -e testexamples
- name: Run on macOS
if: startsWith(matrix.os, 'macos')
env:
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,lattice_ice40,gd32v"
run: |
df -h
tox -e testexamples
- name: Run on Windows
if: startsWith(matrix.os, 'windows')
env:
PLATFORMIO_CORE_DIR: C:/pio
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
run: |
tox -e testexamples
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Examples*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
token: ${{ secrets.SLACK_GITHUB_TOKEN }}

.gitignore

@@ -1,6 +1,6 @@
*.egg-info
*.pyc
.pioenvs
__pycache__
.tox
docs/_build
dist


@@ -1,3 +0,0 @@
[settings]
line_length=88
known_third_party=SCons, twisted, autobahn, jsonrpc


@@ -1,3 +1,6 @@
[REPORTS]
output-format=colorized
[MESSAGES CONTROL]
disable=
bad-continuation,
@@ -11,5 +14,10 @@ disable=
too-few-public-methods,
useless-object-inheritance,
useless-import-alias,
fixme,
bad-option-value
bad-option-value,
consider-using-dict-items,
consider-using-f-string,
; PY2 Compat
super-with-arguments,
raise-missing-from


@@ -1,12 +0,0 @@
# See https://docs.readthedocs.io/en/stable/config-file/index.html
version: 2
sphinx:
configuration: docs/conf.py
formats:
- pdf
submodules:
include: all


@@ -1,3 +0,0 @@
[style]
blank_line_before_nested_class_or_def = true
allow_multiline_lambdas = true


@@ -1,39 +0,0 @@
language: python
matrix:
include:
- os: linux
sudo: false
python: 2.7
env: TOX_ENV=docs
- os: linux
sudo: required
python: 2.7
env: TOX_ENV=py27 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: linux
sudo: required
python: 3.6
env: TOX_ENV=py36 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: osx
language: generic
env: TOX_ENV=skipexamples
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- pip install -U tox
# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
script:
- tox -e $TOX_ENV
notifications:
email: false
slack:
rooms:
secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
on_failure: always
on_success: change


@@ -1,21 +1,21 @@
Contributing
------------
To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.
To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.
1. Fork the repository on GitHub.
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py27`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`
5. Activate current development environment:
* Windows: `.tox\py27\Scripts\activate`
* Bash/ZSH: `source .tox/py27/bin/activate`
* Fish: `source .tox/py27/bin/activate.fish`
* Windows: `.tox\py37\Scripts\activate`
* Bash/ZSH: `source .tox/py37/bin/activate`
* Fish: `source .tox/py37/bin/activate.fish`
6. Make changes to code, documentation, etc.
7. Lint source code `make lint`
7. Lint source code `make before-commit`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub.
11. Submit a Pull Request on GitHub

File diff suppressed because it is too large.


@@ -1,18 +1,19 @@
lint:
pylint --rcfile=./.pylintrc ./platformio
pylint -j 6 --rcfile=./.pylintrc ./tests
pylint -j 6 --rcfile=./.pylintrc ./platformio
isort:
isort -rc ./platformio
isort -rc ./tests
isort ./platformio
isort ./tests
format:
black --target-version py27 ./platformio
black --target-version py27 ./tests
black ./platformio
black ./tests
test:
py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
before-commit: isort format lint test
before-commit: isort format lint
clean-docs:
rm -rf docs/_build
@@ -27,8 +28,11 @@ clean: clean-docs
profile:
# Usage $ > make PIOARGS="boards" profile
python -m cProfile -o .tox/.tmp/cprofile.prof $(shell which platformio) ${PIOARGS}
python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
snakeviz .tox/.tmp/cprofile.prof
pack:
python setup.py sdist
publish:
python setup.py sdist upload


@@ -1,133 +1,83 @@
PlatformIO
==========
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
:target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
:alt: SWUbanner
.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
:target: https://travis-ci.org/platformio/platformio-core
:alt: Travis.CI Build Status
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
:alt: AppVeyor.CI Build Status
PlatformIO Core
===============
.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
:target: https://docs.platformio.org/en/latest/core/index.html
:alt: CI Build for PlatformIO Core
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
:target: https://github.com/platformio/platformio-examples
:alt: CI Build for dev-platform examples
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
:target: https://docs.platformio.org?utm_source=github&utm_medium=core
:alt: CI Build for Docs
.. image:: https://img.shields.io/pypi/v/platformio.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: Latest Version
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: License
.. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
:alt: Community Forums
:target: https://community.platformio.org?utm_source=github&utm_medium=core
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
:alt: PlatformIO Labs
:target: https://piolabs.com/?utm_source=github&utm_medium=core
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
**Quick Links:** `Homepage <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Registry <https://registry.platformio.org?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
`Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
**Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
`Twitter <https://twitter.com/PlatformIO_Org>`_ |
`Facebook <https://www.facebook.com/platformio>`_ |
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
`Bintray <https://bintray.com/platformio>`_ |
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
`Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ a new generation ecosystem for embedded development
`PlatformIO <https://platformio.org>`_ is a professional collaborative platform for embedded development.
**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
* Static Code Analyzer and Remote Unit Testing
* Multi-platform and Multi-architecture Build System
* Firmware File Explorer and Memory Inspection.
* Firmware File Explorer and Memory Inspection
Get Started
-----------
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
Instruments
-----------
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
PIO Plus
--------
Solutions
---------
* `PIO Check <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_
**Advanced**
* `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html?utm_source=github&utm_medium=core>`_
* `Static Code Analysis <https://docs.platformio.org/en/latest/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `Remote Development <https://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
Registry
--------
* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
Development Platforms
---------------------
* `Aceinna IMU <https://platformio.org/platforms/aceinna_imu?utm_source=github&utm_medium=core>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
* `Kendryte K210 <https://platformio.org/platforms/kendryte210?utm_source=github&utm_medium=core>`_
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `Nuclei <https://platformio.org/platforms/nuclei?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `RISC-V GAP <https://platformio.org/platforms/riscv_gap?utm_source=github&utm_medium=core>`_
* `Shakti <https://platformio.org/platforms/shakti?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `ST STM8 <https://platformio.org/platforms/ststm8?utm_source=github&utm_medium=core>`_
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_
Frameworks
----------
* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `Freedom E SDK <https://platformio.org/frameworks/freedom-e-sdk?utm_source=github&utm_medium=core>`_
* `GigaDevice GD32V SDK <https://platformio.org/frameworks/gd32vf103-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte Standalone SDK <https://platformio.org/frameworks/kendryte-standalone-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte FreeRTOS SDK <https://platformio.org/frameworks/kendryte-freertos-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `Mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `Nuclei SDK <https://platformio.org/frameworks/nuclei-sdk?utm_source=github&utm_medium=core>`_
* `PULP OS <https://platformio.org/frameworks/pulp-os?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Shakti SDK <https://platformio.org/frameworks/shakti-sdk?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
* `Zephyr <https://platformio.org/frameworks/zephyr?utm_source=github&utm_medium=core>`_
* `Libraries <https://registry.platformio.org/search?t=library&utm_source=github&utm_medium=core>`_
* `Development Platforms <https://registry.platformio.org/search?t=platform&utm_source=github&utm_medium=core>`_
* `Development Tools <https://registry.platformio.org/search?t=tool&utm_source=github&utm_medium=core>`_
Contributing
------------
@@ -141,7 +91,6 @@ Share minimal diagnostics and usage information to help us make PlatformIO bette
It is enabled by default. For more information see:
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
License
-------

docs

Submodule docs updated: 4b50528d78...5bf0037c66


@@ -12,25 +12,51 @@
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 2, 1)
import sys
VERSION = (6, 0, 1)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A new generation ecosystem for embedded development. "
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"RISC-V, FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3"
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__apiurl__ = "https://api.platformio.org"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__


@@ -18,16 +18,16 @@ from traceback import format_exc
import click
from platformio import __version__, exception, maintenance, util
from platformio import __version__, exception, maintenance
from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN
from platformio.compat import IS_CYGWIN, ensure_python3
@click.command(
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
)
@click.version_option(__version__, prog_name="PlatformIO")
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
@click.version_option(__version__, prog_name="PlatformIO Core")
@click.option("--force", "-f", is_flag=True, help="DEPRECATED")
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.pass_context
@@ -56,15 +56,14 @@ def cli(ctx, force, caller, no_ansi):
maintenance.on_platformio_start(ctx, force, caller)
@cli.resultcallback()
@cli.result_callback()
@click.pass_context
def process_result(ctx, result, *_, **__):
maintenance.on_platformio_end(ctx, result)
@util.memoized()
def configure():
if CYGWIN:
if IS_CYGWIN:
raise exception.CygwinEnvDetected()
# https://urllib3.readthedocs.org
@@ -98,6 +97,7 @@ def main(argv=None):
assert isinstance(argv, list)
sys.argv = argv
try:
ensure_python3(raise_exception=True)
configure()
cli() # pylint: disable=no-value-for-parameter
except SystemExit as e:


@@ -12,59 +12,43 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
from __future__ import absolute_import
import getpass
import hashlib
import json
import os
import platform
import socket
import uuid
from os import environ, getenv, listdir, remove
from os.path import dirname, isdir, isfile, join, realpath
from time import time
import requests
from platformio import exception, fs, lockfile
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio.proc import is_ci
from platformio.project.helpers import (
get_default_projects_dir,
get_project_cache_dir,
get_project_core_dir,
)
from platformio import __version__, exception, fs, proc
from platformio.compat import IS_WINDOWS, hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_default_projects_dir
def projects_dir_validate(projects_dir):
assert isdir(projects_dir)
return realpath(projects_dir)
assert os.path.isdir(projects_dir)
return os.path.abspath(projects_dir)
DEFAULT_SETTINGS = {
"auto_update_libraries": {
"description": "Automatically update libraries (Yes/No)",
"value": False,
},
"auto_update_platforms": {
"description": "Automatically update platforms (Yes/No)",
"value": False,
},
"check_libraries_interval": {
"description": "Check for the library updates interval (days)",
"value": 7,
},
"check_platformio_interval": {
"description": "Check for the new PlatformIO interval (days)",
"value": 3,
},
"check_platforms_interval": {
"description": "Check for the platform updates interval (days)",
"description": "Check for the new PlatformIO Core interval (days)",
"value": 7,
},
"check_prune_system_threshold": {
"description": "Check for pruning unnecessary data threshold (megabytes)",
"value": 1024,
},
"enable_cache": {
"description": "Enable caching for API requests and Library Manager",
"description": "Enable caching for HTTP API requests",
"value": True,
},
"strict_ssl": {"description": "Strict SSL for PlatformIO Services", "value": False},
"enable_telemetry": {
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"description": ("Telemetry service <https://bit.ly/pio-telemetry> (Yes/No)"),
"value": True,
},
"force_verbose": {
@@ -72,7 +56,7 @@ DEFAULT_SETTINGS = {
"value": False,
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"description": "Default location for PlatformIO projects (PlatformIO Home)",
"value": get_default_projects_dir(),
"validator": projects_dir_validate,
},
@@ -91,7 +75,10 @@ class State(object):
self.path = path
self.lock = lock
if not self.path:
self.path = join(get_project_core_dir(), "appstate.json")
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
if not os.path.isdir(core_dir):
os.makedirs(core_dir)
self.path = os.path.join(core_dir, "appstate.json")
self._storage = {}
self._lockfile = None
self.modified = False
@@ -99,7 +86,7 @@ class State(object):
def __enter__(self):
try:
self._lock_state_file()
if isfile(self.path):
if os.path.isfile(self.path):
self._storage = fs.load_json(self.path)
assert isinstance(self._storage, dict)
except (
@@ -114,20 +101,20 @@ class State(object):
def __exit__(self, type_, value, traceback):
if self.modified:
try:
with open(self.path, "w") as fp:
fp.write(dump_json_to_unicode(self._storage))
with open(self.path, mode="w", encoding="utf8") as fp:
fp.write(json.dumps(self._storage))
except IOError:
raise exception.HomeDirPermissionsError(get_project_core_dir())
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
self._unlock_state_file()
def _lock_state_file(self):
if not self.lock:
return
self._lockfile = lockfile.LockFile(self.path)
self._lockfile = LockFile(self.path)
try:
self._lockfile.acquire()
except IOError:
raise exception.HomeDirPermissionsError(dirname(self.path))
raise exception.HomeDirPermissionsError(os.path.dirname(self.path))
def _unlock_state_file(self):
if hasattr(self, "_lockfile") and self._lockfile:
@@ -141,6 +128,9 @@ class State(object):
def as_dict(self):
return self._storage
def keys(self):
return self._storage.keys()
def get(self, key, default=True):
return self._storage.get(key, default)
@@ -166,146 +156,6 @@ class State(object):
return item in self._storage
class ContentCache(object):
def __init__(self, cache_dir=None):
self.cache_dir = None
self._db_path = None
self._lockfile = None
self.cache_dir = cache_dir or get_project_cache_dir()
self._db_path = join(self.cache_dir, "db.data")
def __enter__(self):
self.delete()
return self
def __exit__(self, type_, value, traceback):
pass
def _lock_dbindex(self):
if not self.cache_dir:
os.makedirs(self.cache_dir)
self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except: # pylint: disable=bare-except
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
return True
def get_cache_path(self, key):
assert "/" not in key and "\\" not in key
key = str(key)
assert len(key) > 3
return join(self.cache_dir, key[-2:], key)
@staticmethod
def key_from_args(*args):
h = hashlib.md5()
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()
def get(self, key):
cache_path = self.get_cache_path(key)
if not isfile(cache_path):
return None
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
if not get_setting("enable_cache"):
return False
cache_path = self.get_cache_path(key)
if isfile(cache_path):
self.delete(key)
if not data:
return False
if not isdir(self.cache_dir):
os.makedirs(self.cache_dir)
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
except UnicodeError:
if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
def delete(self, keys=None):
""" Keys=None, delete expired items """
if not isfile(self._db_path):
return None
if not keys:
keys = []
if not isinstance(keys, list):
keys = [keys]
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
expire, path = line.split("=")
try:
if (
time() < int(expire)
and isfile(path)
and path not in paths_for_delete
):
newlines.append(line)
continue
except ValueError:
pass
found = True
if isfile(path):
try:
remove(path)
if not listdir(dirname(path)):
fs.rmtree(dirname(path))
except OSError:
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return True
def clean(self):
if not self.cache_dir or not isdir(self.cache_dir):
return
fs.rmtree(self.cache_dir)
def clean_cache():
with ContentCache() as cc:
cc.clean()
def sanitize_setting(name, value):
if name not in DEFAULT_SETTINGS:
raise exception.InvalidSettingName(name)
@@ -343,8 +193,8 @@ def delete_state_item(name):
def get_setting(name):
_env_name = "PLATFORMIO_SETTING_%s" % name.upper()
if _env_name in environ:
return sanitize_setting(name, getenv(_env_name))
if _env_name in os.environ:
return sanitize_setting(name, os.getenv(_env_name))
with State() as state:
if "settings" in state and name in state["settings"]:
@@ -380,8 +230,8 @@ def is_disabled_progressbar():
return any(
[
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
proc.is_ci(),
os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
]
)
@@ -391,26 +241,43 @@ def get_cid():
if cid:
return cid
uid = None
if getenv("C9_UID"):
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
uid = (
requests.get(
"{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"),
)
)
.json()
.get("id")
)
except: # pylint: disable=bare-except
pass
if os.getenv("GITHUB_USER"):
uid = os.getenv("GITHUB_USER")
elif os.getenv("GITPOD_GIT_USER_NAME"):
uid = os.getenv("GITPOD_GIT_USER_NAME")
if not uid:
uid = uuid.getnode()
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
cid = str(cid)
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
set_state_item("cid", cid)
return cid
def get_user_agent():
data = [
"PlatformIO/%s" % __version__,
"CI/%d" % int(proc.is_ci()),
"Container/%d" % int(proc.is_container()),
]
if get_session_var("caller_id"):
data.append("Caller/%s" % get_session_var("caller_id"))
if os.getenv("PLATFORMIO_IDE"):
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
data.append("Python/%s" % platform.python_version())
data.append("Platform/%s" % platform.platform())
return " ".join(data)
def get_host_id():
h = hashlib.sha1(hashlib_encode_data(get_cid()))
try:
username = getpass.getuser()
h.update(hashlib_encode_data(username))
except: # pylint: disable=bare-except
pass
return h.hexdigest()
def get_host_name():
return str(socket.gethostname())[:255]


@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from os import environ, makedirs
from os.path import isdir, join
from time import time
import click
@@ -28,9 +28,8 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from SCons.Script import Import # pylint: disable=import-error
from SCons.Script import Variables # pylint: disable=import-error
from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformBase
from platformio import compat, fs
from platformio.platform.base import PlatformBase
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_dir
@@ -45,46 +44,58 @@ clivars.AddVariables(
("PIOENV",),
("PIOTEST_RUNNING_NAME",),
("UPLOAD_PORT",),
("PROGRAM_ARGS",),
)
DEFAULT_ENV_OPTIONS = dict(
tools=[
"ar",
"as",
"cc",
"c++",
"link",
"pioasm",
"platformio",
"pioplatform",
"pioproject",
"pioplatform",
"piotest",
"piotarget",
"piomaxlen",
"piolib",
"pioupload",
"piomisc",
"pioide",
"piosize",
"pioino",
"piomisc",
"piointegration",
],
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
variables=clivars,
# Propagating External Environment
ENV=environ,
ENV=os.environ,
UNIX_TIME=int(time()),
BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
LIBPATH=["$BUILD_DIR"],
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path(),
IDE_EXTRA_DATA={},
)
# Declare command verbose messages
command_strings = dict(
ARCOM="Archiving",
LINKCOM="Linking",
RANLIBCOM="Indexing",
ASCOM="Compiling",
ASPPCOM="Compiling",
CCCOM="Compiling",
CXXCOM="Compiling",
)
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
for name, value in command_strings.items():
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
@@ -101,53 +112,70 @@ env.Replace(
config = env.GetProjectConfig()
env.Replace(
PROJECT_DIR=get_project_dir(),
PROJECT_CORE_DIR=config.get_optional_dir("core"),
PROJECT_PACKAGES_DIR=config.get_optional_dir("packages"),
PROJECT_WORKSPACE_DIR=config.get_optional_dir("workspace"),
PROJECT_LIBDEPS_DIR=config.get_optional_dir("libdeps"),
PROJECT_INCLUDE_DIR=config.get_optional_dir("include"),
PROJECT_SRC_DIR=config.get_optional_dir("src"),
PROJECTSRC_DIR=config.get_optional_dir("src"), # legacy for dev/platform
PROJECT_TEST_DIR=config.get_optional_dir("test"),
PROJECT_DATA_DIR=config.get_optional_dir("data"),
PROJECTDATA_DIR=config.get_optional_dir("data"), # legacy for dev/platform
PROJECT_BUILD_DIR=config.get_optional_dir("build"),
BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
PROJECT_PACKAGES_DIR=config.get("platformio", "packages_dir"),
PROJECT_WORKSPACE_DIR=config.get("platformio", "workspace_dir"),
PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
PROJECTSRC_DIR="$PROJECT_SRC_DIR", # legacy for dev/platform
PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
LIBSOURCE_DIRS=[
config.get_optional_dir("lib"),
join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
config.get_optional_dir("globallib"),
config.get("platformio", "lib_dir"),
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
config.get("platformio", "globallib_dir"),
],
)
if env.subst("$BUILD_CACHE_DIR"):
if not isdir(env.subst("$BUILD_CACHE_DIR")):
makedirs(env.subst("$BUILD_CACHE_DIR"))
env.CacheDir("$BUILD_CACHE_DIR")
if int(ARGUMENTS.get("ISATTY", 0)):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
if env.GetOption("clean"):
env.PioClean(env.subst("$BUILD_DIR"))
if compat.IS_WINDOWS and sys.version_info >= (3, 8) and os.getcwd().startswith("\\\\"):
click.secho("!!! WARNING !!!\t\t" * 3, fg="red")
click.secho(
"Your project is located on a mapped network drive but the "
"current command-line shell does not support the UNC paths.",
fg="yellow",
)
click.secho(
"Please move your project to a physical drive or check this workaround: "
"https://bit.ly/3kuU5mP\n",
fg="yellow",
)
if env.subst("$BUILD_CACHE_DIR"):
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
env.CacheDir("$BUILD_CACHE_DIR")
is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
if env.GetOption("clean") or is_clean_all:
env.PioClean(is_clean_all)
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
# Dynamically load dependent tools
if "compiledb" in COMMAND_LINE_TARGETS:
env.Tool("compilation_db")
if not isdir(env.subst("$BUILD_DIR")):
makedirs(env.subst("$BUILD_DIR"))
if not os.path.isdir(env.subst("$BUILD_DIR")):
os.makedirs(env.subst("$BUILD_DIR"))
env.LoadProjectOptions()
env.LoadPioPlatform()
env.SConscriptChdir(0)
env.SConsignFile(
join("$BUILD_DIR", ".sconsign.py%d%d" % (sys.version_info[0], sys.version_info[1]))
os.path.join(
"$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
)
)
for item in env.GetExtraScripts("pre"):
@@ -188,7 +216,7 @@ env.AddPreAction(
),
)
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
##############################################################################
@@ -197,17 +225,21 @@ if "envdump" in COMMAND_LINE_TARGETS:
click.echo(env.Dump())
env.Exit(0)
if "idedata" in COMMAND_LINE_TARGETS:
if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
projenv = None
try:
Import("projenv")
except: # pylint: disable=bare-except
projenv = env
click.echo(
"\n%s\n"
% dump_json_to_unicode(
projenv.DumpIDEData() # pylint: disable=undefined-variable
)
)
data = projenv.DumpIntegrationData(env)
# dump to a file for further reading by project.helpers.load_build_metadata
with open(
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
mode="w",
encoding="utf8",
) as fp:
json.dump(data, fp)
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
env.Exit(0)
if "sizedata" in COMMAND_LINE_TARGETS:


@@ -1,17 +1,24 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
# Copyright 2015 MongoDB Inc.
# Copyright 2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# http://www.apache.org/licenses/LICENSE-2.0
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
@@ -34,7 +41,7 @@ from platformio.proc import where_is_program
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
# which is the name that most clang tools search for by default.
# TODO: Is there a better way to do this than this global? Right now this exists so that the
# Is there a better way to do this than this global? Right now this exists so that the
# emitter we add can record all of the things it emits, so that the scanner for the top level
# compilation database can access the complete list, and also so that the writer has easy
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
@@ -51,7 +58,7 @@ class __CompilationDbNode(SCons.Node.Python.Value):
def changed_since_last_build_node(*args, **kwargs):
""" Dummy decider to force always building"""
"""Dummy decider to force always building"""
return True
@@ -76,6 +83,16 @@ def makeEmitCompilationDbEntry(comstr):
:return: target(s), source(s)
"""
# Resolve absolute path of toolchain
for cmd in ("CC", "CXX", "AS"):
if cmd not in env:
continue
if os.path.isabs(env[cmd]):
continue
env[cmd] = where_is_program(
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
)
dbtarget = __CompilationDbNode(source)
entry = env.__COMPILATIONDB_Entry(
@@ -87,7 +104,7 @@ def makeEmitCompilationDbEntry(comstr):
__COMPILATIONDB_ENV=env,
)
# TODO: Technically, these next two lines should not be required: it should be fine to
# Technically, these next two lines should not be required: it should be fine to
# cache the entries. However, they don't seem to update properly. Since they are quick
# to re-generate disable caching and sidestep this problem.
env.AlwaysBuild(entry)
@@ -135,7 +152,7 @@ def WriteCompilationDb(target, source, env):
item["file"] = os.path.abspath(item["file"])
entries.append(item)
with open(str(target[0]), "w") as target_file:
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
json.dump(
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
)
@@ -195,14 +212,6 @@ def generate(env, **kwargs):
)
def CompilationDatabase(env, target):
# Resolve absolute path of toolchain
for cmd in ("CC", "CXX", "AS"):
if cmd not in env:
continue
env[cmd] = where_is_program(
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
)
result = env.__COMPILATIONDB_Database(target=target, source=[])
env.AlwaysBuild(result)
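
The emitter/writer pair above produces a standard clang-style compilation database: one JSON object per translation unit with "directory", "command" and "file" keys, which is why WriteCompilationDb now normalizes "file" to an absolute path. A hand-written example of the resulting format (all paths and flags are placeholders):

import json

# Placeholder entries in the compile_commands.json format written by WriteCompilationDb.
entries = [
    {
        "directory": "/home/user/project",                         # placeholder project root
        "command": "gcc -c -o .pio/build/src/main.o src/main.c",   # placeholder command line
        "file": "/home/user/project/src/main.c",                   # absolute, as in the hunk above
    }
]
print(json.dumps(entries, sort_keys=True, indent=4, separators=(",", ": ")))
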


@@ -12,17 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import reactor # pylint: disable=import-error
from twisted.web import static # pylint: disable=import-error
from __future__ import absolute_import
import SCons.Tool.asm # pylint: disable=import-error
#
# Resolve https://github.com/platformio/platformio-core/issues/3917
# Avoid forcing .S to bare assembly on Windows OS
#
if ".S" in SCons.Tool.asm.ASSuffixes:
SCons.Tool.asm.ASSuffixes.remove(".S")
if ".S" not in SCons.Tool.asm.ASPPSuffixes:
SCons.Tool.asm.ASPPSuffixes.append(".S")
class WebRoot(static.File):
def render_GET(self, request):
if request.args.get("__shutdown__", False):
reactor.stop()
return "Server has been stopped"
request.setHeader("cache-control", "no-cache, no-store, must-revalidate")
request.setHeader("pragma", "no-cache")
request.setHeader("expires", "0")
return static.File.render_GET(self, request)
generate = SCons.Tool.asm.generate
exists = SCons.Tool.asm.exists


@@ -0,0 +1,254 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import atexit
import glob
import io
import os
import re
import tempfile
import click
from platformio.compat import get_filesystem_encoding, get_locale_encoding
class InoToCPPConverter(object):
PROTOTYPE_RE = re.compile(
r"""^(
(?:template\<.*\>\s*)? # template
([a-z_\d\&]+\*?\s+){1,2} # return type
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*(\{|;) # must end with `{` or `;`
""",
re.X | re.M | re.I,
)
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
def __init__(self, env):
self.env = env
self._main_ino = None
self._safe_encoding = None
def read_safe_contents(self, path):
error_reported = False
for encoding in (
"utf-8",
None,
get_filesystem_encoding(),
get_locale_encoding(),
"latin-1",
):
try:
with io.open(path, encoding=encoding) as fp:
contents = fp.read()
self._safe_encoding = encoding
return contents
except UnicodeDecodeError:
if not error_reported:
error_reported = True
click.secho(
"Unicode decode error has occurred, please remove invalid "
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
% path,
fg="yellow",
err=True,
)
return ""
def write_safe_contents(self, path, contents):
with io.open(
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
) as fp:
return fp.write(contents)
def is_main_node(self, contents):
return self.DETECTMAIN_RE.search(contents)
def convert(self, nodes):
contents = self.merge(nodes)
if not contents:
return None
return self.process(contents)
def merge(self, nodes):
assert nodes
lines = []
for node in nodes:
contents = self.read_safe_contents(node.get_path())
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
if self.is_main_node(contents):
lines = _lines + lines
self._main_ino = node.get_path()
else:
lines.extend(_lines)
if not self._main_ino:
self._main_ino = nodes[0].get_path()
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
def process(self, contents):
out_file = self._main_ino + ".cpp"
assert self._gcc_preprocess(contents, out_file)
contents = self.read_safe_contents(out_file)
contents = self._join_multiline_strings(contents)
self.write_safe_contents(out_file, self.append_prototypes(contents))
return out_file
def _gcc_preprocess(self, contents, out_file):
tmp_path = tempfile.mkstemp()[1]
self.write_safe_contents(tmp_path, contents)
self.env.Execute(
self.env.VerboseAction(
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file, tmp_path
),
"Converting " + os.path.basename(out_file[:-4]),
)
)
atexit.register(_delete_file, tmp_path)
return os.path.isfile(out_file)
def _join_multiline_strings(self, contents):
if "\\\n" not in contents:
return contents
newlines = []
linenum = 0
stropen = False
for line in contents.split("\n"):
_linenum = self._parse_preproc_line_num(line)
if _linenum is not None:
linenum = _linenum
else:
linenum += 1
if line.endswith("\\"):
if line.startswith('"'):
stropen = True
newlines.append(line[:-1])
continue
if stropen:
newlines[len(newlines) - 1] += line[:-1]
continue
elif stropen and line.endswith(('",', '";')):
newlines[len(newlines) - 1] += line
stropen = False
newlines.append(
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
)
continue
newlines.append(line)
return "\n".join(newlines)
@staticmethod
def _parse_preproc_line_num(line):
if not line.startswith("#"):
return None
tokens = line.split(" ", 3)
if len(tokens) > 2 and tokens[1].isdigit():
return int(tokens[1])
return None
def _parse_prototypes(self, contents):
prototypes = []
reserved_keywords = set(["if", "else", "while"])
for match in self.PROTOTYPE_RE.finditer(contents):
if (
set([match.group(2).strip(), match.group(3).strip()])
& reserved_keywords
):
continue
prototypes.append(match)
return prototypes
def _get_total_lines(self, contents):
total = 0
if contents.endswith("\n"):
contents = contents[:-1]
for line in contents.split("\n")[::-1]:
linenum = self._parse_preproc_line_num(line)
if linenum is not None:
return total + linenum
total += 1
return total
def append_prototypes(self, contents):
prototypes = self._parse_prototypes(contents) or []
# skip already declared prototypes
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
if not prototypes:
return contents
prototype_names = set(m.group(3).strip() for m in prototypes)
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos],
re.M,
)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append(
'#line %d "%s"'
% (
self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/"),
)
)
result.append(contents[split_pos:].strip())
return "\n".join(result)
def ConvertInoToCpp(env):
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
os.path.join(src_dir, "*.pde")
)
if not ino_nodes:
return
c = InoToCPPConverter(env)
out_file = c.convert(ino_nodes)
atexit.register(_delete_file, out_file)
def _delete_file(path):
try:
if os.path.isfile(path):
os.remove(path)
except: # pylint: disable=bare-except
pass
def generate(env):
env.AddMethod(ConvertInoToCpp)
def exists(_):
return True
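
To make the converter concrete: Arduino sketches may call functions before defining them, so InoToCPPConverter prepends "#include <Arduino.h>", collects candidate definitions with PROTOTYPE_RE, inserts the missing forward declarations, and adds "#line" directives so compiler diagnostics still point at the original .ino file. A small self-contained sketch of the prototype-detection step (the sketch body below is invented for the example):

import re

# Same prototype pattern as above, applied to a tiny invented sketch body.
PROTOTYPE_RE = re.compile(
    r"""^(
    (?:template\<.*\>\s*)?        # template
    ([a-z_\d\&]+\*?\s+){1,2}      # return type
    ([a-z_\d]+\s*)                # name of prototype
    \([a-z_,\.\*\&\[\]\s\d]*\)    # arguments
    )\s*(\{|;)                    # must end with `{` or `;`
    """,
    re.X | re.M | re.I,
)

sketch = "void setup() {\n  blink(3);\n}\n\nvoid blink(int times) {\n}\n"
prototypes = [m.group(1).strip() for m in PROTOTYPE_RE.finditer(sketch)]
print(prototypes)  # ['void setup()', 'void blink(int times)'] -> emitted as forward declarations
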


@@ -14,53 +14,48 @@
from __future__ import absolute_import
import glob
import os
from glob import glob
from SCons.Defaults import processDefines # pylint: disable=import-error
import SCons.Defaults # pylint: disable=import-error
import SCons.Subst # pylint: disable=import-error
from platformio.compat import glob_escape
from platformio.managers.core import get_core_package_dir
from platformio.proc import exec_command, where_is_program
def _dump_includes(env):
includes = []
def DumpIntegrationIncludes(env):
result = dict(build=[], compatlib=[], toolchain=[])
for item in env.get("CPPPATH", []):
includes.append(env.subst(item))
result["build"].extend(
[
env.subst("$PROJECT_INCLUDE_DIR"),
env.subst("$PROJECT_SRC_DIR"),
]
)
result["build"].extend(
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
)
# installed libs
for lb in env.GetLibBuilders():
includes.extend(lb.get_include_dirs())
result["compatlib"].extend(
[os.path.abspath(inc) for inc in lb.get_include_dirs()]
)
# includes from toolchains
p = env.PioPlatform()
for name in p.get_installed_packages():
if p.get_package_type(name) != "toolchain":
for pkg in p.get_installed_packages(with_optional=False):
if p.get_package_type(pkg.metadata.name) != "toolchain":
continue
toolchain_dir = glob_escape(p.get_package_dir(name))
toolchain_dir = glob.escape(pkg.path)
toolchain_incglobs = [
os.path.join(toolchain_dir, "*", "include*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
os.path.join(toolchain_dir, "*", "include*"),
]
for g in toolchain_incglobs:
includes.extend(glob(g))
unity_dir = get_core_package_dir("tool-unity")
if unity_dir:
includes.append(unity_dir)
includes.extend([env.subst("$PROJECT_INCLUDE_DIR"), env.subst("$PROJECT_SRC_DIR")])
# remove duplicates
result = []
for item in includes:
item = os.path.realpath(item)
if item not in result:
result.append(item)
result["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])
return result
@@ -91,8 +86,10 @@ def _get_gcc_defines(env):
def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
defines.append(env.subst(item).replace("\\", ""))
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
item = item.strip()
if item:
defines.append(env.subst(item).replace("\\", ""))
# special symbol for Atmel AVR MCU
if env["PIOPLATFORM"] == "atmelavr":
@@ -119,7 +116,7 @@ def _dump_defines(env):
def _get_svd_path(env):
svd_path = env.GetProjectOption("debug_svd_path")
if svd_path:
return os.path.realpath(svd_path)
return os.path.abspath(svd_path)
if "BOARD" not in env:
return None
@@ -134,48 +131,43 @@ def _get_svd_path(env):
# default file from ./platform/misc/svd folder
p = env.PioPlatform()
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
return os.path.realpath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
return None
def _escape_build_flag(flags):
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
def _subst_cmd(env, cmd):
args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
def DumpIDEData(env):
env["__escape_build_flag"] = _escape_build_flag
LINTCCOM = (
"${__escape_build_flag(CFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
LINTCXXCOM = (
"${__escape_build_flag(CXXFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
)
def DumpIntegrationData(env, globalenv):
"""env here is `projenv`"""
data = {
"env_name": env["PIOENV"],
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
"libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()],
"defines": _dump_defines(env),
"includes": _dump_includes(env),
"cc_flags": env.subst(LINTCCOM),
"cxx_flags": env.subst(LINTCXXCOM),
"includes": env.DumpIntegrationIncludes(),
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": env.subst("$PROG_PATH"),
"flash_extra_images": [
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
],
"svd_path": _get_svd_path(env),
"compiler_type": env.GetCompilerType(),
"targets": globalenv.DumpTargets(),
"extra": dict(
flash_images=[
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
]
),
}
data["extra"].update(env.get("IDE_EXTRA_DATA", {}))
env_ = env.Clone()
# https://github.com/platformio/platformio-atom-ide/issues/34
_new_defines = []
for item in processDefines(env_.get("CPPDEFINES", [])):
for item in SCons.Defaults.processDefines(env_.get("CPPDEFINES", [])):
item = item.replace('\\"', '"')
if " " in item:
_new_defines.append(item.replace(" ", "\\\\ "))
@@ -183,7 +175,13 @@ def DumpIDEData(env):
_new_defines.append(item)
env_.Replace(CPPDEFINES=_new_defines)
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
# export C/C++ build flags
data.update(
{
"cc_flags": _subst_cmd(env_, "$CFLAGS $CCFLAGS $CPPFLAGS"),
"cxx_flags": _subst_cmd(env_, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
}
)
return data
@@ -193,5 +191,6 @@ def exists(_):
def generate(env):
env.AddMethod(DumpIDEData)
env.AddMethod(DumpIntegrationIncludes)
env.AddMethod(DumpIntegrationData)
return env
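
DumpIntegrationData replaces the older DumpIDEData helper and returns a plain dict: environment name, defines, the grouped includes from DumpIntegrationIncludes, compiler/gdb paths, C/C++ flags, svd_path, targets, and an "extra" section. A minimal sketch of how an IDE-side consumer might use it (the literal values below are placeholders, not output of this code):

# Illustrative only: `data` stands for the dict returned by DumpIntegrationData above.
data = {
    "env_name": "myenv",                                            # assumed environment name
    "includes": {"build": [], "compatlib": [], "toolchain": []},
    "cc_flags": "-Os -Wall",                                        # placeholder flags
    "extra": {"flash_images": []},
}

# Flatten the grouped include dirs in priority order, e.g. for an editor's IntelliSense config.
include_dirs = (
    data["includes"]["build"]
    + data["includes"]["compatlib"]
    + data["includes"]["toolchain"]
)
print(include_dirs, data["cc_flags"])
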


@@ -12,31 +12,37 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
# pylint: disable=no-self-use, unused-argument, too-many-lines
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=assignment-from-no-return
from __future__ import absolute_import
import hashlib
import io
import os
import re
import sys
from os.path import basename, commonprefix, isdir, isfile, join, realpath, sep
import click
import SCons.Scanner # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
from platformio.managers.lib import LibraryManager
from platformio.clients.http import HTTPClientError, InternetIsOffline
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
from platformio.package.exception import (
MissingPackageManifestError,
UnknownPackageError,
)
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manifest.parser import (
ManifestParserError,
ManifestParserFactory,
)
from platformio.package.meta import PackageItem
from platformio.project.options import ProjectOptions
@@ -44,29 +50,39 @@ class LibBuilderFactory(object):
@staticmethod
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
clsname = "UnknownLibBuilder"
if isfile(join(path, "library.json")):
if os.path.isfile(os.path.join(path, "library.json")):
clsname = "PlatformIOLibBuilder"
else:
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
common_frameworks = set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks)
if common_frameworks:
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
clsname = "%sLibBuilder" % list(common_frameworks)[0].capitalize()
elif used_frameworks:
clsname = "%sLibBuilder" % used_frameworks[0].title()
clsname = "%sLibBuilder" % used_frameworks[0].capitalize()
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
# Handle PlatformIOLibBuilder.manifest.build.builder
# pylint: disable=protected-access
if isinstance(obj, PlatformIOLibBuilder) and obj._manifest.get("build", {}).get(
"builder"
):
obj = getattr(
sys.modules[__name__], obj._manifest.get("build", {}).get("builder")
)(env, path, verbose=verbose)
assert isinstance(obj, LibBuilderBase)
return obj
@staticmethod
def get_used_frameworks(env, path):
if any(
isfile(join(path, fname))
os.path.isfile(os.path.join(path, fname))
for fname in ("library.properties", "keywords.txt")
):
return ["arduino"]
if isfile(join(path, "module.json")):
if os.path.isfile(os.path.join(path, "module.json")):
return ["mbed"]
include_re = re.compile(
@@ -82,7 +98,10 @@ class LibBuilderFactory(object):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
continue
content = fs.get_file_contents(join(root, fname))
with io.open(
os.path.join(root, fname), encoding="utf8", errors="ignore"
) as fp:
content = fp.read()
if not content:
continue
if "Arduino.h" in content and include_re.search(content):
@@ -108,7 +127,7 @@ class LibBuilderBase(object):
def __init__(self, env, path, manifest=None, verbose=False):
self.env = env.Clone()
self.envorigin = env.Clone()
self.path = realpath(env.subst(path))
self.path = os.path.abspath(env.subst(path))
self.verbose = verbose
try:
@@ -119,11 +138,13 @@ class LibBuilderBase(object):
)
self._manifest = {}
self._is_dependent = False
self._is_built = False
self._depbuilders = list()
self._circular_deps = list()
self._processed_files = list()
self.is_dependent = False
self.is_built = False
self.depbuilders = []
self._deps_are_processed = False
self._circular_deps = []
self._processed_files = []
# reset source filter, could be overridden with extra script
self.env["SRC_FILTER"] = ""
@@ -137,21 +158,31 @@ class LibBuilderBase(object):
def __contains__(self, path):
p1 = self.path
p2 = path
if WINDOWS:
if IS_WINDOWS:
p1 = p1.lower()
p2 = p2.lower()
if p1 == p2:
return True
return commonprefix((p1 + sep, p2)) == p1 + sep
if os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep:
return True
# try to resolve paths
p1 = os.path.realpath(p1)
p2 = os.path.realpath(p2)
return os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep
@property
def name(self):
return self._manifest.get("name", basename(self.path))
return self._manifest.get("name", os.path.basename(self.path))
@property
def version(self):
return self._manifest.get("version")
@property
def dependent(self):
"""Backward compatibility with ESP-IDF"""
return self.is_dependent
@property
def dependencies(self):
return self._manifest.get("dependencies")
@@ -167,13 +198,19 @@ class LibBuilderBase(object):
@property
def include_dir(self):
if not all(isdir(join(self.path, d)) for d in ("include", "src")):
return None
return join(self.path, "include")
for name in ("include", "Include"):
d = os.path.join(self.path, name)
if os.path.isdir(d):
return d
return None
@property
def src_dir(self):
return join(self.path, "src") if isdir(join(self.path, "src")) else self.path
for name in ("src", "Src"):
d = os.path.join(self.path, name)
if os.path.isdir(d):
return d
return self.path
def get_include_dirs(self):
items = []
@@ -186,7 +223,9 @@ class LibBuilderBase(object):
@property
def build_dir(self):
lib_hash = hashlib.sha1(hashlib_encode_data(self.path)).hexdigest()[:3]
return join("$BUILD_DIR", "lib%s" % lib_hash, basename(self.path))
return os.path.join(
"$BUILD_DIR", "lib%s" % lib_hash, os.path.basename(self.path)
)
@property
def build_flags(self):
@@ -200,18 +239,6 @@ class LibBuilderBase(object):
def extra_script(self):
return None
@property
def depbuilders(self):
return self._depbuilders
@property
def dependent(self):
return self._is_dependent
@property
def is_built(self):
return self._is_built
@property
def lib_archive(self):
return self.env.GetProjectOption("lib_archive")
@@ -265,14 +292,15 @@ class LibBuilderBase(object):
if self.extra_script:
self.env.SConscriptChdir(1)
self.env.SConscript(
realpath(self.extra_script),
os.path.abspath(self.extra_script),
exports={"env": self.env, "pio_lib_builder": self},
)
self.env.ProcessUnFlags(self.build_unflags)
def process_dependencies(self):
if not self.dependencies:
if not self.dependencies or self._deps_are_processed:
return
self._deps_are_processed = True
for item in self.dependencies:
found = False
for lb in self.env.GetLibBuilders():
@@ -280,7 +308,7 @@ class LibBuilderBase(object):
continue
found = True
if lb not in self.depbuilders:
self.depend_recursive(lb)
self.depend_on(lb)
break
if not found and self.verbose:
@@ -291,14 +319,14 @@ class LibBuilderBase(object):
def get_search_files(self):
items = [
join(self.src_dir, item)
os.path.join(self.src_dir, item)
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
]
include_dir = self.include_dir
if include_dir:
items.extend(
[
join(include_dir, item)
os.path.join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
]
)
@@ -367,7 +395,7 @@ class LibBuilderBase(object):
continue
_f_part = _h_path[: _h_path.rindex(".")]
for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
if not isfile("%s.%s" % (_f_part, ext)):
if not os.path.isfile("%s.%s" % (_f_part, ext)):
continue
_c_path = self.env.File("%s.%s" % (_f_part, ext))
if _c_path not in result:
@@ -375,7 +403,29 @@ class LibBuilderBase(object):
return result
def depend_recursive(self, lb, search_files=None):
def search_deps_recursive(self, search_files=None):
self.process_dependencies()
# when LDF is disabled
if self.lib_ldf_mode == "off":
return
if self.lib_ldf_mode.startswith("deep"):
search_files = self.get_search_files()
lib_inc_map = {}
for inc in self._get_found_includes(search_files):
for lb in self.env.GetLibBuilders():
if inc.get_abspath() in lb:
if lb not in lib_inc_map:
lib_inc_map[lb] = []
lib_inc_map[lb].append(inc.get_abspath())
break
for lb, lb_search_files in lib_inc_map.items():
self.depend_on(lb, search_files=lb_search_files)
def depend_on(self, lb, search_files=None, recursive=True):
def _already_depends(_lb):
if self in _lb.depbuilders:
return True
@@ -393,38 +443,17 @@ class LibBuilderBase(object):
"between `%s` and `%s`\n" % (self.path, lb.path)
)
self._circular_deps.append(lb)
elif lb not in self._depbuilders:
self._depbuilders.append(lb)
elif lb not in self.depbuilders:
self.depbuilders.append(lb)
lb.is_dependent = True
LibBuilderBase._INCLUDE_DIRS_CACHE = None
lb.search_deps_recursive(search_files)
def search_deps_recursive(self, search_files=None):
if not self._is_dependent:
self._is_dependent = True
self.process_dependencies()
if self.lib_ldf_mode.startswith("deep"):
search_files = self.get_search_files()
# when LDF is disabled
if self.lib_ldf_mode == "off":
return
lib_inc_map = {}
for inc in self._get_found_includes(search_files):
for lb in self.env.GetLibBuilders():
if inc.get_abspath() in lb:
if lb not in lib_inc_map:
lib_inc_map[lb] = []
lib_inc_map[lb].append(inc.get_abspath())
break
for lb, lb_search_files in lib_inc_map.items():
self.depend_recursive(lb, lb_search_files)
if recursive:
lb.search_deps_recursive(search_files)
def build(self):
libs = []
for lb in self._depbuilders:
for lb in self.depbuilders:
libs.extend(lb.build())
# copy shared information to self env
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
@@ -433,9 +462,9 @@ class LibBuilderBase(object):
for lb in self._circular_deps:
self.env.PrependUnique(CPPPATH=lb.get_include_dirs())
if self._is_built:
if self.is_built:
return libs
self._is_built = True
self.is_built = True
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
@@ -446,12 +475,22 @@ class LibBuilderBase(object):
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
self.env.PrependUnique(**{key: lb.env.get(key)})
if self.lib_archive:
libs.append(
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
do_not_archive = not self.lib_archive
if not do_not_archive:
nodes = self.env.CollectBuildFiles(
self.build_dir, self.src_dir, self.src_filter
)
else:
if nodes:
libs.append(
self.env.BuildLibrary(
self.build_dir, self.src_dir, self.src_filter, nodes
)
)
else:
do_not_archive = True
if do_not_archive:
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
return libs
@@ -461,23 +500,32 @@ class UnknownLibBuilder(LibBuilderBase):
class ArduinoLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "library.properties")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "library.properties")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
@property
def include_dir(self):
if not all(
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
):
return None
return os.path.join(self.path, "include")
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
if isdir(join(self.path, "src")):
if os.path.isdir(os.path.join(self.path, "src")):
return include_dirs
if isdir(join(self.path, "utility")):
include_dirs.append(join(self.path, "utility"))
if os.path.isdir(os.path.join(self.path, "utility")):
include_dirs.append(os.path.join(self.path, "utility"))
return include_dirs
@property
def src_filter(self):
src_dir = join(self.path, "src")
if isdir(src_dir):
src_dir = os.path.join(self.path, "src")
if os.path.isdir(src_dir):
# pylint: disable=no-member
src_filter = LibBuilderBase.src_filter.fget(self)
for root, _, files in os.walk(src_dir, followlinks=True):
found = False
@@ -488,50 +536,80 @@ class ArduinoLibBuilder(LibBuilderBase):
if not found:
continue
rel_path = root.replace(src_dir, "")
if rel_path.startswith(sep):
rel_path = rel_path[1:] + sep
if rel_path.startswith(os.path.sep):
rel_path = rel_path[1:] + os.path.sep
src_filter.append("-<%s*.[aA][sS][mM]>" % rel_path)
return src_filter
src_filter = []
is_utility = isdir(join(self.path, "utility"))
is_utility = os.path.isdir(os.path.join(self.path, "utility"))
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
# arduino ide ignores files with .asm or .ASM extensions
if ext.lower() == "asm":
continue
src_filter.append("+<*.%s>" % ext)
if is_utility:
src_filter.append("+<utility%s*.%s>" % (sep, ext))
src_filter.append("+<utility%s*.%s>" % (os.path.sep, ext))
return src_filter
@property
def dependencies(self):
# do not include automatically all libraries for build
# chain+ will decide later
return None
@property
def lib_ldf_mode(self):
# pylint: disable=no-member
if not self._manifest.get("dependencies"):
return LibBuilderBase.lib_ldf_mode.fget(self)
missing = object()
global_value = self.env.GetProjectConfig().getraw(
"env:" + self.env["PIOENV"], "lib_ldf_mode", missing
)
if global_value != missing:
return LibBuilderBase.lib_ldf_mode.fget(self)
# automatically enable C++ Preprocessing in runtime
# (Arduino IDE has this behavior)
return "chain+"
def is_frameworks_compatible(self, frameworks):
return util.items_in_list(frameworks, ["arduino", "energia"])
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms", [])
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
@property
def build_flags(self):
ldflags = [
LibBuilderBase.build_flags.fget(self), # pylint: disable=no-member
self._manifest.get("ldflags"),
]
if self._manifest.get("precompiled") in ("true", "full"):
# add to LDPATH {build.mcu} folder
board_config = self.env.BoardConfig()
for key in ("build.mcu", "build.cpu"):
libpath = os.path.join(self.src_dir, board_config.get(key, ""))
if not os.path.isdir(libpath):
continue
self.env.PrependUnique(LIBPATH=libpath)
break
ldflags = [flag for flag in ldflags if flag] # remove empty
return " ".join(ldflags) if ldflags else None
class MbedLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "module.json")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "module.json")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
@property
def include_dir(self):
if isdir(join(self.path, "include")):
return join(self.path, "include")
return None
@property
def src_dir(self):
if isdir(join(self.path, "source")):
return join(self.path, "source")
return LibBuilderBase.src_dir.fget(self)
if os.path.isdir(os.path.join(self.path, "source")):
return os.path.join(self.path, "source")
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@@ -540,13 +618,13 @@ class MbedLibBuilder(LibBuilderBase):
# library with module.json
for p in self._manifest.get("extraIncludes", []):
include_dirs.append(join(self.path, p))
include_dirs.append(os.path.join(self.path, p))
# old mbed library without manifest, add to CPPPATH all folders
if not self._manifest:
for root, _, __ in os.walk(self.path):
part = root.replace(self.path, "").lower()
if any(s in part for s in ("%s." % sep, "test", "example")):
if any(s in part for s in ("%s." % os.path.sep, "test", "example")):
continue
if root not in include_dirs:
include_dirs.append(root)
@@ -558,11 +636,11 @@ class MbedLibBuilder(LibBuilderBase):
def process_extra_options(self):
self._process_mbed_lib_confs()
return super(MbedLibBuilder, self).process_extra_options()
return super().process_extra_options()
def _process_mbed_lib_confs(self):
mbed_lib_paths = [
join(root, "mbed_lib.json")
os.path.join(root, "mbed_lib.json")
for root, _, files in os.walk(self.path)
if "mbed_lib.json" in files
]
@@ -571,8 +649,8 @@ class MbedLibBuilder(LibBuilderBase):
mbed_config_path = None
for p in self.env.get("CPPPATH"):
mbed_config_path = join(self.env.subst(p), "mbed_config.h")
if isfile(mbed_config_path):
mbed_config_path = os.path.join(self.env.subst(p), "mbed_config.h")
if os.path.isfile(mbed_config_path):
break
mbed_config_path = None
if not mbed_config_path:
@@ -639,7 +717,7 @@ class MbedLibBuilder(LibBuilderBase):
def _mbed_conf_append_macros(self, mbed_config_path, macros):
lines = []
with open(mbed_config_path) as fp:
with open(mbed_config_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if line == "#endif":
@@ -658,36 +736,37 @@ class MbedLibBuilder(LibBuilderBase):
if len(tokens) < 2 or tokens[1] not in macros:
lines.append(line)
lines.append("")
with open(mbed_config_path, "w") as fp:
with open(mbed_config_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(lines))
class PlatformIOLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "library.json")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "library.json")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
def _has_arduino_manifest(self):
return isfile(join(self.path, "library.properties"))
return os.path.isfile(os.path.join(self.path, "library.properties"))
@property
def include_dir(self):
if "includeDir" in self._manifest.get("build", {}):
with fs.cd(self.path):
return realpath(self._manifest.get("build").get("includeDir"))
return LibBuilderBase.include_dir.fget(self)
return os.path.abspath(self._manifest.get("build").get("includeDir"))
return LibBuilderBase.include_dir.fget(self) # pylint: disable=no-member
@property
def src_dir(self):
if "srcDir" in self._manifest.get("build", {}):
with fs.cd(self.path):
return realpath(self._manifest.get("build").get("srcDir"))
return LibBuilderBase.src_dir.fget(self)
return os.path.abspath(self._manifest.get("build").get("srcDir"))
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
@property
def src_filter(self):
# pylint: disable=no-member
if "srcFilter" in self._manifest.get("build", {}):
return self._manifest.get("build").get("srcFilter")
if self.env["SRC_FILTER"]:
@@ -700,32 +779,38 @@ class PlatformIOLibBuilder(LibBuilderBase):
def build_flags(self):
if "flags" in self._manifest.get("build", {}):
return self._manifest.get("build").get("flags")
return LibBuilderBase.build_flags.fget(self)
return LibBuilderBase.build_flags.fget(self) # pylint: disable=no-member
@property
def build_unflags(self):
if "unflags" in self._manifest.get("build", {}):
return self._manifest.get("build").get("unflags")
return LibBuilderBase.build_unflags.fget(self)
return LibBuilderBase.build_unflags.fget(self) # pylint: disable=no-member
@property
def extra_script(self):
if "extraScript" in self._manifest.get("build", {}):
return self._manifest.get("build").get("extraScript")
return LibBuilderBase.extra_script.fget(self)
return LibBuilderBase.extra_script.fget(self) # pylint: disable=no-member
@property
def lib_archive(self):
unique_value = "_not_declared_%s" % id(self)
global_value = self.env.GetProjectOption("lib_archive", unique_value)
if global_value != unique_value:
return global_value
missing = object()
global_value = self.env.GetProjectConfig().getraw(
"env:" + self.env["PIOENV"], "lib_archive", missing
)
if global_value != missing:
return self.env.GetProjectConfig().get(
"env:" + self.env["PIOENV"], "lib_archive"
)
# pylint: disable=no-member
return self._manifest.get("build", {}).get(
"libArchive", LibBuilderBase.lib_archive.fget(self)
)
@property
def lib_ldf_mode(self):
# pylint: disable=no-member
return self.validate_ldf_mode(
self._manifest.get("build", {}).get(
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self)
@@ -734,6 +819,7 @@ class PlatformIOLibBuilder(LibBuilderBase):
@property
def lib_compat_mode(self):
# pylint: disable=no-member
return self.validate_compat_mode(
self._manifest.get("build", {}).get(
"libCompatMode", LibBuilderBase.lib_compat_mode.fget(self)
@@ -741,16 +827,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
)
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms")
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
def is_frameworks_compatible(self, frameworks):
items = self._manifest.get("frameworks")
if not items:
return LibBuilderBase.is_frameworks_compatible(self, frameworks)
return util.items_in_list(frameworks, items)
return util.items_in_list(frameworks, self._manifest.get("frameworks") or ["*"])
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@@ -759,10 +839,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
if (
"build" not in self._manifest
and self._has_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))
and not os.path.isdir(os.path.join(self.path, "src"))
and os.path.isdir(os.path.join(self.path, "utility"))
):
include_dirs.append(join(self.path, "utility"))
include_dirs.append(os.path.join(self.path, "utility"))
for path in self.env.get("CPPPATH", []):
if path not in self.envorigin.get("CPPPATH", []):
@@ -775,13 +855,13 @@ class ProjectAsLibBuilder(LibBuilderBase):
def __init__(self, env, *args, **kwargs):
# backup original value, will be reset in base.__init__
project_src_filter = env.get("SRC_FILTER")
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
super().__init__(env, *args, **kwargs)
self.env["SRC_FILTER"] = project_src_filter
@property
def include_dir(self):
include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
return include_dir if isdir(include_dir) else None
return include_dir if os.path.isdir(include_dir) else None
@property
def src_dir(self):
@@ -790,7 +870,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
def get_include_dirs(self):
include_dirs = []
project_include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
if isdir(project_include_dir):
if os.path.isdir(project_include_dir):
include_dirs.append(project_include_dir)
for include_dir in LibBuilderBase.get_include_dirs(self):
if include_dir not in include_dirs:
@@ -801,10 +881,10 @@ class ProjectAsLibBuilder(LibBuilderBase):
# project files
items = LibBuilderBase.get_search_files(self)
# test files
if "__test" in COMMAND_LINE_TARGETS:
if "test" in self.env.GetBuildType():
items.extend(
[
join("$PROJECT_TEST_DIR", item)
os.path.join("$PROJECT_TEST_DIR", item)
for item in self.env.MatchSourceFiles(
"$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
)
@@ -814,7 +894,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def lib_ldf_mode(self):
mode = LibBuilderBase.lib_ldf_mode.fget(self)
mode = LibBuilderBase.lib_ldf_mode.fget(self) # pylint: disable=no-member
if not mode.startswith("chain"):
return mode
# parse all project files
@@ -822,45 +902,54 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def src_filter(self):
# pylint: disable=no-member
return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self)
@property
def build_flags(self):
# pylint: disable=no-member
return self.env.get("SRC_BUILD_FLAGS") or LibBuilderBase.build_flags.fget(self)
@property
def dependencies(self):
return self.env.GetProjectOption("lib_deps", [])
def process_extra_options(self):
# skip for project, options are already processed
pass
with fs.cd(self.path):
self.env.ProcessFlags(self.build_flags)
self.env.ProcessUnFlags(self.build_unflags)
def install_dependencies(self):
def _is_builtin(uri):
def _is_builtin(spec):
for lb in self.env.GetLibBuilders():
if lb.name == uri:
if lb.name == spec:
return True
return False
not_found_uri = []
for uri in self.dependencies:
not_found_specs = []
for spec in self.dependencies:
# check if built-in library
if _is_builtin(uri):
if _is_builtin(spec):
continue
found = False
for storage_dir in self.env.GetLibSourceDirs():
lm = LibraryManager(storage_dir)
if lm.get_package_dir(*lm.parse_pkg_uri(uri)):
lm = LibraryPackageManager(storage_dir)
if lm.get_package(spec):
found = True
break
if not found:
not_found_uri.append(uri)
not_found_specs.append(spec)
did_install = False
lm = LibraryManager(self.env.subst(join("$PROJECT_LIBDEPS_DIR", "$PIOENV")))
for uri in not_found_uri:
lm = LibraryPackageManager(
self.env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
)
for spec in not_found_specs:
try:
lm.install(uri)
lm.install(spec)
did_install = True
except (exception.LibNotFound, exception.InternetIsOffline) as e:
except (HTTPClientError, UnknownPackageError, InternetIsOffline) as e:
click.secho("Warning! %s" % e, fg="yellow")
# reset cache
@@ -868,20 +957,22 @@ class ProjectAsLibBuilder(LibBuilderBase):
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None)
def process_dependencies(self): # pylint: disable=too-many-branches
for uri in self.dependencies:
found_lbs = []
for spec in self.dependencies:
found = False
for storage_dir in self.env.GetLibSourceDirs():
if found:
break
lm = LibraryManager(storage_dir)
lib_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
if not lib_dir:
lm = LibraryPackageManager(storage_dir)
pkg = lm.get_package(spec)
if not pkg:
continue
for lb in self.env.GetLibBuilders():
if lib_dir != lb.path:
if pkg.path != lb.path:
continue
if lb not in self.depbuilders:
self.depend_recursive(lb)
self.depend_on(lb, recursive=False)
found_lbs.append(lb)
found = True
break
if found:
@@ -890,15 +981,19 @@ class ProjectAsLibBuilder(LibBuilderBase):
# look for built-in libraries by a name
# which don't have package manifest
for lb in self.env.GetLibBuilders():
if lb.name != uri:
if lb.name != spec:
continue
if lb not in self.depbuilders:
self.depend_recursive(lb)
self.depend_on(lb)
found = True
break
# process library dependencies
for lb in found_lbs:
lb.search_deps_recursive()
def build(self):
self._is_built = True # do not build Project now
self.is_built = True # do not build Project now
result = LibBuilderBase.build(self)
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
return result
@@ -936,7 +1031,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None:
return sorted(
DefaultEnvironment()["__PIO_LIB_BUILDERS"],
key=lambda lb: 0 if lb.dependent else 1,
key=lambda lb: 0 if lb.is_dependent else 1,
)
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[])
@@ -945,12 +1040,16 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
found_incompat = False
for storage_dir in env.GetLibSourceDirs():
storage_dir = realpath(storage_dir)
if not isdir(storage_dir):
storage_dir = os.path.abspath(storage_dir)
if not os.path.isdir(storage_dir):
continue
for item in sorted(os.listdir(storage_dir)):
lib_dir = join(storage_dir, item)
if item == "__cores__" or not isdir(lib_dir):
lib_dir = os.path.join(storage_dir, item)
if item == "__cores__":
continue
if LibraryPackageManager.is_symlink(lib_dir):
lib_dir, _ = LibraryPackageManager.resolve_symlink(lib_dir)
if not lib_dir or not os.path.isdir(lib_dir):
continue
try:
lb = LibBuilderFactory.new(env, lib_dir)
@@ -982,13 +1081,21 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
def ConfigureProjectLibBuilder(env):
def _get_vcs_info(lb):
path = LibraryManager.get_src_manifest_path(lb.path)
return fs.load_json(path) if path else None
_pm_storage = {}
def _get_lib_license(pkg):
storage_dir = os.path.dirname(os.path.dirname(pkg.path))
if storage_dir not in _pm_storage:
_pm_storage[storage_dir] = LibraryPackageManager(storage_dir)
try:
return (_pm_storage[storage_dir].load_manifest(pkg) or {}).get("license")
except MissingPackageManifestError:
pass
return None
def _correct_found_libs(lib_builders):
# build full dependency graph
found_lbs = [lb for lb in lib_builders if lb.dependent]
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
for lb in lib_builders:
if lb in found_lbs:
lb.search_deps_recursive(lb.get_search_files())
@@ -1000,27 +1107,29 @@ def ConfigureProjectLibBuilder(env):
def _print_deps_tree(root, level=0):
margin = "| " * (level)
for lb in root.depbuilders:
title = "<%s>" % lb.name
vcs_info = _get_vcs_info(lb)
if lb.version:
title += " %s" % lb.version
if vcs_info and vcs_info.get("version"):
title += " #%s" % vcs_info.get("version")
title = lb.name
pkg = PackageItem(lb.path)
if pkg.metadata:
title += " @ %s" % pkg.metadata.version
elif lb.version:
title += " @ %s" % lb.version
click.echo("%s|-- %s" % (margin, title), nl=False)
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
if vcs_info:
click.echo(" [%s]" % vcs_info.get("url"), nl=False)
click.echo(" (", nl=False)
click.echo(lb.path, nl=False)
click.echo(
" (License: %s, " % (_get_lib_license(pkg) or "Unknown"), nl=False
)
if pkg.metadata and pkg.metadata.spec.external:
click.echo("URI: %s, " % pkg.metadata.spec.uri, nl=False)
click.echo("Path: %s" % lb.path, nl=False)
click.echo(")", nl=False)
click.echo("")
if lb.depbuilders:
_print_deps_tree(lb, level + 1)
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project) # pylint: disable=no-member
click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
click.echo("LDF: Library Dependency Finder -> https://bit.ly/configure-pio-ldf")
click.echo(
"LDF Modes: Finder ~ %s, Compatibility ~ %s"
% (ldf_mode, project.lib_compat_mode)


@@ -14,16 +14,30 @@
from __future__ import absolute_import
from hashlib import md5
from os import makedirs
from os.path import isdir, isfile, join
import hashlib
import os
import re
from platformio import fs
from platformio.compat import WINDOWS, hashlib_encode_data
from SCons.Platform import TempFileMunge # pylint: disable=import-error
from SCons.Subst import quote_spaces # pylint: disable=import-error
# Windows CLI limits the command length to 8192 characters
# Leave 2000 chars for flags and other options
MAX_LINE_LENGTH = 6000 if WINDOWS else 128072
from platformio.compat import IS_WINDOWS, hashlib_encode_data
# The following limits apply, depending on the platform:
# - Windows = 8192
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
def tempfile_arg_esc_func(arg):
arg = quote_spaces(arg)
if not IS_WINDOWS:
return arg
# GCC requires double Windows slashes, let's use UNIX separator
return WINPATHSEP_RE.sub(r"/\1", arg)
def long_sources_hook(env, sources):
@@ -42,32 +56,17 @@ def long_sources_hook(env, sources):
return '@"%s"' % _file_long_data(env, " ".join(data))
def long_incflags_hook(env, incflags):
_incflags = env.subst(incflags).replace("\\", "/")
if len(_incflags) < MAX_LINE_LENGTH:
return incflags
# fix space in paths
data = []
for line in _incflags.split(" -I"):
line = line.strip()
if not line.startswith("-I"):
line = "-I" + line
data.append('-I"%s"' % line[2:])
return '@"%s"' % _file_long_data(env, " ".join(data))
def _file_long_data(env, data):
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
tmp_file = os.path.join(
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
)
if isfile(tmp_file):
if os.path.isfile(tmp_file):
return tmp_file
fs.write_file_contents(tmp_file, data)
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
return tmp_file
@@ -76,17 +75,21 @@ def exists(_):
def generate(env):
env.Replace(_long_sources_hook=long_sources_hook)
env.Replace(_long_incflags_hook=long_incflags_hook)
coms = {}
for key in ("ARCOM", "LINKCOM"):
coms[key] = env.get(key, "").replace(
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
)
for key in ("_CCCOMCOM", "ASPPCOM"):
coms[key] = env.get(key, "").replace(
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
)
env.Replace(**coms)
kwargs = dict(
_long_sources_hook=long_sources_hook,
TEMPFILE=TempFileMunge,
MAXLINELENGTH=MAX_LINE_LENGTH,
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
TEMPFILESUFFIX=".tmp",
TEMPFILEDIR="$BUILD_DIR",
)
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
)
env.Replace(**kwargs)
return env
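
The rewrite above drops the custom include-flags hook in favor of SCons' built-in TempFileMunge: once an expanded command exceeds MAXLINELENGTH, SCons writes the arguments to a temporary response file in $BUILD_DIR and hands it to the tool via the @file convention. A stripped-down sketch of the same response-file idea outside SCons (the compiler name and flags are placeholders):

import os
import subprocess
import tempfile

MAX_LINE_LENGTH = 8192 - 512  # Windows-like limit, mirroring the constant above

def run_with_response_file(program, args):
    """Run `program`, switching to a GCC-style @response-file when the command is too long."""
    if len(" ".join([program] + args)) <= MAX_LINE_LENGTH:
        return subprocess.call([program] + args)
    fd, path = tempfile.mkstemp(suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf8") as fp:
            fp.write("\n".join(args))
        return subprocess.call([program, "@%s" % path])  # most GCC-like tools accept @file
    finally:
        os.remove(path)

# Hypothetical usage:
# run_with_response_file("gcc", ["-c", "main.c", "-o", "main.o"] + many_include_flags)
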


@@ -14,215 +14,19 @@
from __future__ import absolute_import
import atexit
import re
import os
import sys
from os import environ, remove, walk
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
from tempfile import mkstemp
from SCons.Action import Action # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from platformio import fs, util
from platformio.compat import glob_escape
from platformio.managers.core import get_core_package_dir
from platformio.proc import exec_command
class InoToCPPConverter(object):
PROTOTYPE_RE = re.compile(
r"""^(
(?:template\<.*\>\s*)? # template
([a-z_\d\&]+\*?\s+){1,2} # return type
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*(\{|;) # must end with `{` or `;`
""",
re.X | re.M | re.I,
)
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
def __init__(self, env):
self.env = env
self._main_ino = None
def is_main_node(self, contents):
return self.DETECTMAIN_RE.search(contents)
def convert(self, nodes):
contents = self.merge(nodes)
if not contents:
return None
return self.process(contents)
def merge(self, nodes):
assert nodes
lines = []
for node in nodes:
contents = fs.get_file_contents(node.get_path())
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
if self.is_main_node(contents):
lines = _lines + lines
self._main_ino = node.get_path()
else:
lines.extend(_lines)
if not self._main_ino:
self._main_ino = nodes[0].get_path()
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
def process(self, contents):
out_file = self._main_ino + ".cpp"
assert self._gcc_preprocess(contents, out_file)
contents = fs.get_file_contents(out_file)
contents = self._join_multiline_strings(contents)
fs.write_file_contents(
out_file, self.append_prototypes(contents), errors="backslashreplace"
)
return out_file
def _gcc_preprocess(self, contents, out_file):
tmp_path = mkstemp()[1]
fs.write_file_contents(tmp_path, contents, errors="backslashreplace")
self.env.Execute(
self.env.VerboseAction(
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file, tmp_path
),
"Converting " + basename(out_file[:-4]),
)
)
atexit.register(_delete_file, tmp_path)
return isfile(out_file)
def _join_multiline_strings(self, contents):
if "\\\n" not in contents:
return contents
newlines = []
linenum = 0
stropen = False
for line in contents.split("\n"):
_linenum = self._parse_preproc_line_num(line)
if _linenum is not None:
linenum = _linenum
else:
linenum += 1
if line.endswith("\\"):
if line.startswith('"'):
stropen = True
newlines.append(line[:-1])
continue
if stropen:
newlines[len(newlines) - 1] += line[:-1]
continue
elif stropen and line.endswith(('",', '";')):
newlines[len(newlines) - 1] += line
stropen = False
newlines.append(
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
)
continue
newlines.append(line)
return "\n".join(newlines)
@staticmethod
def _parse_preproc_line_num(line):
if not line.startswith("#"):
return None
tokens = line.split(" ", 3)
if len(tokens) > 2 and tokens[1].isdigit():
return int(tokens[1])
return None
def _parse_prototypes(self, contents):
prototypes = []
reserved_keywords = set(["if", "else", "while"])
for match in self.PROTOTYPE_RE.finditer(contents):
if (
set([match.group(2).strip(), match.group(3).strip()])
& reserved_keywords
):
continue
prototypes.append(match)
return prototypes
def _get_total_lines(self, contents):
total = 0
if contents.endswith("\n"):
contents = contents[:-1]
for line in contents.split("\n")[::-1]:
linenum = self._parse_preproc_line_num(line)
if linenum is not None:
return total + linenum
total += 1
return total
def append_prototypes(self, contents):
prototypes = self._parse_prototypes(contents) or []
# skip already declared prototypes
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
if not prototypes:
return contents
prototype_names = set(m.group(3).strip() for m in prototypes)
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos],
re.M,
)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append(
'#line %d "%s"'
% (
self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/"),
)
)
result.append(contents[split_pos:].strip())
return "\n".join(result)
def ConvertInoToCpp(env):
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
ino_nodes = env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde"))
if not ino_nodes:
return
c = InoToCPPConverter(env)
out_file = c.convert(ino_nodes)
atexit.register(_delete_file, out_file)
def _delete_file(path):
try:
if isfile(path):
remove(path)
except: # pylint: disable=bare-except
pass
@util.memoized()
def _get_compiler_type(env):
def GetCompilerType(env):
if env.subst("$CC").endswith("-gcc"):
return "gcc"
try:
sysenv = environ.copy()
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
except OSError:
@@ -237,15 +41,11 @@ def _get_compiler_type(env):
return None
def GetCompilerType(env):
return _get_compiler_type(env)
def GetActualLDScript(env):
def _lookup_in_ldpath(script):
for d in env.get("LIBPATH", []):
path = join(env.subst(d), script)
if isfile(path):
path = os.path.join(env.subst(d), script)
if os.path.isfile(path):
return path
return None
@@ -264,7 +64,7 @@ def GetActualLDScript(env):
else:
continue
script = env.subst(raw_script.replace('"', "").strip())
if isfile(script):
if os.path.isfile(script):
return script
path = _lookup_in_ldpath(script)
if path:
@@ -286,30 +86,7 @@ def GetActualLDScript(env):
env.Exit(1)
def VerboseAction(_, act, actstr):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
return act
return Action(act, actstr)
def PioClean(env, clean_dir):
if not isdir(clean_dir):
print("Build environment is clean")
env.Exit(0)
clean_rel_path = relpath(clean_dir)
for root, _, files in walk(clean_dir):
for f in files:
dst = join(root, f)
remove(dst)
print(
"Removed %s" % (dst if clean_rel_path.startswith(".") else relpath(dst))
)
print("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
def ConfigureDebugFlags(env):
def ConfigureDebugTarget(env):
def _cleanup_debug_flags(scope):
if scope not in env:
return
@@ -324,7 +101,13 @@ def ConfigureDebugFlags(env):
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
_cleanup_debug_flags(scope)
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
debug_flags = env.ParseFlags(
env.get("PIODEBUGFLAGS")
if env.get("PIODEBUGFLAGS")
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
else env.GetProjectOption("debug_build_flags")
)
env.MergeFlags(debug_flags)
optimization_flags = [
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
@@ -334,22 +117,6 @@ def ConfigureDebugFlags(env):
env.AppendUnique(ASFLAGS=optimization_flags, LINKFLAGS=optimization_flags)
def ConfigureTestTarget(env):
env.Append(
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")],
)
unitylib = env.BuildLibrary(
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
)
env.Prepend(LIBS=[unitylib])
src_filter = ["+<*.cpp>", "+<*.c>"]
if "PIOTEST_RUNNING_NAME" in env:
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter)
def GetExtraScripts(env, scope):
items = []
for item in env.GetProjectOption("extra_scripts", []):
@@ -360,20 +127,17 @@ def GetExtraScripts(env, scope):
if not items:
return items
with fs.cd(env.subst("$PROJECT_DIR")):
return [realpath(item) for item in items]
return [os.path.abspath(env.subst(item)) for item in items]
def generate(env):
env.AddMethod(GetCompilerType)
env.AddMethod(GetActualLDScript)
env.AddMethod(ConfigureDebugTarget)
env.AddMethod(GetExtraScripts)
# backward compatibility with the Zephyr build script
env.AddMethod(ConfigureDebugTarget, "ConfigureDebugFlags")
def exists(_):
return True
def generate(env):
env.AddMethod(ConvertInoToCpp)
env.AddMethod(GetCompilerType)
env.AddMethod(GetActualLDScript)
env.AddMethod(VerboseAction)
env.AddMethod(PioClean)
env.AddMethod(ConfigureDebugFlags)
env.AddMethod(ConfigureTestTarget)
env.AddMethod(GetExtraScripts)
return env
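
The prototype-insertion step above can be hard to picture from the converter alone. Below is a rough, self-contained illustration of the idea behind append_prototypes(): forward declarations are emitted before the first function definition and a "#line" directive keeps compiler diagnostics pointing at the original .ino file. The regex is a simplified stand-in, not the converter's actual PROTOTYPE_RE.

import re

# simplified stand-in for PROTOTYPE_RE; only matches "void"/"int" definitions
SIMPLE_PROTOTYPE_RE = re.compile(r"^(void|int)\s+(\w+)\s*\([^)]*\)\s*(?=\{)", re.M)

def demo_append_prototypes(contents, ino_name="demo.ino"):
    matches = list(SIMPLE_PROTOTYPE_RE.finditer(contents))
    if not matches:
        return contents
    split_pos = matches[0].start()
    # line number of the first definition in the original sketch
    first_def_line = contents[:split_pos].count("\n") + 1
    prototypes = "%s;" % ";\n".join(m.group(0).strip() for m in matches)
    return "\n".join(
        [
            contents[:split_pos].rstrip(),
            prototypes,
            '#line %d "%s"' % (first_def_line, ino_name),
            contents[split_pos:],
        ]
    )

if __name__ == "__main__":
    print(demo_append_prototypes("#include <Arduino.h>\n\nvoid setup() {\n}\n\nvoid loop() {\n}\n"))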

View File

@@ -14,31 +14,36 @@
from __future__ import absolute_import
import os
import sys
from os.path import isdir, isfile, join
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.compat import WINDOWS
from platformio.managers.platform import PlatformFactory
from platformio import fs, util
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.package.meta import PackageItem
from platformio.package.version import get_original_version
from platformio.platform.exception import UnknownBoard
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectOptions
# pylint: disable=too-many-branches, too-many-locals
@util.memoized()
def PioPlatform(env):
variables = env.GetProjectOptions(as_dict=True)
if "framework" in variables:
# support PIO Core 3.0 dev/platforms
variables["pioframework"] = variables["framework"]
p = PlatformFactory.newPlatform(env["PLATFORM_MANIFEST"])
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
def _PioPlatform():
env = DefaultEnvironment()
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS)
return p
def PioPlatform(_):
return _PioPlatform()
def BoardConfig(env, board=None):
with fs.cd(env.subst("$PROJECT_DIR")):
try:
@@ -46,46 +51,51 @@ def BoardConfig(env, board=None):
board = board or env.get("BOARD")
assert board, "BoardConfig: Board is not defined"
return p.board_config(board)
except (AssertionError, exception.UnknownBoard) as e:
except (AssertionError, UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
return None
def GetFrameworkScript(env, framework):
p = env.PioPlatform()
assert p.frameworks and framework in p.frameworks
script_path = env.subst(p.frameworks[framework]["script"])
if not isfile(script_path):
script_path = join(p.get_dir(), script_path)
if not os.path.isfile(script_path):
script_path = os.path.join(p.get_dir(), script_path)
return script_path
def LoadPioPlatform(env):
p = env.PioPlatform()
installed_packages = p.get_installed_packages()
# Ensure real platform name
env["PIOPLATFORM"] = p.name
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
for name in installed_packages:
type_ = p.get_package_type(name)
for pkg in p.get_installed_packages():
type_ = p.get_package_type(pkg.metadata.name)
if type_ not in ("toolchain", "uploader", "debugger"):
continue
pkg_dir = p.get_package_dir(name)
env.PrependENVPath(
"PATH", join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir
"PATH",
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path,
)
if not WINDOWS and isdir(join(pkg_dir, "lib")) and type_ != "toolchain":
if (
not IS_WINDOWS
and os.path.isdir(os.path.join(pkg.path, "lib"))
and type_ != "toolchain"
):
env.PrependENVPath(
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"),
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
os.path.join(pkg.path, "lib"),
)
# Platform specific LD Scripts
if isdir(join(p.get_dir(), "ldscripts")):
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
if os.path.isdir(os.path.join(p.get_dir(), "ldscripts")):
env.Prepend(LIBPATH=[os.path.join(p.get_dir(), "ldscripts")])
if "BOARD" not in env:
return
@@ -125,6 +135,7 @@ def LoadPioPlatform(env):
def PrintConfiguration(env): # pylint: disable=too-many-statements
platform = env.PioPlatform()
pkg_metadata = PackageItem(platform.get_dir()).metadata
board_config = env.BoardConfig() if "BOARD" in env else None
def _get_configuration_data():
@@ -139,14 +150,19 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
)
def _get_plaform_data():
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
src_manifest_path = platform.pm.get_src_manifest_path(platform.get_dir())
if src_manifest_path:
src_manifest = fs.load_json(src_manifest_path)
if "version" in src_manifest:
data.append("#" + src_manifest["version"])
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
data.append("(%s)" % src_manifest["url"])
data = [
"PLATFORM: %s (%s)"
% (
platform.title,
pkg_metadata.version if pkg_metadata else platform.version,
)
]
if (
int(ARGUMENTS.get("PIOVERBOSE", 0))
and pkg_metadata
and pkg_metadata.spec.external
):
data.append("(%s)" % pkg_metadata.spec.uri)
if board_config:
data.extend([">", board_config.get("name")])
return data
@@ -165,7 +181,8 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
ram = board_config.get("upload", {}).get("maximum_ram_size")
flash = board_config.get("upload", {}).get("maximum_size")
data.append(
"%s RAM, %s Flash" % (fs.format_filesize(ram), fs.format_filesize(flash))
"%s RAM, %s Flash"
% (fs.humanize_file_size(ram), fs.humanize_file_size(flash))
)
return data
@@ -196,20 +213,14 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
def _get_packages_data():
data = []
for name, options in platform.packages.items():
if options.get("optional"):
continue
pkg_dir = platform.get_package_dir(name)
if not pkg_dir:
continue
manifest = platform.pm.load_manifest(pkg_dir)
original_version = util.get_original_version(manifest["version"])
info = "%s %s" % (manifest["name"], manifest["version"])
for item in platform.dump_used_packages():
original_version = get_original_version(item["version"])
info = "%s @ %s" % (item["name"], item["version"])
extra = []
if original_version:
extra.append(original_version)
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(manifest["__src_url"])
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(item["src_url"])
if extra:
info += " (%s)" % ", ".join(extra)
data.append(info)

View File

@@ -14,7 +14,8 @@
from __future__ import absolute_import
from platformio.project.config import ProjectConfig, ProjectOptions
from platformio.compat import MISSING
from platformio.project.config import ProjectConfig
def GetProjectConfig(env):
@@ -25,20 +26,22 @@ def GetProjectOptions(env, as_dict=False):
return env.GetProjectConfig().items(env=env["PIOENV"], as_dict=as_dict)
def GetProjectOption(env, option, default=None):
def GetProjectOption(env, option, default=MISSING):
return env.GetProjectConfig().get("env:" + env["PIOENV"], option, default)
def LoadProjectOptions(env):
for option, value in env.GetProjectOptions():
option_meta = ProjectOptions.get("env." + option)
config = env.GetProjectConfig()
section = "env:" + env["PIOENV"]
for option in config.options(section):
option_meta = config.find_option_meta(section, option)
if (
not option_meta
or not option_meta.buildenvvar
or option_meta.buildenvvar in env
):
continue
env[option_meta.buildenvvar] = value
env[option_meta.buildenvvar] = config.get(section, option)
def exists(_):

View File

@@ -16,6 +16,7 @@
from __future__ import absolute_import
import json
import sys
from os import environ, makedirs, remove
from os.path import isdir, join, splitdrive
@@ -23,9 +24,8 @@ from os.path import isdir, join, splitdrive
from elftools.elf.descriptions import describe_sh_flags
from elftools.elf.elffile import ELFFile
from platformio.compat import dump_json_to_unicode
from platformio.compat import IS_WINDOWS
from platformio.proc import exec_command
from platformio.util import get_systype
def _run_tool(cmd, env, tool_args):
@@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
makedirs(build_dir)
tmp_file = join(build_dir, "size-data-longcmd.txt")
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write("\n".join(tool_args))
cmd.append("@" + tmp_file)
@@ -164,7 +164,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
location = symbol_locations.get(hex(symbol["addr"]))
if not location or "?" in location:
continue
if "windows" in get_systype():
if IS_WINDOWS:
drive, tail = splitdrive(location)
location = join(drive.upper(), tail)
symbol["file"] = location
@@ -220,7 +220,7 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
"sections": sections,
}
files = dict()
files = {}
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
file_path = symbol.get("file") or "unknown"
if not files.get(file_path, {}):
@@ -235,14 +235,16 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
files[file_path]["symbols"].append(symbol)
data["memory"]["files"] = list()
data["memory"]["files"] = []
for k, v in files.items():
file_data = {"path": k}
file_data.update(v)
data["memory"]["files"].append(file_data)
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
fp.write(dump_json_to_unicode(data))
with open(
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
) as fp:
fp.write(json.dumps(data))
def exists(_):

View File

@@ -0,0 +1,134 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from SCons.Action import Action # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import AlwaysBuild # pylint: disable=import-error
from platformio import compat, fs
def VerboseAction(_, act, actstr):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
return act
return Action(act, actstr)
def PioClean(env, clean_all=False):
def _relpath(path):
if compat.IS_WINDOWS:
prefix = os.getcwd()[:2].lower()
if (
":" not in prefix
or not path.lower().startswith(prefix)
or os.path.relpath(path).startswith("..")
):
return path
return os.path.relpath(path)
def _clean_dir(path):
clean_rel_path = _relpath(path)
for root, _, files in os.walk(path):
for f in files:
dst = os.path.join(root, f)
os.remove(dst)
print(
"Removed %s"
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
)
build_dir = env.subst("$BUILD_DIR")
libdeps_dir = env.subst("$PROJECT_LIBDEPS_DIR")
if os.path.isdir(build_dir):
_clean_dir(build_dir)
fs.rmtree(build_dir)
else:
print("Build environment is clean")
if clean_all and os.path.isdir(libdeps_dir):
_clean_dir(libdeps_dir)
fs.rmtree(libdeps_dir)
print("Done cleaning")
def AddTarget( # pylint: disable=too-many-arguments
env,
name,
dependencies,
actions,
title=None,
description=None,
group="General",
always_build=True,
):
if "__PIO_TARGETS" not in env:
env["__PIO_TARGETS"] = {}
assert name not in env["__PIO_TARGETS"]
env["__PIO_TARGETS"][name] = dict(
name=name, title=title, description=description, group=group
)
target = env.Alias(name, dependencies, actions)
if always_build:
AlwaysBuild(target)
return target
def AddPlatformTarget(env, *args, **kwargs):
return env.AddTarget(group="Platform", *args, **kwargs)
def AddCustomTarget(env, *args, **kwargs):
return env.AddTarget(group="Custom", *args, **kwargs)
def DumpTargets(env):
targets = env.get("__PIO_TARGETS") or {}
# pre-fill default targets if embedded dev-platform
if env.PioPlatform().is_embedded() and not any(
t["group"] == "Platform" for t in targets.values()
):
targets["upload"] = dict(name="upload", group="Platform", title="Upload")
targets["compiledb"] = dict(
name="compiledb",
title="Compilation Database",
description="Generate compilation database `compile_commands.json`",
group="Advanced",
)
targets["clean"] = dict(name="clean", title="Clean", group="General")
targets["cleanall"] = dict(
name="cleanall",
title="Clean All",
group="General",
description="Clean a build environment and installed library dependencies",
)
return list(targets.values())
def exists(_):
return True
def generate(env):
env.AddMethod(VerboseAction)
env.AddMethod(PioClean)
env.AddMethod(AddTarget)
env.AddMethod(AddPlatformTarget)
env.AddMethod(AddCustomTarget)
env.AddMethod(DumpTargets)
return env
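
The AddTarget/AddCustomTarget methods registered above are what extra scripts use to expose their own build targets. A minimal extra_scripts sketch (the target name, title, and action commands are placeholders); once registered, it can be run with "pio run -t envdump":

Import("env")  # provided by SCons when PlatformIO loads an extra script

env.AddCustomTarget(
    name="envdump",
    dependencies=None,
    actions=["pio system info", "python --version"],
    title="Env Dump",
    description="Print PlatformIO and Python environment details",
)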

View File

@@ -0,0 +1,63 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from platformio.builder.tools import platformio as piotool
from platformio.test.result import TestSuite
from platformio.test.runners.factory import TestRunnerFactory
def ConfigureTestTarget(env):
env.Append(
CPPDEFINES=["UNIT_TEST", "PIO_UNIT_TESTING"],
PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piotool.SRC_BUILD_EXT],
)
env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])
if "PIOTEST_RUNNING_NAME" in env:
test_name = env["PIOTEST_RUNNING_NAME"]
while True:
test_name = os.path.dirname(test_name) # parent dir
# skip nested tests (user's side issue?)
if not test_name or os.path.basename(test_name).startswith("test_"):
break
env.Prepend(
PIOTEST_SRC_FILTER=[
f"+<{test_name}{os.path.sep}*.{ext}>"
for ext in piotool.SRC_BUILD_EXT
],
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
)
env.Prepend(
PIOTEST_SRC_FILTER=[f"+<$PIOTEST_RUNNING_NAME{os.path.sep}>"],
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", "$PIOTEST_RUNNING_NAME")],
)
test_runner = TestRunnerFactory.new(
TestSuite(env["PIOENV"], env.get("PIOTEST_RUNNING_NAME", "*")),
env.GetProjectConfig(),
)
test_runner.configure_build_env(env)
def generate(env):
env.AddMethod(ConfigureTestTarget)
def exists(_):
return True

View File

@@ -12,25 +12,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
from __future__ import absolute_import
import os
import re
import sys
from fnmatch import fnmatch
from os import environ
from os.path import isfile, join
from shutil import copyfile
from time import sleep
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from serial import Serial, SerialException
from platformio import exception, fs, util
from platformio.compat import WINDOWS
from platformio import exception, fs
from platformio.device.finder import find_mbed_disk, find_serial_port, is_pattern_port
from platformio.device.list import list_serial_ports
from platformio.proc import exec_command
# pylint: disable=unused-argument
def FlushSerialBuffer(env, port):
s = Serial(env.subst(port))
@@ -62,7 +61,7 @@ def WaitForNewSerialPort(env, before):
elapsed = 0
before = [p["port"] for p in before]
while elapsed < 5 and new_port is None:
now = [p["port"] for p in util.get_serial_ports()]
now = [p["port"] for p in list_serial_ports()]
for p in now:
if p not in before:
new_port = p
@@ -97,67 +96,28 @@ def WaitForNewSerialPort(env, before):
def AutodetectUploadPort(*args, **kwargs):
env = args[0]
def _get_pattern():
if "UPLOAD_PORT" not in env:
return None
if set(["*", "?", "[", "]"]) & set(env["UPLOAD_PORT"]):
return env["UPLOAD_PORT"]
return None
def _is_match_pattern(port):
pattern = _get_pattern()
if not pattern:
return True
return fnmatch(port, pattern)
def _look_for_mbed_disk():
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
for item in util.get_logical_devices():
if item["path"].startswith("/net") or not _is_match_pattern(item["path"]):
continue
mbed_pages = [join(item["path"], n) for n in ("mbed.htm", "mbed.html")]
if any(isfile(p) for p in mbed_pages):
return item["path"]
if item["name"] and any(l in item["name"].lower() for l in msdlabels):
return item["path"]
return None
def _look_for_serial_port():
port = None
board_hwids = []
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
if "BOARD" in env and "build.hwids" in env.BoardConfig():
board_hwids = env.BoardConfig().get("build.hwids")
for item in util.get_serial_ports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
continue
port = item["port"]
if upload_protocol.startswith("blackmagic"):
if WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
return port
return port
if "UPLOAD_PORT" in env and not _get_pattern():
print(env.subst("Use manually specified: $UPLOAD_PORT"))
initial_port = env.subst("$UPLOAD_PORT")
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
if initial_port and not is_pattern_port(initial_port):
print(env.subst("Using manually specified: $UPLOAD_PORT"))
return
if env.subst("$UPLOAD_PROTOCOL") == "mbed" or (
"mbed" in env.subst("$PIOFRAMEWORK") and not env.subst("$UPLOAD_PROTOCOL")
if upload_protocol == "mbed" or (
"mbed" in env.subst("$PIOFRAMEWORK") and not upload_protocol
):
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
env.Replace(UPLOAD_PORT=find_mbed_disk(initial_port))
else:
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
sys.stderr.write("\n%s\n\n" % e)
env.Replace(UPLOAD_PORT=_look_for_serial_port())
env.Replace(
UPLOAD_PORT=find_serial_port(
initial_port=initial_port,
board_config=env.BoardConfig() if "BOARD" in env else None,
upload_protocol=upload_protocol,
)
)
if env.subst("$UPLOAD_PORT"):
print(env.subst("Auto-detected: $UPLOAD_PORT"))
@@ -175,10 +135,12 @@ def UploadToDisk(_, target, source, env):
assert "UPLOAD_PORT" in env
progname = env.subst("$PROGNAME")
for ext in ("bin", "hex"):
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
if not isfile(fpath):
fpath = os.path.join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
if not os.path.isfile(fpath):
continue
copyfile(fpath, join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
copyfile(
fpath, os.path.join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext))
)
print(
"Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)"
@@ -211,7 +173,7 @@ def CheckUploadSize(_, target, source, env):
if not isinstance(cmd, list):
cmd = cmd.split()
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
sysenv = environ.copy()
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command(env.subst(cmd), env=sysenv)
if result["returncode"] != 0:
@@ -236,9 +198,9 @@ def CheckUploadSize(_, target, source, env):
def _format_availale_bytes(value, total):
percent_raw = float(value) / float(total)
blocks_per_progress = 10
used_blocks = int(round(blocks_per_progress * percent_raw))
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
used_blocks = min(
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
)
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
)
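
The removed _get_pattern/_is_match_pattern helpers boil down to a glob check on the configured upload port: a fixed port is used as-is, while a pattern such as "/dev/ttyUSB*" still triggers auto-detection. A rough standalone illustration of that decision (not the actual platformio.device.finder implementation):

from fnmatch import fnmatch

def looks_like_pattern(port):
    # same character set the old helper checked for
    return bool(set("*?[]") & set(port or ""))

def pick_port(configured, available):
    if configured and not looks_like_pattern(configured):
        return configured  # manually specified, use as-is
    return next(
        (p for p in available if not configured or fnmatch(p, configured)),
        None,
    )

assert pick_port("/dev/ttyUSB0", ["/dev/ttyACM0"]) == "/dev/ttyUSB0"
assert pick_port("/dev/ttyUSB*", ["/dev/ttyACM0", "/dev/ttyUSB1"]) == "/dev/ttyUSB1"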

View File

@@ -26,9 +26,9 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from SCons.Script import Export # pylint: disable=import-error
from SCons.Script import SConscript # pylint: disable=import-error
from platformio import fs
from platformio.compat import string_types
from platformio.util import pioversion_to_intstr
from platformio import __version__, fs
from platformio.compat import IS_MACOS, string_types
from platformio.package.version import pepver_to_semver
SRC_HEADER_EXT = ["h", "hpp"]
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
@@ -47,14 +47,16 @@ def scons_patched_match_splitext(path, suffixes=None):
def GetBuildType(env):
return (
"debug"
if (
set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
or env.GetProjectOption("build_type") == "debug"
)
else "release"
)
modes = []
if (
set(["__debug", "sizedata"]) # sizedata = for memory inspection
& set(COMMAND_LINE_TARGETS)
or env.GetProjectOption("build_type") == "debug"
):
modes.append("debug")
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
modes.append("test")
return "+".join(modes or ["release"])
def BuildProgram(env):
@@ -66,12 +68,17 @@ def BuildProgram(env):
env.Prepend(LINKFLAGS=["-T", env.subst("$LDSCRIPT_PATH")])
# enable "cyclic reference" for linker
if env.get("LIBS") and env.GetCompilerType() == "gcc":
if (
env.get("LIBS")
and env.GetCompilerType() == "gcc"
and (env.PioPlatform().is_embedded() or not IS_MACOS)
):
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
program = env.Program(
os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
os.path.join("$BUILD_DIR", env.subst("$PROGNAME$PROGSUFFIX")),
env["PIOBUILDFILES"],
)
env.Replace(PIOMAINPROG=program)
@@ -90,11 +97,16 @@ def BuildProgram(env):
def ProcessProgramDeps(env):
def _append_pio_macros():
core_version = pepver_to_semver(__version__)
env.AppendUnique(
CPPDEFINES=[
(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())),
int(
"{0:02d}{1:02d}{2:02d}".format(
core_version.major, core_version.minor, core_version.patch
)
),
)
]
)
@@ -103,10 +115,6 @@ def ProcessProgramDeps(env):
env.PrintConfiguration()
# fix ASM handling under non case-sensitive OS
if not Util.case_sensitive_suffixes(".s", ".S"):
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
# process extra flags from board
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
@@ -117,14 +125,20 @@ def ProcessProgramDeps(env):
# process framework scripts
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
if env.GetBuildType() == "debug":
env.ConfigureDebugFlags()
if "debug" in env.GetBuildType():
env.ConfigureDebugTarget()
if "test" in env.GetBuildType():
env.ConfigureTestTarget()
# remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
if "__test" in COMMAND_LINE_TARGETS:
env.ConfigureTestTarget()
if "compiledb" in COMMAND_LINE_TARGETS and env.get(
"COMPILATIONDB_INCLUDE_TOOLCHAIN"
):
for scope, includes in env.DumpIntegrationIncludes().items():
if scope in ("toolchain",):
env.Append(CPPPATH=includes)
def ProcessProjectDeps(env):
@@ -148,12 +162,11 @@ def ProcessProjectDeps(env):
# extra build flags from `platformio.ini`
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
is_test = "__test" in COMMAND_LINE_TARGETS
if is_test:
if "test" in env.GetBuildType():
projenv.BuildSources(
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
)
if not is_test or env.GetProjectOption("test_build_project_src"):
if "test" not in env.GetBuildType() or env.GetProjectOption("test_build_src"):
projenv.BuildSources(
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
)
@@ -197,12 +210,12 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
for k in ("CPPPATH", "LIBPATH"):
for i, p in enumerate(result.get(k, [])):
if os.path.isdir(p):
result[k][i] = os.path.realpath(p)
result[k][i] = os.path.abspath(p)
# fix relative path for "-include"
for i, f in enumerate(result.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include":
result["CCFLAGS"][i] = (f[0], env.File(os.path.realpath(f[1].get_path())))
result["CCFLAGS"][i] = (f[0], env.File(os.path.abspath(f[1].get_path())))
return result
@@ -282,18 +295,21 @@ def CollectBuildFiles(
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
for callback, pattern in env.get("__PIO_BUILD_MIDDLEWARES", []):
tmp = []
for node in sources:
if pattern and not fnmatch.fnmatch(node.get_path(), pattern):
tmp.append(node)
continue
n = callback(node)
if n:
tmp.append(n)
sources = tmp
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
if not middlewares:
return sources
return sources
new_sources = []
for node in sources:
new_node = node
for callback, pattern in middlewares:
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
continue
new_node = callback(new_node)
if new_node:
new_sources.append(new_node)
return new_sources
def AddBuildMiddleware(env, callback, pattern=None):
@@ -333,11 +349,10 @@ def BuildFrameworks(env, frameworks):
env.Exit(1)
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
return env.StaticLibrary(
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
)
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
return env.StaticLibrary(env.subst(variant_dir), nodes)
def BuildSources(env, variant_dir, src_dir, src_filter=None):

165
platformio/cache.py Normal file
View File

@@ -0,0 +1,165 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import hashlib
import os
from time import time
from platformio import app, fs
from platformio.compat import hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.helpers import get_project_cache_dir
class ContentCache(object):
def __init__(self, namespace=None):
self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
self._db_path = os.path.join(self.cache_dir, "db.data")
self._lockfile = None
if not os.path.isdir(self.cache_dir):
os.makedirs(self.cache_dir)
def __enter__(self):
# cleanup obsolete items
self.delete()
return self
def __exit__(self, type_, value, traceback):
pass
@staticmethod
def key_from_args(*args):
h = hashlib.sha1()
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()
def get_cache_path(self, key):
assert "/" not in key and "\\" not in key
key = str(key)
assert len(key) > 3
return os.path.join(self.cache_dir, key)
def get(self, key):
cache_path = self.get_cache_path(key)
if not os.path.isfile(cache_path):
return None
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
if not app.get_setting("enable_cache"):
return False
cache_path = self.get_cache_path(key)
if os.path.isfile(cache_path):
self.delete(key)
if not data:
return False
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
if not os.path.isdir(os.path.dirname(cache_path)):
os.makedirs(os.path.dirname(cache_path))
try:
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, mode="a", encoding="utf8") as fp:
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
except UnicodeError:
if os.path.isfile(cache_path):
try:
os.remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
def delete(self, keys=None):
"""Keys=None, delete expired items"""
if not os.path.isfile(self._db_path):
return None
if not keys:
keys = []
if not isinstance(keys, list):
keys = [keys]
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
expire, fname = line.split("=")
path = os.path.join(self.cache_dir, fname)
try:
if (
time() < int(expire)
and os.path.isfile(path)
and path not in paths_for_delete
):
newlines.append(line)
continue
except ValueError:
pass
found = True
if os.path.isfile(path):
try:
os.remove(path)
if not os.listdir(os.path.dirname(path)):
fs.rmtree(os.path.dirname(path))
except OSError:
pass
if found and self._lock_dbindex():
with open(self._db_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return True
def clean(self):
if not os.path.isdir(self.cache_dir):
return
fs.rmtree(self.cache_dir)
def _lock_dbindex(self):
self._lockfile = LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except: # pylint: disable=bare-except
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
return True
#
# Helpers
#
def cleanup_content_cache(namespace=None):
with ContentCache(namespace) as cc:
cc.clean()
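
A minimal usage sketch of ContentCache as defined above, assuming a writable project cache directory and that the "enable_cache" setting is on; the key arguments and payload are placeholders:

from platformio.cache import ContentCache

key = ContentCache.key_from_args("registry", "search", "query=ota")
with ContentCache("http") as cc:
    cached = cc.get(key)
    if cached is None:
        cached = '{"items": []}'          # pretend this came from the network
        cc.set(key, cached, valid="1h")   # keep it for one hour
print(cached)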

View File

@@ -0,0 +1,356 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from platformio import __accounts_api__, app
from platformio.clients.http import HTTPClient, HTTPClientError
from platformio.exception import PlatformioException
class AccountError(PlatformioException):
MESSAGE = "{0}"
class AccountNotAuthorized(AccountError):
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
class AccountAlreadyAuthorized(AccountError):
MESSAGE = "You are already authorized with {0} account."
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
def __init__(self):
super().__init__(__accounts_api__)
@staticmethod
def get_refresh_token():
try:
return app.get_state_item("account").get("auth").get("refresh_token")
except: # pylint:disable=bare-except
raise AccountNotAuthorized()
@staticmethod
def delete_local_session():
app.delete_state_item("account")
@staticmethod
def delete_local_state(key):
account = app.get_state_item("account")
if not account or key not in account:
return
del account[key]
app.set_state_item("account", account)
def fetch_json_data(self, *args, **kwargs):
try:
return super().fetch_json_data(*args, **kwargs)
except HTTPClientError as exc:
raise AccountError(exc) from exc
def fetch_authentication_token(self):
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
auth = app.get_state_item("account", {}).get("auth", {})
if auth.get("access_token") and auth.get("access_token_expire"):
if auth.get("access_token_expire") > time.time():
return auth.get("access_token")
if auth.get("refresh_token"):
try:
data = self.fetch_json_data(
"post",
"/v1/login",
headers={
"Authorization": "Bearer %s" % auth.get("refresh_token")
},
)
app.set_state_item("account", data)
return data.get("auth").get("access_token")
except AccountError:
self.delete_local_session()
raise AccountNotAuthorized()
def login(self, username, password):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
data = self.fetch_json_data(
"post",
"/v1/login",
data={"username": username, "password": password},
)
app.set_state_item("account", data)
return data
def login_with_code(self, client_id, code, redirect_uri):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
result = self.fetch_json_data(
"post",
"/v1/login/code",
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
)
app.set_state_item("account", result)
return result
def logout(self):
refresh_token = self.get_refresh_token()
self.delete_local_session()
try:
self.fetch_json_data(
"post",
"/v1/logout",
data={"refresh_token": refresh_token},
)
except AccountError:
pass
return True
def change_password(self, old_password, new_password):
return self.fetch_json_data(
"post",
"/v1/password",
data={"old_password": old_password, "new_password": new_password},
x_with_authorization=True,
)
def registration(
self, username, email, password, firstname, lastname
): # pylint:disable=too-many-arguments
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
return self.fetch_json_data(
"post",
"/v1/registration",
data={
"username": username,
"email": email,
"password": password,
"firstname": firstname,
"lastname": lastname,
},
)
def auth_token(self, password, regenerate):
return self.fetch_json_data(
"post",
"/v1/token",
data={"password": password, "regenerate": 1 if regenerate else 0},
x_with_authorization=True,
).get("auth_token")
def forgot_password(self, username):
return self.fetch_json_data(
"post",
"/v1/forgot",
data={"username": username},
)
def get_profile(self):
return self.fetch_json_data(
"get",
"/v1/profile",
x_with_authorization=True,
)
def update_profile(self, profile, current_password):
profile["current_password"] = current_password
self.delete_local_state("summary")
response = self.fetch_json_data(
"put",
"/v1/profile",
data=profile,
x_with_authorization=True,
)
return response
def get_account_info(self, offline=False):
account = app.get_state_item("account") or {}
if (
account.get("summary")
and account["summary"].get("expire_at", 0) > time.time()
):
return account["summary"]
if offline and account.get("email"):
return {
"profile": {
"email": account.get("email"),
"username": account.get("username"),
}
}
result = self.fetch_json_data(
"get",
"/v1/summary",
x_with_authorization=True,
)
account["summary"] = dict(
profile=result.get("profile"),
packages=result.get("packages"),
subscriptions=result.get("subscriptions"),
user_id=result.get("user_id"),
expire_at=int(time.time()) + self.SUMMARY_CACHE_TTL,
)
app.set_state_item("account", account)
return result
def get_logged_username(self):
return self.get_account_info(offline=True).get("profile").get("username")
def destroy_account(self):
return self.fetch_json_data(
"delete",
"/v1/account",
x_with_authorization=True,
)
def create_org(self, orgname, email, displayname):
return self.fetch_json_data(
"post",
"/v1/orgs",
data={"orgname": orgname, "email": email, "displayname": displayname},
x_with_authorization=True,
)
def get_org(self, orgname):
return self.fetch_json_data(
"get",
"/v1/orgs/%s" % orgname,
x_with_authorization=True,
)
def list_orgs(self):
return self.fetch_json_data(
"get",
"/v1/orgs",
x_with_authorization=True,
)
def update_org(self, orgname, data):
return self.fetch_json_data(
"put",
"/v1/orgs/%s" % orgname,
data={k: v for k, v in data.items() if v},
x_with_authorization=True,
)
def destroy_org(self, orgname):
return self.fetch_json_data(
"delete",
"/v1/orgs/%s" % orgname,
x_with_authorization=True,
)
def add_org_owner(self, orgname, username):
return self.fetch_json_data(
"post",
"/v1/orgs/%s/owners" % orgname,
data={"username": username},
x_with_authorization=True,
)
def list_org_owners(self, orgname):
return self.fetch_json_data(
"get",
"/v1/orgs/%s/owners" % orgname,
x_with_authorization=True,
)
def remove_org_owner(self, orgname, username):
return self.fetch_json_data(
"delete",
"/v1/orgs/%s/owners" % orgname,
data={"username": username},
x_with_authorization=True,
)
def create_team(self, orgname, teamname, description):
return self.fetch_json_data(
"post",
"/v1/orgs/%s/teams" % orgname,
data={"name": teamname, "description": description},
x_with_authorization=True,
)
def destroy_team(self, orgname, teamname):
return self.fetch_json_data(
"delete",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
x_with_authorization=True,
)
def get_team(self, orgname, teamname):
return self.fetch_json_data(
"get",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
x_with_authorization=True,
)
def list_teams(self, orgname):
return self.fetch_json_data(
"get",
"/v1/orgs/%s/teams" % orgname,
x_with_authorization=True,
)
def update_team(self, orgname, teamname, data):
return self.fetch_json_data(
"put",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
data={k: v for k, v in data.items() if v},
x_with_authorization=True,
)
def add_team_member(self, orgname, teamname, username):
return self.fetch_json_data(
"post",
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
data={"username": username},
x_with_authorization=True,
)
def remove_team_member(self, orgname, teamname, username):
return self.fetch_json_data(
"delete",
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
data={"username": username},
x_with_authorization=True,
)
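
A minimal sketch of driving AccountClient directly (the "pio account" commands shown further below are thin wrappers around these calls); the credentials are placeholders and a registered account plus network access are assumed:

from platformio.clients.account import AccountClient, AccountError

client = AccountClient()
try:
    client.login("example-user", "example-password")
    print(client.get_logged_username())
finally:
    try:
        client.logout()
    except AccountError:
        pass  # not logged in or session already cleared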

216
platformio/clients/http.py Normal file
View File

@@ -0,0 +1,216 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import math
import os
import socket
from urllib.parse import urljoin
import requests.adapters
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
from platformio import __check_internet_hosts__, __default_requests_timeout__, app, util
from platformio.cache import ContentCache, cleanup_content_cache
from platformio.exception import PlatformioException, UserSideException
class HTTPClientError(PlatformioException):
def __init__(self, message, response=None):
super().__init__()
self.message = message
self.response = response
def __str__(self): # pragma: no cover
return self.message
class InternetIsOffline(UserSideException):
MESSAGE = (
"You are not connected to the Internet.\n"
"PlatformIO needs the Internet connection to"
" download dependent packages or to work with PlatformIO Account."
)
class EndpointSession(requests.Session):
def __init__(self, base_url, *args, **kwargs):
super().__init__(*args, **kwargs)
self.base_url = base_url
def request( # pylint: disable=signature-differs,arguments-differ
self, method, url, *args, **kwargs
):
# print(self.base_url, method, url, args, kwargs)
return super().request(method, urljoin(self.base_url, url), *args, **kwargs)
class EndpointSessionIterator(object):
def __init__(self, endpoints):
if not isinstance(endpoints, list):
endpoints = [endpoints]
self.endpoints = endpoints
self.endpoints_iter = iter(endpoints)
self.retry = Retry(
total=math.ceil(6 / len(self.endpoints)),
backoff_factor=1,
# method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
status_forcelist=[413, 429, 500, 502, 503, 504],
)
def __iter__(self): # pylint: disable=non-iterator-returned
return self
def __next__(self):
base_url = next(self.endpoints_iter)
session = EndpointSession(base_url)
session.headers.update({"User-Agent": app.get_user_agent()})
adapter = requests.adapters.HTTPAdapter(max_retries=self.retry)
session.mount(base_url, adapter)
return session
class HTTPClient(object):
def __init__(self, endpoints):
self._session_iter = EndpointSessionIterator(endpoints)
self._session = None
self._next_session()
def __del__(self):
if not self._session:
return
try:
self._session.close()
except: # pylint: disable=bare-except
pass
self._session = None
def _next_session(self):
if self._session:
self._session.close()
self._session = next(self._session_iter)
@util.throttle(500)
def send_request(self, method, path, **kwargs):
# check Internet connectivity first to avoid hanging on the 60-second request timeout
ensure_internet_on(raise_exception=True)
headers = kwargs.get("headers", {})
with_authorization = (
kwargs.pop("x_with_authorization")
if "x_with_authorization" in kwargs
else False
)
if with_authorization and "Authorization" not in headers:
# pylint: disable=import-outside-toplevel
from platformio.clients.account import AccountClient
headers["Authorization"] = (
"Bearer %s" % AccountClient().fetch_authentication_token()
)
kwargs["headers"] = headers
# set default timeout
if "timeout" not in kwargs:
kwargs["timeout"] = __default_requests_timeout__
while True:
try:
return getattr(self._session, method)(path, **kwargs)
except (
requests.exceptions.ConnectionError,
requests.exceptions.Timeout,
) as e:
try:
self._next_session()
except: # pylint: disable=bare-except
raise HTTPClientError(str(e))
def fetch_json_data(self, method, path, **kwargs):
if method not in ("get", "head", "options"):
cleanup_content_cache("http")
cache_valid = kwargs.pop("x_cache_valid") if "x_cache_valid" in kwargs else None
if not cache_valid:
return self._parse_json_response(self.send_request(method, path, **kwargs))
cache_key = ContentCache.key_from_args(
method, path, kwargs.get("params"), kwargs.get("data")
)
with ContentCache("http") as cc:
result = cc.get(cache_key)
if result is not None:
return json.loads(result)
response = self.send_request(method, path, **kwargs)
data = self._parse_json_response(response)
cc.set(cache_key, response.text, cache_valid)
return data
@staticmethod
def _parse_json_response(response, expected_codes=(200, 201, 202)):
if response.status_code in expected_codes:
try:
return response.json()
except ValueError:
pass
try:
message = response.json()["message"]
except (KeyError, ValueError):
message = response.text
raise HTTPClientError(message, response)
#
# Helpers
#
@util.memoized(expire="10s")
def _internet_on():
timeout = 2
socket.setdefaulttimeout(timeout)
for host in __check_internet_hosts__:
try:
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
if not os.getenv(var) and not os.getenv(var.lower()):
continue
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
return True
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect
# to all possible addresses (IPv4 and IPv6) in turn until a connection succeeds:
s = socket.create_connection((host, 80))
s.close()
return True
except: # pylint: disable=bare-except
pass
return False
def ensure_internet_on(raise_exception=False):
result = _internet_on()
if raise_exception and not result:
raise InternetIsOffline()
return result
def fetch_remote_content(*args, **kwargs):
kwargs["headers"] = kwargs.get("headers", {})
if "User-Agent" not in kwargs["headers"]:
kwargs["headers"]["User-Agent"] = app.get_user_agent()
if "timeout" not in kwargs:
kwargs["timeout"] = __default_requests_timeout__
r = requests.get(*args, **kwargs)
r.raise_for_status()
return r.text
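
A minimal sketch of how a concrete client builds on HTTPClient; the endpoint and path below are placeholders, not real PlatformIO services, and network access is assumed:

from platformio.clients.http import HTTPClient, HTTPClientError

class ExampleClient(HTTPClient):
    def __init__(self):
        super().__init__("https://api.example.com")

    def get_status(self):
        # GET responses can be cached on disk via the "x_cache_valid" hint
        return self.fetch_json_data("get", "/v1/status", x_cache_valid="10m")

try:
    print(ExampleClient().get_status())
except HTTPClientError as exc:
    print("request failed:", exc)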

View File

@@ -0,0 +1,159 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio import __registry_mirror_hosts__, fs
from platformio.clients.account import AccountClient, AccountError
from platformio.clients.http import HTTPClient, HTTPClientError
# pylint: disable=too-many-arguments
class RegistryClient(HTTPClient):
def __init__(self):
endpoints = [f"https://api.{host}" for host in __registry_mirror_hosts__]
super().__init__(endpoints)
@staticmethod
def allowed_private_packages():
private_permissions = set(
[
"service.registry.publish-private-tool",
"service.registry.publish-private-platform",
"service.registry.publish-private-library",
]
)
try:
info = AccountClient().get_account_info() or {}
for item in info.get("packages", []):
if set(item.keys()) & private_permissions:
return True
except AccountError:
pass
return False
def publish_package( # pylint: disable=redefined-builtin
self, owner, type, archive_path, released_at=None, private=False, notify=True
):
with open(archive_path, "rb") as fp:
return self.fetch_json_data(
"post",
"/v3/packages/%s/%s" % (owner, type),
params={
"private": 1 if private else 0,
"notify": 1 if notify else 0,
"released_at": released_at,
},
headers={
"Content-Type": "application/octet-stream",
"X-PIO-Content-SHA256": fs.calculate_file_hashsum(
"sha256", archive_path
),
},
data=fp,
x_with_authorization=True,
)
def unpublish_package( # pylint: disable=redefined-builtin
self, owner, type, name, version=None, undo=False
):
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
if version:
path += "/" + version
return self.fetch_json_data(
"delete", path, params={"undo": 1 if undo else 0}, x_with_authorization=True
)
def update_resource(self, urn, private):
return self.fetch_json_data(
"put",
"/v3/resources/%s" % urn,
data={"private": int(private)},
x_with_authorization=True,
)
def grant_access_for_resource(self, urn, client, level):
return self.fetch_json_data(
"put",
"/v3/resources/%s/access" % urn,
data={"client": client, "level": level},
x_with_authorization=True,
)
def revoke_access_from_resource(self, urn, client):
return self.fetch_json_data(
"delete",
"/v3/resources/%s/access" % urn,
data={"client": client},
x_with_authorization=True,
)
def list_resources(self, owner):
return self.fetch_json_data(
"get",
"/v3/resources",
params={"owner": owner} if owner else None,
x_cache_valid="1h",
x_with_authorization=True,
)
def list_packages(self, query=None, qualifiers=None, page=None, sort=None):
search_query = []
if qualifiers:
valid_qualifiers = (
"authors",
"keywords",
"frameworks",
"platforms",
"headers",
"ids",
"names",
"owners",
"types",
)
assert set(qualifiers.keys()) <= set(valid_qualifiers)
for name, values in qualifiers.items():
for value in set(
values if isinstance(values, (list, tuple)) else [values]
):
search_query.append('%s:"%s"' % (name[:-1], value))
if query:
search_query.append(query)
params = dict(query=" ".join(search_query))
if page:
params["page"] = int(page)
if sort:
params["sort"] = sort
return self.fetch_json_data(
"get",
"/v3/search",
params=params,
x_cache_valid="1h",
x_with_authorization=self.allowed_private_packages(),
)
def get_package(self, type_, owner, name, version=None):
try:
return self.fetch_json_data(
"get",
"/v3/packages/{owner}/{type}/{name}".format(
type=type_, owner=owner.lower(), name=name.lower()
),
params=dict(version=version) if version else None,
x_cache_valid="1h",
x_with_authorization=self.allowed_private_packages(),
)
except HTTPClientError as e:
if e.response is not None and e.response.status_code == 404:
return None
raise e
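
A minimal sketch of querying the registry with RegistryClient; the query and qualifiers are illustrative, the response shape is assumed to carry an "items" list, and results depend on network availability:

from platformio.clients.registry import RegistryClient

client = RegistryClient()
result = client.list_packages(
    query="json",
    qualifiers={"types": "library", "frameworks": ["arduino"]},
    page=1,
)
for item in (result or {}).get("items", []):
    print(item.get("name"))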

View File

@@ -22,7 +22,7 @@ class PlatformioCLI(click.MultiCommand):
leftover_args = []
def __init__(self, *args, **kwargs):
super(PlatformioCLI, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._pio_cmds_dir = os.path.dirname(__file__)
@staticmethod
@@ -41,7 +41,7 @@ class PlatformioCLI(click.MultiCommand):
PlatformioCLI.leftover_args = ctx.args
if hasattr(ctx, "protected_args"):
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
return super(PlatformioCLI, self).invoke(ctx)
return super().invoke(ctx)
def list_commands(self, ctx):
cmds = []
@@ -74,7 +74,13 @@ class PlatformioCLI(click.MultiCommand):
def _handle_obsolate_command(name):
# pylint: disable=import-outside-toplevel
if name == "init":
from platformio.commands.project import project_init
from platformio.project.commands.init import project_init_cmd
return project_init_cmd
if name == "package":
from platformio.commands.pkg import cli
return cli
return project_init
raise AttributeError()

View File

@@ -0,0 +1,154 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import re
import click
from tabulate import tabulate
from platformio.clients.registry import RegistryClient
from platformio.commands.account import validate_username
from platformio.commands.team import validate_orgname_teamname
def validate_client(value):
if ":" in value:
validate_orgname_teamname(value)
else:
validate_username(value)
return value
@click.group("access", short_help="Manage resource access")
def cli():
pass
def validate_urn(value):
value = str(value).strip()
if not re.match(r"^prn:reg:pkg:(\d+):(\w+)$", value, flags=re.I):
raise click.BadParameter("Invalid URN format.")
return value
@cli.command("public", short_help="Make resource public")
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_public(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=0)
return click.secho(
"The resource %s has been successfully updated." % urn,
fg="green",
)
@cli.command("private", short_help="Make resource private")
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_private(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=1)
return click.secho(
"The resource %s has been successfully updated." % urn,
fg="green",
)
@cli.command("grant", short_help="Grant access")
@click.argument("level", type=click.Choice(["admin", "maintainer", "guest"]))
@click.argument(
"client",
metavar="[<ORGNAME:TEAMNAME>|<USERNAME>]",
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_grant(level, client, urn, urn_type):
reg_client = RegistryClient()
reg_client.grant_access_for_resource(urn=urn, client=client, level=level)
return click.secho(
"Access for resource %s has been granted for %s" % (urn, client),
fg="green",
)
@cli.command("revoke", short_help="Revoke access")
@click.argument(
"client",
metavar="[ORGNAME:TEAMNAME|USERNAME]",
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_revoke(client, urn, urn_type):
reg_client = RegistryClient()
reg_client.revoke_access_from_resource(urn=urn, client=client)
return click.secho(
"Access for resource %s has been revoked for %s" % (urn, client),
fg="green",
)
@cli.command("list", short_help="List published resources")
@click.argument("owner", required=False)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
@click.option("--json-output", is_flag=True)
def access_list(owner, urn_type, json_output):
reg_client = RegistryClient()
resources = reg_client.list_resources(owner=owner)
if json_output:
return click.echo(json.dumps(resources))
if not resources:
return click.secho("You do not have any resources.", fg="yellow")
for resource in resources:
click.echo()
click.secho(resource.get("name"), fg="cyan")
click.echo("-" * len(resource.get("name")))
table_data = []
table_data.append(("URN:", resource.get("urn")))
table_data.append(("Owner:", resource.get("owner")))
table_data.append(
(
"Access:",
click.style("Private", fg="red")
if resource.get("private", False)
else "Public",
)
)
table_data.append(
(
"Access level(s):",
", ".join(
(level.capitalize() for level in resource.get("access_levels"))
),
)
)
click.echo(tabulate(table_data, tablefmt="plain"))
return click.echo()
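
A minimal sketch of exercising the "access" command group with Click's test runner, assuming the new module is importable as platformio.commands.access; a logged-in account and network access are needed for the call to succeed:

from click.testing import CliRunner
from platformio.commands.access import cli as access_cli

runner = CliRunner()
result = runner.invoke(access_cli, ["list", "--json-output"])
print(result.exit_code, result.output)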

View File

@@ -14,59 +14,279 @@
# pylint: disable=unused-argument
import sys
import json
import re
import click
from tabulate import tabulate
from platformio.managers.core import pioplus_call
from platformio import util
from platformio.clients.account import AccountClient, AccountNotAuthorized
@click.group("account", short_help="Manage PIO Account")
@click.group("account", short_help="Manage PlatformIO account")
def cli():
pass
@cli.command("register", short_help="Create new PIO Account")
@click.option("-u", "--username")
def account_register(**kwargs):
pioplus_call(sys.argv[1:])
def validate_username(value, field="username"):
value = str(value).strip()
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I):
raise click.BadParameter(
"Invalid %s format. "
"%s must contain only alphanumeric characters "
"or single hyphens, cannot begin or end with a hyphen, "
"and must not be longer than 38 characters."
% (field.lower(), field.capitalize())
)
return value
@cli.command("login", short_help="Log in to PIO Account")
@click.option("-u", "--username")
@click.option("-p", "--password")
def account_login(**kwargs):
pioplus_call(sys.argv[1:])
def validate_email(value):
value = str(value).strip()
if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
raise click.BadParameter("Invalid email address")
return value
@cli.command("logout", short_help="Log out of PIO Account")
def validate_password(value):
value = str(value).strip()
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
raise click.BadParameter(
"Invalid password format. "
"Password must contain at least 8 characters"
" including a number and a lowercase letter"
)
return value
@cli.command("register", short_help="Create new PlatformIO Account")
@click.option(
"-u",
"--username",
prompt=True,
callback=lambda _, __, value: validate_username(value),
)
@click.option(
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
)
@click.option(
"-p",
"--password",
prompt=True,
hide_input=True,
confirmation_prompt=True,
callback=lambda _, __, value: validate_password(value),
)
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register(username, email, password, firstname, lastname):
client = AccountClient()
client.registration(username, email, password, firstname, lastname)
return click.secho(
"An account has been successfully created. "
"Please check your mail to activate your account and verify your email address.",
fg="green",
)
@cli.command("login", short_help="Log in to PlatformIO Account")
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login(username, password):
client = AccountClient()
client.login(username, password)
return click.secho("Successfully logged in!", fg="green")
@cli.command("logout", short_help="Log out of PlatformIO Account")
def account_logout():
pioplus_call(sys.argv[1:])
client = AccountClient()
client.logout()
return click.secho("Successfully logged out!", fg="green")
@cli.command("password", short_help="Change password")
@click.option("--old-password")
@click.option("--new-password")
def account_password(**kwargs):
pioplus_call(sys.argv[1:])
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password(old_password, new_password):
client = AccountClient()
client.change_password(old_password, new_password)
return click.secho("Password successfully changed!", fg="green")
@cli.command("token", short_help="Get or regenerate Authentication Token")
@click.option("-p", "--password")
@click.option("-p", "--password", prompt=True, hide_input=True)
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token(**kwargs):
pioplus_call(sys.argv[1:])
def account_token(password, regenerate, json_output):
client = AccountClient()
auth_token = client.auth_token(password, regenerate)
if json_output:
return click.echo(json.dumps({"status": "success", "result": auth_token}))
return click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
@cli.command("forgot", short_help="Forgot password")
@click.option("-u", "--username")
def account_forgot(**kwargs):
pioplus_call(sys.argv[1:])
@click.option("--username", prompt="Username or email")
def account_forgot(username):
client = AccountClient()
client.forgot_password(username)
return click.secho(
"If this account is registered, we will send the "
"further instructions to your email.",
fg="green",
)
@cli.command("show", short_help="PIO Account information")
@cli.command("update", short_help="Update profile information")
@click.option("--current-password", prompt=True, hide_input=True)
@click.option("--username")
@click.option("--email")
@click.option("--firstname")
@click.option("--lastname")
def account_update(current_password, **kwargs):
client = AccountClient()
profile = client.get_profile()
new_profile = profile.copy()
if not any(kwargs.values()):
for field in profile:
new_profile[field] = click.prompt(
field.replace("_", " ").capitalize(), default=profile[field]
)
if field == "email":
validate_email(new_profile[field])
if field == "username":
validate_username(new_profile[field])
else:
new_profile.update({key: value for key, value in kwargs.items() if value})
client.update_profile(new_profile, current_password)
click.secho("Profile successfully updated!", fg="green")
username_changed = new_profile["username"] != profile["username"]
email_changed = new_profile["email"] != profile["email"]
if not username_changed and not email_changed:
return None
try:
client.logout()
except AccountNotAuthorized:
pass
if email_changed:
return click.secho(
"Please check your mail to verify your new email address and re-login. ",
fg="yellow",
)
return click.secho("Please re-login.", fg="yellow")
@cli.command("destroy", short_help="Destroy account")
def account_destroy():
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s user account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% client.get_logged_username(),
abort=True,
)
client.destroy_account()
try:
client.logout()
except AccountNotAuthorized:
pass
return click.secho(
"User account has been destroyed.",
fg="green",
)
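
account_destroy above guards the destructive call with click.confirm(..., abort=True), which raises click.Abort and exits with a non-zero status unless the user explicitly answers yes. A stripped-down sketch of the same guard (the command name and the commented-out action are placeholders):

import click

@click.command("wipe")
def wipe():
    click.confirm(
        "Are you sure you want to remove all local data?\n"
        "Warning! This can not be undone.",
        abort=True,  # raises click.Abort (exit code 1) unless confirmed
    )
    # destroy_local_data()  # placeholder for the actual destructive action
    click.secho("Done.", fg="green")

if __name__ == "__main__":
    wipe()
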
@cli.command("show", short_help="PlatformIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show(**kwargs):
pioplus_call(sys.argv[1:])
def account_show(offline, json_output):
client = AccountClient()
info = client.get_account_info(offline)
if json_output:
return click.echo(json.dumps(info))
click.echo()
if info.get("profile"):
print_profile(info["profile"])
if info.get("packages"):
print_packages(info["packages"])
if info.get("subscriptions"):
print_subscriptions(info["subscriptions"])
return click.echo()
def print_profile(profile):
click.secho("Profile", fg="cyan", bold=True)
click.echo("=" * len("Profile"))
data = []
if profile.get("username"):
data.append(("Username:", profile["username"]))
if profile.get("email"):
data.append(("Email:", profile["email"]))
if profile.get("firstname"):
data.append(("First name:", profile["firstname"]))
if profile.get("lastname"):
data.append(("Last name:", profile["lastname"]))
click.echo(tabulate(data, tablefmt="plain"))
def print_packages(packages):
click.echo()
click.secho("Packages", fg="cyan")
click.echo("=" * len("Packages"))
for package in packages:
click.echo()
click.secho(package.get("name"), bold=True)
click.echo("-" * len(package.get("name")))
if package.get("description"):
click.echo(package.get("description"))
data = []
expire = "-"
if "subscription" in package:
expire = util.parse_datetime(
package["subscription"].get("end_at")
or package["subscription"].get("next_bill_at")
).strftime("%Y-%m-%d")
data.append(("Expire:", expire))
services = []
for key in package:
if not key.startswith("service."):
continue
if isinstance(package[key], dict):
services.append(package[key].get("title"))
else:
services.append(package[key])
if services:
data.append(("Services:", ", ".join(services)))
click.echo(tabulate(data, tablefmt="plain"))
def print_subscriptions(subscriptions):
click.echo()
click.secho("Subscriptions", fg="cyan")
click.echo("=" * len("Subscriptions"))
for subscription in subscriptions:
click.echo()
click.secho(subscription.get("product_name"), bold=True)
click.echo("-" * len(subscription.get("product_name")))
data = [("State:", subscription.get("status"))]
begin_at = util.parse_datetime(subscription.get("begin_at")).strftime("%c")
data.append(("Start date:", begin_at or "-"))
end_at = subscription.get("end_at")
if end_at:
end_at = util.parse_datetime(subscription.get("end_at")).strftime("%c")
data.append(("End date:", end_at or "-"))
next_bill_at = subscription.get("next_bill_at")
if next_bill_at:
next_bill_at = util.parse_datetime(
subscription.get("next_bill_at")
).strftime("%c")
data.append(("Next payment:", next_bill_at or "-"))
data.append(
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
)
data.append(
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
)
click.echo(tabulate(data, tablefmt="plain"))
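
print_profile, print_packages and print_subscriptions all render two-column key/value rows through tabulate with tablefmt="plain", which pads the first column so values line up without drawing any grid. A minimal sketch (the sample profile is made up):

import click
from tabulate import tabulate

profile = {"username": "octocat", "email": "octocat@example.com"}
data = [
    ("Username:", profile["username"]),
    ("Email:", profile["email"]),
]
# "plain" alignment, no borders -- the same style used by the account output.
click.echo(tabulate(data, tablefmt="plain"))
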


@@ -13,16 +13,16 @@
# limitations under the License.
import json
import shutil
import click
from tabulate import tabulate
from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformManager
from platformio.package.manager.platform import PlatformPackageManager
@click.command("boards", short_help="Embedded Board Explorer")
@click.command("boards", short_help="Board Explorer")
@click.argument("query", required=False)
@click.option("--installed", is_flag=True)
@click.option("--json-output", is_flag=True)
@@ -41,7 +41,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
grpboards[board["platform"]] = []
grpboards[board["platform"]].append(board)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
for (platform, boards) in sorted(grpboards.items()):
click.echo("")
click.echo("Platform: ", nl=False)
@@ -59,8 +59,8 @@ def print_boards(boards):
click.style(b["id"], fg="cyan"),
b["mcu"],
"%dMHz" % (b["fcpu"] / 1000000),
fs.format_filesize(b["rom"]),
fs.format_filesize(b["ram"]),
fs.humanize_file_size(b["rom"]),
fs.humanize_file_size(b["ram"]),
b["name"],
)
for b in boards
@@ -71,7 +71,7 @@ def print_boards(boards):
def _get_boards(installed=False):
pm = PlatformManager()
pm = PlatformPackageManager()
return pm.get_installed_boards() if installed else pm.get_all_boards()
@@ -83,4 +83,4 @@ def _print_boards_json(query, installed=False):
if query.lower() not in search_data.lower():
continue
result.append(board)
click.echo(dump_json_to_unicode(result))
click.echo(json.dumps(result))
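
The boards command now measures the terminal with shutil.get_terminal_size() instead of the Click helper, which newer Click releases deprecate in favor of the standard library; the stdlib call also takes an explicit fallback. A quick sketch:

import shutil

# COLUMNS/LINES environment variables win if set; otherwise the size is
# queried from the terminal, falling back to 80x24 when there is no TTY.
terminal_width, _ = shutil.get_terminal_size(fallback=(80, 24))
print("-" * terminal_width)
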


@@ -15,7 +15,9 @@
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
# pylint: disable=redefined-builtin,too-many-statements
import json
import os
import shutil
from collections import Counter
from os.path import dirname, isfile
from time import time
@@ -26,12 +28,11 @@ from tabulate import tabulate
from platformio import app, exception, fs, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools import CheckToolFactory
from platformio.compat import dump_json_to_unicode
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above, get_project_dir
@click.command("check", short_help="Run a static analysis tool on code")
@click.command("check", short_help="Static Code Analysis")
@click.option("-e", "--environment", multiple=True)
@click.option(
"-d",
@@ -61,6 +62,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
multiple=True,
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
)
@click.option("--skip-packages", is_flag=True)
def cli(
environment,
project_dir,
@@ -72,6 +74,7 @@ def cli(
verbose,
json_output,
fail_on_defect,
skip_packages,
):
app.set_session_var("custom_project_conf", project_conf)
@@ -103,8 +106,8 @@ def cli(
)
default_patterns = [
config.get_optional_dir("src"),
config.get_optional_dir("include"),
config.get("platformio", "src_dir"),
config.get("platformio", "include_dir"),
]
tool_options = dict(
verbose=verbose,
@@ -114,6 +117,8 @@ def cli(
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
platform_packages=env_options.get("platform_packages"),
)
for tool in config.get("env:" + envname, "check_tool"):
@@ -160,9 +165,12 @@ def cli(
print_processing_footer(result)
if json_output:
click.echo(dump_json_to_unicode(results_to_json(results)))
click.echo(json.dumps(results_to_json(results)))
elif not silent:
print_check_summary(results)
print_check_summary(results, verbose=verbose)
# Reset custom project config
app.set_session_var("custom_project_conf", None)
command_failed = any(r.get("succeeded") is False for r in results)
if command_failed:
@@ -190,7 +198,7 @@ def print_processing_header(tool, envname, envdump):
"Checking %s > %s (%s)"
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@@ -211,7 +219,7 @@ def print_processing_footer(result):
def collect_component_stats(result):
components = dict()
components = {}
def _append_defect(component, defect):
if not components.get(component):
@@ -222,7 +230,7 @@ def collect_component_stats(result):
component = dirname(defect.file) or defect.file
_append_defect(component, defect)
if component.startswith(get_project_dir()):
if component.lower().startswith(get_project_dir().lower()):
while os.sep in component:
component = dirname(component)
_append_defect(component, defect)
@@ -246,7 +254,7 @@ def print_defects_stats(results):
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
severity_labels.reverse()
tabular_data = list()
tabular_data = []
for k, v in component_stats.items():
tool_defect = [v.get(s, 0) for s in severity_labels]
tabular_data.append([k] + tool_defect)
@@ -263,7 +271,7 @@ def print_defects_stats(results):
click.echo()
def print_check_summary(results):
def print_check_summary(results, verbose=False):
click.echo()
tabular_data = []
@@ -280,6 +288,8 @@ def print_check_summary(results):
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
status_str = "IGNORED"
if not verbose:
continue
else:
succeeded_nums += 1
status_str = click.style("PASSED", fg="green")


@@ -34,7 +34,7 @@ class DefectItem(object):
severity,
category,
message,
file="unknown",
file=None,
line=0,
column=0,
id=None,
@@ -50,8 +50,8 @@ class DefectItem(object):
self.callstack = callstack
self.cwe = cwe
self.id = id
self.file = file
if file.startswith(get_project_dir()):
self.file = file or "unknown"
if file.lower().startswith(get_project_dir().lower()):
self.file = os.path.relpath(file, get_project_dir())
def __repr__(self):
@@ -86,7 +86,7 @@ class DefectItem(object):
"severity": self.SEVERITY_LABELS[self.severity],
"category": self.category,
"message": self.message,
"file": os.path.realpath(self.file),
"file": os.path.abspath(self.file),
"line": self.line,
"column": self.column,
"callstack": self.callstack,


@@ -14,12 +14,15 @@
import glob
import os
import tempfile
import click
from platformio import fs, proc
from platformio.commands.check.defect import DefectItem
from platformio.project.helpers import get_project_dir, load_project_ide_data
from platformio.package.manager.core import get_core_package_dir
from platformio.package.meta import PackageSpec
from platformio.project.helpers import load_build_metadata
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
@@ -32,12 +35,13 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
self.cpp_includes = []
self.cpp_defines = []
self.toolchain_defines = []
self._tmp_files = []
self.cc_path = None
self.cxx_path = None
self._defects = []
self._on_defect_callback = None
self._bad_input = False
self._load_cpp_data(project_dir, envname)
self._load_cpp_data(project_dir)
# detect all defects by default
if not self.options.get("severity"):
@@ -52,17 +56,24 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
for s in self.options["severity"]
]
def _load_cpp_data(self, project_dir, envname):
data = load_project_ide_data(project_dir, envname)
def _load_cpp_data(self, project_dir):
data = load_build_metadata(project_dir, self.envname)
if not data:
return
self.cc_flags = data.get("cc_flags", "").split(" ")
self.cxx_flags = data.get("cxx_flags", "").split(" ")
self.cpp_includes = data.get("includes", [])
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
self.cpp_includes = self._dump_includes(data.get("includes", {}))
self.cpp_defines = data.get("defines", [])
self.cc_path = data.get("cc_path")
self.cxx_path = data.get("cxx_path")
self.toolchain_defines = self._get_toolchain_defines(self.cc_path)
self.toolchain_defines = self._get_toolchain_defines()
def get_tool_dir(self, pkg_name):
for spec in self.options["platform_packages"] or []:
spec = PackageSpec(spec)
if spec.name == pkg_name:
return get_core_package_dir(pkg_name, spec=spec)
return get_core_package_dir(pkg_name)
def get_flags(self, tool):
result = []
@@ -75,21 +86,54 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return result
def _get_toolchain_defines(self):
def _extract_defines(language, includes_file):
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
defines = []
cmd = "echo | %s -x %s %s %s -dM -E -" % (
self.cc_path,
language,
" ".join(
[f for f in build_flags if f.startswith(("-m", "-f", "-std"))]
),
includes_file,
)
result = proc.exec_command(cmd, shell=True)
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
if len(tokens) > 2:
defines.append("%s=%s" % (tokens[1], tokens[2]))
else:
defines.append(tokens[1])
return defines
incflags_file = self._long_includes_hook(self.cpp_includes)
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
def _create_tmp_file(self, data):
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
def _long_includes_hook(self, includes):
data = []
for inc in includes:
data.append('-I"%s"' % fs.to_unix_path(inc))
return '@"%s"' % self._create_tmp_file(" ".join(data))
@staticmethod
def _get_toolchain_defines(cc_path):
defines = []
result = proc.exec_command("echo | %s -dM -E -x c++ -" % cc_path, shell=True)
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
if len(tokens) > 2:
defines.append("%s=%s" % (tokens[1], tokens[2]))
else:
defines.append(tokens[1])
return defines
def _dump_includes(includes_map):
result = []
for includes in includes_map.values():
for include in includes:
if include not in result:
result.append(include)
return result
@staticmethod
def is_flag_set(flag, flags):
@@ -129,19 +173,51 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return raw_line
def clean_up(self):
pass
for f in self._tmp_files:
if os.path.isfile(f):
os.remove(f)
def get_project_target_files(self):
allowed_extensions = (".h", ".hpp", ".c", ".cc", ".cpp", ".ino")
result = []
@staticmethod
def is_check_successful(cmd_result):
return cmd_result["returncode"] == 0
def execute_check_cmd(self, cmd):
result = proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
if not self.is_check_successful(result):
click.echo(
"\nError: Failed to execute check command! Exited with code %d."
% result["returncode"]
)
if self.options.get("verbose"):
click.echo(result["out"])
click.echo(result["err"])
self._bad_input = True
return result
@staticmethod
def get_project_target_files(patterns):
c_extension = (".c",)
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
header_extensions = (".h", ".hh", ".hpp", ".hxx")
result = {"c": [], "c++": [], "headers": []}
def _add_file(path):
if not path.endswith(allowed_extensions):
return
result.append(os.path.realpath(path))
if path.endswith(header_extensions):
result["headers"].append(os.path.abspath(path))
elif path.endswith(c_extension):
result["c"].append(os.path.abspath(path))
elif path.endswith(cpp_extensions):
result["c++"].append(os.path.abspath(path))
for pattern in self.options["patterns"]:
for item in glob.glob(pattern):
for pattern in patterns:
for item in glob.glob(pattern, recursive=True):
if not os.path.isdir(item):
_add_file(item)
for root, _, files in os.walk(item, followlinks=True):
@@ -150,27 +226,19 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return result
def get_source_language(self):
with fs.cd(get_project_dir()):
for _, __, files in os.walk(self.config.get_optional_dir("src")):
for name in files:
if "." not in name:
continue
if os.path.splitext(name)[1].lower() in (".cpp", ".cxx", ".ino"):
return "c++"
return "c"
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
cmd = self.configure_command()
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if cmd:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.execute_check_cmd(cmd)
else:
if self.options.get("verbose"):
click.echo("Error: Couldn't configure command")
self._bad_input = True
self.clean_up()


@@ -17,11 +17,10 @@ from os.path import join
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
class ClangtidyCheckTool(CheckToolBase):
def tool_output_filter(self, line):
def tool_output_filter(self, line): # pylint: disable=arguments-differ
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
return ""
@@ -34,7 +33,7 @@ class ClangtidyCheckTool(CheckToolBase):
return ""
def parse_defect(self, raw_line):
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
if not match:
return raw_line
@@ -49,19 +48,41 @@ class ClangtidyCheckTool(CheckToolBase):
return DefectItem(severity, category, message, file_, line, column, defect_id)
@staticmethod
def is_check_successful(cmd_result):
# Note: Clang-Tidy returns 1 for non-critical compilation errors,
# so 0 and 1 are the only acceptable values
return cmd_result["returncode"] < 2
def configure_command(self):
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")
tool_path = join(self.get_tool_dir("tool-clangtidy"), "clang-tidy")
cmd = [tool_path, "--quiet"]
flags = self.get_flags("clangtidy")
if not self.is_flag_set("--checks", flags):
if not (
self.is_flag_set("--checks", flags) or self.is_flag_set("--config", flags)
):
cmd.append("--checks=*")
cmd.extend(flags)
cmd.extend(self.get_project_target_files())
cmd.append("--")
project_files = self.get_project_target_files(self.options["patterns"])
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
cmd.extend(["-I%s" % inc for inc in self.cpp_includes])
src_files = []
for scope in project_files:
src_files.extend(project_files[scope])
cmd.extend(flags + src_files + ["--"])
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
)
includes = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get("platformio", "packages_dir").lower()
):
continue
includes.append(inc)
cmd.extend(["-I%s" % inc for inc in includes])
return cmd
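
The rewritten clang-tidy configure_command() places the source files before a bare "--" and the -D/-I compile options after it; everything past the separator goes to the compiler front end when no compilation database is used. A rough sketch of assembling such a command (tool path, files and flags are placeholders):

import subprocess

def build_clangtidy_cmd(tool_path, src_files, defines, includes, extra_flags=()):
    cmd = [tool_path, "--quiet"]
    # Enable all checks unless the caller already configured them.
    if not any(f.startswith(("--checks", "--config")) for f in extra_flags):
        cmd.append("--checks=*")
    cmd += list(extra_flags) + list(src_files)
    cmd += ["--"]  # compiler options follow
    cmd += ["-D%s" % d for d in defines]
    cmd += ["-I%s" % inc for inc in includes]
    return cmd

cmd = build_clangtidy_cmd("clang-tidy", ["src/main.cpp"], ["NDEBUG"], ["include"])
print(" ".join(cmd))
# subprocess.run(cmd, check=False)  # run once clang-tidy is installed
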


@@ -12,18 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from os import remove
from os.path import isfile, join
from tempfile import NamedTemporaryFile
import os
import click
from platformio import proc
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
class CppcheckCheckTool(CheckToolBase):
def __init__(self, *args, **kwargs):
self._tmp_files = []
super().__init__(*args, **kwargs)
self._field_delimiter = "<&PIO&>"
self._buffer = ""
self.defect_fields = [
"severity",
"message",
@@ -34,9 +36,8 @@ class CppcheckCheckTool(CheckToolBase):
"cwe",
"id",
]
super(CppcheckCheckTool, self).__init__(*args, **kwargs)
def tool_output_filter(self, line):
def tool_output_filter(self, line): # pylint: disable=arguments-differ
if (
not self.options.get("verbose")
and "--suppress=unmatchedSuppression:" in line
@@ -48,20 +49,23 @@ class CppcheckCheckTool(CheckToolBase):
for msg in (
"No C or C++ source files found",
"unrecognized command line option",
"there was an internal error",
)
):
self._bad_input = True
return line
def parse_defect(self, raw_line):
if "<&PIO&>" not in raw_line or any(
f not in raw_line for f in self.defect_fields
):
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
if self._field_delimiter not in raw_line:
return None
args = dict()
for field in raw_line.split("<&PIO&>"):
self._buffer += raw_line
if any(f not in self._buffer for f in self.defect_fields):
return None
args = {}
for field in self._buffer.split(self._field_delimiter):
field = field.strip().replace('"', "")
name, value = field.split("=", 1)
args[name] = value
@@ -74,20 +78,45 @@ class CppcheckCheckTool(CheckToolBase):
else:
args["severity"] = DefectItem.SEVERITY_LOW
# Skip defects found in third-party software, but keep in mind that such defects
# might break the checking process, in which case defects from project files are not reported
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
if (
args.get("file", "")
.lower()
.startswith(self.config.get("platformio", "packages_dir").lower())
):
if args["id"] in breaking_defect_ids:
if self.options.get("verbose"):
click.echo(
"Error: Found a breaking defect '%s' in %s:%s\n"
"Please note: check results might not be valid!\n"
"Try adding --skip-packages"
% (args.get("message"), args.get("file"), args.get("line"))
)
click.echo()
self._bad_input = True
self._buffer = ""
return None
self._buffer = ""
return DefectItem(**args)
def configure_command(self):
tool_path = join(get_core_package_dir("tool-cppcheck"), "cppcheck")
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
tool_path = os.path.join(self.get_tool_dir("tool-cppcheck"), "cppcheck")
cmd = [
tool_path,
"--error-exitcode=1",
"--addon-python=%s" % proc.get_pythonexe_path(),
"--error-exitcode=3",
"--verbose" if self.options.get("verbose") else "--quiet",
]
cmd.append(
'--template="%s"'
% "<&PIO&>".join(["{0}={{{0}}}".format(f) for f in self.defect_fields])
% self._field_delimiter.join(
["{0}={{{0}}}".format(f) for f in self.defect_fields]
)
)
flags = self.get_flags("cppcheck")
@@ -108,51 +137,131 @@ class CppcheckCheckTool(CheckToolBase):
cmd.append("--enable=%s" % ",".join(enabled_checks))
if not self.is_flag_set("--language", flags):
if self.get_source_language() == "c++":
cmd.append("--language=c++")
cmd.append("--language=" + language)
if not self.is_flag_set("--std", flags):
for f in self.cxx_flags + self.cc_flags:
if "-std" in f:
# Standards with GNU extensions are not allowed
cmd.append("-" + f.replace("gnu", "c"))
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
if not self.is_flag_set("--std", flags):
# Try to guess the standard version from the build flags
for flag in build_flags:
if "-std" in flag:
cmd.append("-" + self.convert_language_standard(flag))
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
)
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
cmd.extend(flags)
cmd.append("--file-list=%s" % self._generate_src_file())
cmd.extend(
"--include=" + inc
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
)
cmd.append("--includes-file=%s" % self._generate_inc_file())
core_dir = self.config.get_optional_dir("packages")
cmd.append("--suppress=*:%s*" % core_dir)
cmd.append("--suppress=unmatchedSuppression:%s*" % core_dir)
cmd.append('"%s"' % src_file)
return cmd
def _create_tmp_file(self, data):
with NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
@staticmethod
def get_forced_includes(build_flags, includes):
def _extract_filepath(flag, include_options, build_flags):
path = ""
for option in include_options:
if not flag.startswith(option):
continue
if flag.split(option)[1].strip():
path = flag.split(option)[1].strip()
elif build_flags.index(flag) + 1 < len(build_flags):
path = build_flags[build_flags.index(flag) + 1]
return path
def _generate_src_file(self):
src_files = [
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
]
def _search_include_dir(filepath, include_paths):
for inc_path in include_paths:
path = os.path.join(inc_path, filepath)
if os.path.isfile(path):
return path
return ""
result = []
include_options = ("-include", "-imacros")
for f in build_flags:
if f.startswith(include_options):
filepath = _extract_filepath(f, include_options, build_flags)
if not os.path.isabs(filepath):
filepath = _search_include_dir(filepath, includes)
if os.path.isfile(filepath):
result.append(filepath)
return result
def _generate_src_file(self, src_files):
return self._create_tmp_file("\n".join(src_files))
def _generate_inc_file(self):
return self._create_tmp_file("\n".join(self.cpp_includes))
result = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get("platformio", "packages_dir").lower()
):
continue
result.append(inc)
return self._create_tmp_file("\n".join(result))
def clean_up(self):
for f in self._tmp_files:
if isfile(f):
remove(f)
super().clean_up()
# delete temporary dump files generated by addons
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
return
for f in self.get_project_target_files():
dump_file = f + ".dump"
if isfile(dump_file):
remove(dump_file)
for files in self.get_project_target_files(self.options["patterns"]).values():
for f in files:
dump_file = f + ".dump"
if os.path.isfile(dump_file):
os.remove(dump_file)
@staticmethod
def is_check_successful(cmd_result):
# Cppcheck is configured to return '3' if a defect is found
return cmd_result["returncode"] in (0, 3)
@staticmethod
def convert_language_standard(flag):
cpp_standards_map = {
"0x": "11",
"1y": "14",
"1z": "17",
"2a": "20",
}
standard = flag[-2:]
# Note: GNU extensions are not supported, so they are converted to regular standards
return flag.replace("gnu", "c").replace(
standard, cpp_standards_map.get(standard, standard)
)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
project_files = self.get_project_target_files(self.options["patterns"])
src_files_scope = ("c", "c++")
if not any(project_files[t] for t in src_files_scope):
click.echo("Error: Nothing to check.")
return True
for scope, files in project_files.items():
if scope not in src_files_scope:
continue
for src_file in files:
cmd = self.configure_command(scope, src_file)
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))
self.execute_check_cmd(cmd)
self.clean_up()
return self._bad_input
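
Cppcheck does not understand GNU-dialect or pre-release standard names, so the new convert_language_standard() rewrites flags such as -std=gnu++1z into -std=c++17 before they are appended to the command. The helper can be exercised on its own (extracted from the diff above, plus a short demo loop):

def convert_language_standard(flag):
    # Map GCC's pre-release suffixes to final standard numbers and
    # downgrade GNU dialects to the plain standards Cppcheck accepts.
    cpp_standards_map = {"0x": "11", "1y": "14", "1z": "17", "2a": "20"}
    standard = flag[-2:]
    return flag.replace("gnu", "c").replace(
        standard, cpp_standards_map.get(standard, standard)
    )

for flag in ("-std=gnu++11", "-std=gnu++1z", "-std=c99"):
    # Cppcheck expects a double-dash option, hence the extra "-" prefix.
    print("-" + convert_language_standard(flag))
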


@@ -19,39 +19,50 @@ from xml.etree.ElementTree import fromstring
import click
from platformio import proc, util
from platformio import proc
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
from platformio.compat import IS_WINDOWS
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._tmp_dir = tempfile.mkdtemp(prefix="piocheck")
self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i"
self._tmp_output_file = self._generate_tmp_file_path() + ".pvs"
self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg"
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
self.tool_path = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"x64" if "windows" in util.get_systype() else "bin",
self.get_tool_dir("tool-pvs-studio"),
"x64" if IS_WINDOWS else "bin",
"pvs-studio",
)
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
with open(self._tmp_cfg_file, "w") as fp:
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
fp.write(
"exclude-path = "
+ self.config.get_optional_dir("packages").replace("\\", "/")
+ self.config.get("platformio", "packages_dir").replace("\\", "/")
)
with open(self._tmp_cmd_file, "w") as fp:
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
fp.write(
" ".join(
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
)
)
def tool_output_filter(self, line): # pylint: disable=arguments-differ
if any(
err_msg in line.lower()
for err_msg in (
"license was not entered",
"license information is incorrect",
)
):
self._bad_input = True
return line
def _process_defects(self, defects):
for defect in defects:
if not isinstance(defect, DefectItem):
@@ -64,10 +75,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
def _demangle_report(self, output_file):
converter_tool = os.path.join(
get_core_package_dir("tool-pvs-studio"),
"HtmlGenerator"
if "windows" in util.get_systype()
else os.path.join("bin", "plog-converter"),
self.get_tool_dir("tool-pvs-studio"),
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
)
cmd = (
@@ -140,9 +149,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
os.remove(self._tmp_output_file)
if not os.path.isfile(self._tmp_preprocessed_file):
click.echo(
"Error: Missing preprocessed file '%s'" % (self._tmp_preprocessed_file)
)
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
return ""
cmd = [
@@ -175,51 +182,69 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
def _prepare_preprocessed_file(self, src_file):
if os.path.isfile(self._tmp_preprocessed_file):
os.remove(self._tmp_preprocessed_file)
flags = self.cxx_flags
compiler = self.cxx_path
if src_file.endswith(".c"):
flags = self.cc_flags
compiler = self.cc_path
cmd = [compiler, src_file, "-E", "-o", self._tmp_preprocessed_file]
cmd = [
compiler,
'"%s"' % src_file,
"-E",
"-o",
'"%s"' % self._tmp_preprocessed_file,
]
cmd.extend([f for f in flags if f])
cmd.extend(["-D%s" % d for d in self.cpp_defines])
cmd.extend(['"-D%s"' % d.replace('"', '\\"') for d in self.cpp_defines])
cmd.append('@"%s"' % self._tmp_cmd_file)
# Explicitly specify C++ as the language used in .ino files
if src_file.endswith(".ino"):
cmd.insert(1, "-xc++")
result = proc.exec_command(" ".join(cmd), shell=True)
if result["returncode"] != 0:
if result["returncode"] != 0 or result["err"]:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
click.echo(result["err"])
self._bad_input = True
def clean_up(self):
super().clean_up()
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
@staticmethod
def is_check_successful(cmd_result):
return (
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
src_files = [
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
]
for src_file in src_files:
self._prepare_preprocessed_file(src_file)
cmd = self.configure_command(src_file)
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if not cmd:
self._bad_input = True
for scope, files in self.get_project_target_files(
self.options["patterns"]
).items():
if scope not in ("c", "c++"):
continue
for src_file in files:
self._prepare_preprocessed_file(src_file)
cmd = self.configure_command(src_file)
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if not cmd:
self._bad_input = True
continue
result = proc.exec_command(cmd)
# pylint: disable=unsupported-membership-test
if result["returncode"] != 0 or "License was not entered" in result["err"]:
self._bad_input = True
click.echo(result["err"])
continue
result = self.execute_check_cmd(cmd)
if result["returncode"] != 0:
continue
self._process_defects(self.parse_defects(self._tmp_output_file))
self._process_defects(self.parse_defects(self._tmp_output_file))
self.clean_up()
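
PVS-Studio analyzes preprocessed translation units, so _prepare_preprocessed_file() runs the environment's own compiler with -E to expand each source into a .i file first, forcing -xc++ for .ino sketches. A minimal stand-alone version of that step (compiler, flags and paths are placeholders):

import subprocess

def preprocess(src_file, out_file, compiler="gcc", flags=(), defines=()):
    cmd = [compiler, src_file, "-E", "-o", out_file]
    cmd += list(flags)
    cmd += ["-D%s" % d for d in defines]
    if src_file.endswith(".ino"):
        # Arduino sketches have no extension the compiler maps to C++.
        cmd.insert(1, "-xc++")
    result = subprocess.run(cmd, capture_output=True, text=True, check=False)
    return result.returncode == 0 and not result.stderr

# preprocess("src/main.cpp", "/tmp/main.i", defines=["NDEBUG"])
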


@@ -12,20 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from glob import glob
from os import getenv, makedirs, remove
from os.path import basename, isdir, isfile, join, realpath
from shutil import copyfile, copytree
from tempfile import mkdtemp
import glob
import os
import shutil
import tempfile
import click
from platformio import app, fs
from platformio.commands.project import project_init as cmd_project_init
from platformio.commands.project import validate_boards
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import glob_escape
from platformio.exception import CIBuildEnvsEmpty
from platformio.project.commands.init import project_init_cmd, validate_boards
from platformio.project.config import ProjectConfig
@@ -35,8 +32,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
for i, p in enumerate(value):
if p.startswith("~"):
value[i] = fs.expanduser(p)
value[i] = realpath(value[i])
if not glob(value[i]):
value[i] = os.path.abspath(value[i])
if not glob.glob(value[i], recursive=True):
invalid_path = p
break
try:
@@ -53,7 +50,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option(
"--build-dir",
default=mkdtemp,
default=tempfile.mkdtemp,
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
@click.option("--keep-build-dir", is_flag=True)
@@ -80,28 +77,28 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
verbose,
):
if not src and getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
if not src and os.getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
if not src:
raise click.BadParameter("Missing argument 'src'")
try:
app.set_session_var("force_option", True)
if not keep_build_dir and isdir(build_dir):
if not keep_build_dir and os.path.isdir(build_dir):
fs.rmtree(build_dir)
if not isdir(build_dir):
makedirs(build_dir)
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
for dir_name, patterns in dict(lib=lib, src=src).items():
if not patterns:
continue
contents = []
for p in patterns:
contents += glob(p)
_copy_contents(join(build_dir, dir_name), contents)
contents += glob.glob(p, recursive=True)
_copy_contents(os.path.join(build_dir, dir_name), contents)
if project_conf and isfile(project_conf):
if project_conf and os.path.isfile(project_conf):
_copy_project_conf(build_dir, project_conf)
elif not board:
raise CIBuildEnvsEmpty()
@@ -111,7 +108,7 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
# initialise project
ctx.invoke(
cmd_project_init,
project_init_cmd,
project_dir=build_dir,
board=board,
project_option=project_option,
@@ -124,52 +121,55 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
fs.rmtree(build_dir)
def _copy_contents(dst_dir, contents):
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
items = {"dirs": set(), "files": set()}
for path in contents:
if isdir(path):
if os.path.isdir(path):
items["dirs"].add(path)
elif isfile(path):
elif os.path.isfile(path):
items["files"].add(path)
dst_dir_name = basename(dst_dir)
dst_dir_name = os.path.basename(dst_dir)
if dst_dir_name == "src" and len(items["dirs"]) == 1:
copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
if not os.path.isdir(dst_dir):
shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
else:
if not isdir(dst_dir):
makedirs(dst_dir)
if not os.path.isdir(dst_dir):
os.makedirs(dst_dir)
for d in items["dirs"]:
copytree(d, join(dst_dir, basename(d)), symlinks=True)
src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
if not os.path.isdir(src_dst_dir):
shutil.copytree(d, src_dst_dir, symlinks=True)
if not items["files"]:
return
if dst_dir_name == "lib":
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))
for f in items["files"]:
dst_file = join(dst_dir, basename(f))
dst_file = os.path.join(dst_dir, os.path.basename(f))
if f == dst_file:
continue
copyfile(f, dst_file)
shutil.copyfile(f, dst_file)
def _exclude_contents(dst_dir, patterns):
contents = []
for p in patterns:
contents += glob(join(glob_escape(dst_dir), p))
contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
for path in contents:
path = realpath(path)
if isdir(path):
path = os.path.abspath(path)
if os.path.isdir(path):
fs.rmtree(path)
elif isfile(path):
remove(path)
elif os.path.isfile(path):
os.remove(path)
def _copy_project_conf(build_dir, project_conf):
config = ProjectConfig(project_conf, parse_extra=False)
if config.has_section("platformio"):
config.remove_section("platformio")
config.save(join(build_dir, "platformio.ini"))
config.save(os.path.join(build_dir, "platformio.ini"))
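
The ci command now calls the glob module directly: glob.glob(pattern, recursive=True) lets "**" patterns descend into subdirectories, and glob.escape() keeps special characters in the build directory from being treated as wildcards when building the exclude patterns. A short sketch (paths are illustrative):

import glob
import os

build_dir = "/tmp/pio-ci [test]"   # "[" would otherwise start a character class
sources = glob.glob("examples/**/*.cpp", recursive=True)
excluded = glob.glob(
    os.path.join(glob.escape(build_dir), "**/*.o"), recursive=True
)
print(len(sources), "sources,", len(excluded), "objects to exclude")
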


@@ -12,10 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
[report]
# Regexes for lines to exclude from consideration
exclude_lines =
pragma: no cover
def __repr__
raise AssertionError
raise NotImplementedError
# pylint: disable=unused-import
from platformio.debug.command import debug_cmd as cli


@@ -1,298 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import signal
import time
from hashlib import sha1
from os.path import basename, dirname, isdir, join, realpath, splitext
from tempfile import mkdtemp
from twisted.internet import protocol # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import stdio # pylint: disable=import-error
from twisted.internet import task # pylint: disable=import-error
from platformio import app, fs, proc, telemetry, util
from platformio.commands.debug import helpers, initcfgs
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.process import BaseProcess
from platformio.commands.debug.server import DebugServer
from platformio.compat import hashlib_encode_data, is_bytes
from platformio.project.helpers import get_project_cache_dir
LOG_FILE = None
class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
PIO_SRC_NAME = ".pioinit"
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
def __init__(self, project_dir, args, debug_options, env_options):
self.project_dir = project_dir
self.args = list(args)
self.debug_options = debug_options
self.env_options = env_options
self._debug_server = DebugServer(debug_options, env_options)
self._session_id = None
if not isdir(get_project_cache_dir()):
os.makedirs(get_project_cache_dir())
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")
self._target_is_run = False
self._last_server_activity = 0
self._auto_continue_timer = None
self._errors_buffer = b""
def spawn(self, gdb_path, prog_path):
session_hash = gdb_path + prog_path
self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
self._kill_previous_session()
patterns = {
"PROJECT_DIR": self.project_dir,
"PROG_PATH": prog_path,
"PROG_DIR": dirname(prog_path),
"PROG_NAME": basename(splitext(prog_path)[0]),
"DEBUG_PORT": self.debug_options["port"],
"UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
"INIT_BREAK": self.debug_options["init_break"] or "",
"LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
}
self._debug_server.spawn(patterns)
if not patterns["DEBUG_PORT"]:
patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()
self.generate_pioinit(self._gdbsrc_dir, patterns)
# start GDB client
args = [
"piogdb",
"-q",
"--directory",
self._gdbsrc_dir,
"--directory",
self.project_dir,
"-l",
"10",
]
args.extend(self.args)
if not gdb_path:
raise DebugInvalidOptionsError("GDB client is not configured")
gdb_data_dir = self._get_data_dir(gdb_path)
if gdb_data_dir:
args.extend(["--data-directory", gdb_data_dir])
args.append(patterns["PROG_PATH"])
return reactor.spawnProcess(
self, gdb_path, args, path=self.project_dir, env=os.environ
)
@staticmethod
def _get_data_dir(gdb_path):
if "msp430" in gdb_path:
return None
gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
return gdb_data_dir if isdir(gdb_data_dir) else None
def generate_pioinit(self, dst_dir, patterns):
server_exe = (
(self.debug_options.get("server") or {}).get("executable", "").lower()
)
if "jlink" in server_exe:
cfg = initcfgs.GDB_JLINK_INIT_CONFIG
elif "st-util" in server_exe:
cfg = initcfgs.GDB_STUTIL_INIT_CONFIG
elif "mspdebug" in server_exe:
cfg = initcfgs.GDB_MSPDEBUG_INIT_CONFIG
elif "qemu" in server_exe:
cfg = initcfgs.GDB_QEMU_INIT_CONFIG
elif self.debug_options["require_debug_port"]:
cfg = initcfgs.GDB_BLACKMAGIC_INIT_CONFIG
else:
cfg = initcfgs.GDB_DEFAULT_INIT_CONFIG
commands = cfg.split("\n")
if self.debug_options["init_cmds"]:
commands = self.debug_options["init_cmds"]
commands.extend(self.debug_options["extra_cmds"])
if not any("define pio_reset_run_target" in cmd for cmd in commands):
commands = [
"define pio_reset_run_target",
" echo Warning! Undefined pio_reset_run_target command\\n",
" monitor reset",
"end",
] + commands
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
commands = [
"define pio_reset_halt_target",
" echo Warning! Undefined pio_reset_halt_target command\\n",
" monitor reset halt",
"end",
] + commands
if not any("define pio_restart_target" in cmd for cmd in commands):
commands += [
"define pio_restart_target",
" pio_reset_halt_target",
" $INIT_BREAK",
" %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
"end",
]
banner = [
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
"echo PlatformIO: Initializing remote target...\\n",
]
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
fp.write("\n".join(self.apply_patterns(commands, patterns)))
def connectionMade(self):
self._lock_session(self.transport.pid)
p = protocol.Protocol()
p.dataReceived = self.onStdInData
stdio.StandardIO(p)
def onStdInData(self, data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
self._last_server_activity = time.time()
if b"-exec-run" in data:
if self._target_is_run:
token, _ = data.split(b"-", 1)
self.outReceived(token + b"^running\n")
return
data = data.replace(b"-exec-run", b"-exec-continue")
if b"-exec-continue" in data:
self._target_is_run = True
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
self.transport.write(b"pio_reset_run_target\n")
self.transport.write(data)
def processEnded(self, reason): # pylint: disable=unused-argument
self._unlock_session()
if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
fs.rmtree(self._gdbsrc_dir)
if self._debug_server:
self._debug_server.terminate()
reactor.stop()
def outReceived(self, data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
self._last_server_activity = time.time()
super(GDBClient, self).outReceived(data)
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug", "Started", telemetry.encode_run_environment(self.env_options)
)
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
self._auto_continue_timer.start(0.1)
def errReceived(self, data):
super(GDBClient, self).errReceived(data)
self._handle_error(data)
def console_log(self, msg):
if helpers.is_gdbmi_mode():
msg = helpers.escape_gdbmi_stream("~", msg)
self.outReceived(msg if is_bytes(msg) else msg.encode())
def _auto_exec_continue(self):
auto_exec_delay = 0.5 # in seconds
if self._last_server_activity > (time.time() - auto_exec_delay):
return
if self._auto_continue_timer:
self._auto_continue_timer.stop()
self._auto_continue_timer = None
if not self.debug_options["init_break"] or self._target_is_run:
return
self.console_log(
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
% self.debug_options["init_break"]
)
self.console_log(
"PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
)
self.transport.write(
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
)
self._target_is_run = True
def _handle_error(self, data):
self._errors_buffer += data
if self.PIO_SRC_NAME.encode() not in data or b"Error in sourced" not in data:
return
last_errors = self._errors_buffer.decode()
last_errors = " ".join(reversed(last_errors.split("\n")))
last_errors = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_errors, flags=re.M)
err = "%s -> %s" % (
telemetry.encode_run_environment(self.env_options),
last_errors,
)
telemetry.send_exception("DebugInitError: %s" % err)
self.transport.loseConnection()
def _kill_previous_session(self):
assert self._session_id
pid = None
with app.ContentCache() as cc:
pid = cc.get(self._session_id)
cc.delete(self._session_id)
if not pid:
return
if "windows" in util.get_systype():
kill = ["Taskkill", "/PID", pid, "/F"]
else:
kill = ["kill", pid]
try:
proc.exec_command(kill)
except: # pylint: disable=bare-except
pass
def _lock_session(self, pid):
if not self._session_id:
return
with app.ContentCache() as cc:
cc.set(self._session_id, str(pid), "1h")
def _unlock_session(self):
if not self._session_id:
return
with app.ContentCache() as cc:
cc.delete(self._session_id)
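
The removed GDBClient stored the debug session's PID in PlatformIO's content cache and, on the next launch, terminated any stale session with Taskkill on Windows or kill elsewhere. A compact sketch of that cross-platform kill (the cache lookup is replaced by a plain argument, and platform.system() stands in for util.get_systype()):

import platform
import subprocess

def kill_previous_session(pid):
    if not pid:
        return
    if platform.system() == "Windows":
        cmd = ["Taskkill", "/PID", str(pid), "/F"]
    else:
        cmd = ["kill", str(pid)]
    try:
        subprocess.run(cmd, check=False, capture_output=True)
    except OSError:
        pass  # the process may already be gone or the tool unavailable

# kill_previous_session(12345)
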


@@ -1,159 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, too-many-statements
# pylint: disable=too-many-locals, too-many-branches
import os
import signal
from os.path import isfile
import click
from platformio import app, exception, fs, proc, util
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.managers.core import inject_contrib_pysite
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--environment", "-e", metavar="<environment>")
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
app.set_session_var("custom_project_conf", project_conf)
# use env variables from Eclipse or CLion
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
break
if os.getenv(sysenv):
project_dir = os.getenv(sysenv)
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config.validate(envs=[environment] if environment else None)
env_name = environment or helpers.get_default_debug_env(config)
env_options = config.items(env=env_name, as_dict=True)
if not set(env_options.keys()) >= set(["platform", "board"]):
raise ProjectEnvsNotAvailableError()
debug_options = helpers.validate_debug_options(ctx, env_options)
assert debug_options
if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
configuration = load_project_ide_data(project_dir, env_name)
if not configuration:
raise DebugInvalidOptionsError("Could not load debug configuration")
if "--version" in __unprocessed:
result = proc.exec_command([configuration["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
click.echo(
helpers.escape_gdbmi_stream("~", str(e) + "\n")
if helpers.is_gdbmi_mode()
else str(e) + "\n",
nl=False,
)
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, configuration
)
rebuild_prog = False
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
if load_mode == "always":
rebuild_prog = preload or not helpers.has_debug_symbols(
configuration["prog_path"]
)
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
configuration["prog_path"]
) or not helpers.has_debug_symbols(configuration["prog_path"])
else:
rebuild_prog = not isfile(configuration["prog_path"])
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
debug_options["load_cmds"] = []
if rebuild_prog:
if helpers.is_gdbmi_mode():
click.echo(
helpers.escape_gdbmi_stream(
"~", "Preparing firmware for debugging...\n"
),
nl=False,
)
stream = helpers.GDBMIConsoleStream()
with util.capture_std_streams(stream):
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
stream.close()
else:
click.echo("Preparing firmware for debugging...")
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(configuration["prog_path"])
if not isfile(configuration["prog_path"]):
raise DebugInvalidOptionsError("Program/firmware is missed")
# run debugging client
inject_contrib_pysite()
# pylint: disable=import-outside-toplevel
from platformio.commands.debug.client import GDBClient, reactor
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
client.spawn(configuration["gdb_path"], configuration["prog_path"])
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()
return True


@@ -1,306 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.managers.platform import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
STDOUT = sys.stdout
def write(self, text):
self.STDOUT.write(escape_gdbmi_stream("~", text))
self.STDOUT.flush()
def is_gdbmi_mode():
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
bytes_stream = False
if is_bytes(stream):
bytes_stream = True
stream = stream.decode()
if not stream:
return b"" if bytes_stream else ""
ends_nl = stream.endswith("\n")
stream = re.sub(r"\\+", "\\\\\\\\", stream)
stream = stream.replace('"', '\\"')
stream = stream.replace("\n", "\\n")
stream = '%s"%s"' % (prefix, stream)
if ends_nl:
stream += "\n"
return stream.encode() if bytes_stream else stream
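For context, GDB/MI wraps console output in quoted "~" stream records, which is what the escaping above produces; a minimal standalone sketch of the same record format (example values are assumed, this is not the project helper):

def console_record(text):
    # escape backslashes, quotes and newlines, then wrap as a GDB/MI "~" record
    body = text.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")
    return '~"%s"\n' % body

print(console_record('Reading symbols from "firmware.elf"\n'), end="")
# prints: ~"Reading symbols from \"firmware.elf\"\n"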
def get_default_debug_env(config):
default_envs = config.default_envs()
all_envs = config.envs()
for env in default_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
for env in all_envs:
if config.get("env:" + env, "build_type") == "debug":
return env
return default_envs[0] if default_envs else all_envs[0]
def predebug_project(ctx, project_dir, env_name, preload, verbose):
ctx.invoke(
cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose,
)
if preload:
time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
try:
platform = PlatformFactory.newPlatform(env_options["platform"])
except exception.UnknownPlatform:
cmd_ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.newPlatform(env_options["platform"])
board_config = platform.board_config(env_options["board"])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
server_options = None
# specific server per system
if isinstance(tool_settings.get("server", {}), list):
for item in tool_settings["server"][:]:
tool_settings["server"] = item
if util.get_systype() in item.get("system", []):
break
# user overwrites debug server
if env_options.get("debug_server"):
server_options = {
"cwd": None,
"executable": None,
"arguments": env_options.get("debug_server"),
}
server_options["executable"] = server_options["arguments"][0]
server_options["arguments"] = server_options["arguments"][1:]
elif "server" in tool_settings:
server_package = tool_settings["server"].get("package")
server_package_dir = (
platform.get_package_dir(server_package) if server_package else None
)
if server_package and not server_package_dir:
platform.install_packages(
with_packages=[server_package], skip_default_package=True, silent=True
)
server_package_dir = platform.get_package_dir(server_package)
server_options = dict(
cwd=server_package_dir if server_package else None,
executable=tool_settings["server"].get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir
else a
for a in tool_settings["server"].get("arguments", [])
],
)
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
result = dict(
tool=tool_name,
upload_protocol=env_options.get(
"upload_protocol", board_config.get("upload", {}).get("protocol")
),
load_cmds=_cleanup_cmds(
env_options.get(
"debug_load_cmds",
tool_settings.get(
"load_cmds",
tool_settings.get(
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
),
),
)
),
load_mode=env_options.get(
"debug_load_mode",
tool_settings.get(
"load_mode", ProjectOptions["env.debug_load_mode"].default
),
),
init_break=env_options.get(
"debug_init_break",
tool_settings.get(
"init_break", ProjectOptions["env.debug_init_break"].default
),
),
init_cmds=_cleanup_cmds(
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
),
extra_cmds=extra_cmds,
require_debug_port=tool_settings.get("require_debug_port", False),
port=reveal_debug_port(
env_options.get("debug_port", tool_settings.get("port")),
tool_name,
tool_settings,
),
server=server_options,
)
return result
def configure_esp32_load_cmds(debug_options, configuration):
ignore_conds = [
debug_options["load_cmds"] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
not configuration.get("flash_extra_images"),
not all(
[isfile(item["path"]) for item in configuration.get("flash_extra_images")]
),
]
if any(ignore_conds):
return debug_options["load_cmds"]
mon_cmds = [
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
for item in configuration.get("flash_extra_images")
]
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
% fs.to_unix_path(configuration["prog_path"][:-4])
)
return mon_cmds
def has_debug_symbols(prog_path):
if not isfile(prog_path):
return False
matched = {
b".debug_info": False,
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
b"__PLATFORMIO_BUILD_DEBUG__": False,
}
with open(prog_path, "rb") as fp:
last_data = b""
while True:
data = fp.read(1024)
if not data:
break
for pattern, found in matched.items():
if found:
continue
if pattern in last_data + data:
matched[pattern] = True
last_data = data
return all(matched.values())
def is_prog_obsolete(prog_path):
prog_hash_path = prog_path + ".sha1"
if not isfile(prog_path):
return True
shasum = sha1()
with open(prog_path, "rb") as fp:
while True:
data = fp.read(1024)
if not data:
break
shasum.update(data)
new_digest = shasum.hexdigest()
old_digest = (
fs.get_file_contents(prog_hash_path) if isfile(prog_hash_path) else None
)
if new_digest == old_digest:
return False
fs.write_file_contents(prog_hash_path, new_digest)
return True
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
if set(["*", "?", "[", "]"]) & set(env_debug_port):
return env_debug_port
return None
def _is_match_pattern(port):
pattern = _get_pattern()
if not pattern:
return True
return fnmatch(port, pattern)
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item["port"]):
continue
port = item["port"]
if tool_name.startswith("blackmagic"):
if (
"windows" in util.get_systype()
and port.startswith("COM")
and len(port) > 4
):
port = "\\\\.\\%s" % port
if "GDB" in item["description"]:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
return port
return None
if env_debug_port and not _get_pattern():
return env_debug_port
if not tool_settings.get("require_debug_port"):
return None
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
return debug_port
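A small illustration of the glob-style debug_port matching used in reveal_debug_port above (port names and the pattern are made-up sample values):

from fnmatch import fnmatch

ports = ["/dev/ttyUSB0", "/dev/ttyACM0", "COM7"]
pattern = "/dev/ttyUSB*"  # a wildcard a user might set as debug_port
print([p for p in ports if fnmatch(p, pattern)])  # ['/dev/ttyUSB0']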


@@ -1,125 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
GDB_DEFAULT_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_run_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_STUTIL_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_run_target
monitor reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_JLINK_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_run_target
monitor clrbp
monitor reset
monitor go
end
target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_BLACKMAGIC_INIT_CONFIG = """
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_run_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
GDB_MSPDEBUG_INIT_CONFIG = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
target extended-remote $DEBUG_PORT
monitor erase
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""
GDB_QEMU_INIT_CONFIG = """
define pio_reset_halt_target
monitor system_reset
end
define pio_reset_run_target
monitor system_reset
end
target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""


@@ -1,83 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signal
import click
from twisted.internet import protocol # pylint: disable=import-error
from platformio import fs
from platformio.compat import string_types
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_core_dir
LOG_FILE = None
class BaseProcess(protocol.ProcessProtocol, object):
STDOUT_CHUNK_SIZE = 2048
COMMON_PATTERNS = {
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
"PYTHONEXE": get_pythonexe_path(),
}
def apply_patterns(self, source, patterns=None):
_patterns = self.COMMON_PATTERNS.copy()
_patterns.update(patterns or {})
for key, value in _patterns.items():
if key.endswith(("_DIR", "_PATH")):
_patterns[key] = fs.to_unix_path(value)
def _replace(text):
for key, value in _patterns.items():
pattern = "$%s" % key
text = text.replace(pattern, value or "")
return text
if isinstance(source, string_types):
source = _replace(source)
elif isinstance(source, (list, dict)):
items = enumerate(source) if isinstance(source, list) else source.items()
for key, value in items:
if isinstance(value, string_types):
source[key] = _replace(value)
elif isinstance(value, (list, dict)):
source[key] = self.apply_patterns(value, patterns)
return source
def outReceived(self, data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[: self.STDOUT_CHUNK_SIZE]
click.echo(chunk, nl=False)
data = data[self.STDOUT_CHUNK_SIZE :]
@staticmethod
def errReceived(data):
if LOG_FILE:
with open(LOG_FILE, "ab") as fp:
fp.write(data)
click.echo(data, nl=False, err=True)
@staticmethod
def processEnded(_):
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
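apply_patterns above walks nested strings, lists and dicts; a self-contained, simplified sketch of the same idea (illustrative only, not the original method):

def substitute(source, patterns):
    # recursively replace $NAME placeholders in strings, lists and dicts
    if isinstance(source, str):
        for key, value in patterns.items():
            source = source.replace("$" + key, value or "")
        return source
    if isinstance(source, list):
        return [substitute(item, patterns) for item in source]
    if isinstance(source, dict):
        return {k: substitute(v, patterns) for k, v in source.items()}
    return source

server = {"executable": "$PACKAGE_DIR/bin/openocd", "arguments": ["-s", "$PACKAGE_DIR/scripts"]}
print(substitute(server, {"PACKAGE_DIR": "/opt/tool-openocd"}))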


@@ -1,138 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from os.path import isdir, isfile, join
from twisted.internet import reactor # pylint: disable=import-error
from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process import BaseProcess
from platformio.proc import where_is_program
class DebugServer(BaseProcess):
def __init__(self, debug_options, env_options):
self.debug_options = debug_options
self.env_options = env_options
self._debug_port = None
self._transport = None
self._process_ended = False
def spawn(self, patterns): # pylint: disable=too-many-branches
systype = util.get_systype()
server = self.debug_options.get("server")
if not server:
return None
server = self.apply_patterns(server, patterns)
server_executable = server["executable"]
if not server_executable:
return None
if server["cwd"]:
server_executable = join(server["cwd"], server_executable)
if (
"windows" in systype
and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")
):
server_executable = server_executable + ".exe"
if not isfile(server_executable):
server_executable = where_is_program(server_executable)
if not isfile(server_executable):
raise DebugInvalidOptionsError(
"\nCould not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n\n"
"See documentation or contact contact@platformio.org:\n"
"https://docs.platformio.org/page/plus/debugging.html\n"
% server_executable
)
self._debug_port = ":3333"
openocd_pipe_allowed = all(
[not self.debug_options["port"], "openocd" in server_executable]
)
if openocd_pipe_allowed:
args = []
if server["cwd"]:
args.extend(["-s", server["cwd"]])
args.extend(
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
)
args.extend(server["arguments"])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
)
self._debug_port = '| "%s" %s' % (server_executable, str_args)
self._debug_port = fs.to_unix_path(self._debug_port)
else:
env = os.environ.copy()
# prepend server "lib" folder to LD path
if (
"windows" not in systype
and server["cwd"]
and isdir(join(server["cwd"], "lib"))
):
ld_key = (
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
)
env[ld_key] = join(server["cwd"], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
# prepend BIN to PATH
if server["cwd"] and isdir(join(server["cwd"], "bin")):
env["PATH"] = "%s%s%s" % (
join(server["cwd"], "bin"),
os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")),
)
self._transport = reactor.spawnProcess(
self,
server_executable,
[server_executable] + server["arguments"],
path=server["cwd"],
env=env,
)
if "mspdebug" in server_executable.lower():
self._debug_port = ":2000"
elif "jlink" in server_executable.lower():
self._debug_port = ":2331"
elif "qemu" in server_executable.lower():
self._debug_port = ":1234"
return self._transport
def get_debug_port(self):
return self._debug_port
def outReceived(self, data):
super(DebugServer, self).outReceived(
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
)
def processEnded(self, reason):
self._process_ended = True
super(DebugServer, self).processEnded(reason)
def terminate(self):
if self._process_ended or not self._transport:
return
try:
self._transport.signalProcess("KILL")
except: # pylint: disable=bare-except
pass
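The remote target handed to GDB depends on which server was launched; a condensed, illustrative restatement of the defaults chosen above (not the original method, and it skips the full pipe-argument handling):

def default_gdb_port(server_executable, allow_pipe=True):
    name = server_executable.lower()
    if allow_pipe and "openocd" in name:
        return "pipe"      # OpenOCD is driven through a GDB pipe when no port is forced
    if "mspdebug" in name:
        return ":2000"
    if "jlink" in name:
        return ":2331"
    if "qemu" in name:
        return ":1234"
    return ":3333"

print(default_gdb_port("JLinkGDBServerCLExe", allow_pipe=False))  # :2331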


@@ -1,280 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from fnmatch import fnmatch
from os import getcwd
import click
from serial.tools import miniterm
from platformio import exception, fs, util
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
@click.group(short_help="Monitor device or list existing")
def cli():
pass
@cli.command("list", short_help="List devices")
@click.option("--serial", is_flag=True, help="List serial ports, default")
@click.option("--logical", is_flag=True, help="List logical devices")
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
@click.option("--json-output", is_flag=True)
def device_list( # pylint: disable=too-many-branches
serial, logical, mdns, json_output
):
if not logical and not mdns:
serial = True
data = {}
if serial:
data["serial"] = util.get_serial_ports()
if logical:
data["logical"] = util.get_logical_devices()
if mdns:
data["mdns"] = util.get_mdns_services()
single_key = list(data)[0] if len(list(data)) == 1 else None
if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data)
)
titles = {
"serial": "Serial Ports",
"logical": "Logical Devices",
"mdns": "Multicast DNS Services",
}
for key, value in data.items():
if not single_key:
click.secho(titles[key], bold=True)
click.echo("=" * len(titles[key]))
if key == "serial":
for item in value:
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
if key == "logical":
for item in value:
click.secho(item["path"], fg="cyan")
click.echo("-" * len(item["path"]))
click.echo("Name: %s" % item["name"])
click.echo("")
if key == "mdns":
for item in value:
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("Type: %s" % item["type"])
click.echo("IP: %s" % item["ip"])
click.echo("Port: %s" % item["port"])
if item["properties"]:
click.echo(
"Properties: %s"
% (
"; ".join(
[
"%s=%s" % (k, v)
for k, v in item["properties"].items()
]
)
)
)
click.echo("")
if single_key:
click.echo("")
return True
@cli.command("monitor", short_help="Monitor device (Serial)")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
def device_monitor(**kwargs): # pylint: disable=too-many-branches
click.echo(
"Looking for advanced Serial Monitor with UI? "
"Check http://bit.ly/pio-advanced-monitor"
)
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
project_options = get_project_options(kwargs["environment"])
kwargs = apply_project_monitor_options(kwargs, project_options)
except NotPlatformIOProjectError:
pass
if not kwargs["port"]:
ports = util.get_serial_ports(filter_hwid=True)
if len(ports) == 1:
kwargs["port"] = ports[0]["port"]
elif "platform" in project_options and "board" in project_options:
board_hwids = get_board_hwids(
kwargs["project_dir"],
project_options["platform"],
project_options["board"],
)
for item in ports:
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item["hwid"]:
kwargs["port"] = item["port"]
break
if kwargs["port"]:
break
elif kwargs["port"] and (set(["*", "?", "[", "]"]) & set(kwargs["port"])):
for item in util.get_serial_ports():
if fnmatch(item["port"], kwargs["port"]):
kwargs["port"] = item["port"]
break
# override system argv with patched options
sys.argv = ["monitor"] + options_to_argv(
kwargs,
project_options,
ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"),
)
try:
miniterm.main(
default_port=kwargs["port"],
default_baudrate=kwargs["baud"] or 9600,
default_rts=kwargs["rts"],
default_dtr=kwargs["dtr"],
)
except Exception as e:
raise exception.MinitermException(e)
def apply_project_monitor_options(cli_options, project_options):
for k in ("port", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k
if k == "speed":
k = "baud"
if cli_options[k] is None and k2 in project_options:
cli_options[k] = project_options[k2]
if k != "port":
cli_options[k] = int(cli_options[k])
return cli_options
def options_to_argv(cli_options, project_options, ignore=None):
result = project_options.get("monitor_flags", [])
for k, v in cli_options.items():
if v is None or (ignore and k in ignore):
continue
k = "--" + k.replace("_", "-")
if k in project_options.get("monitor_flags", []):
continue
if isinstance(v, bool):
if v:
result.append(k)
elif isinstance(v, tuple):
for i in v:
result.extend([k, i])
else:
result.extend([k, str(v)])
return result
def get_project_options(environment=None):
config = ProjectConfig.get_instance()
config.validate(envs=[environment] if environment else None)
if not environment:
default_envs = config.default_envs()
if default_envs:
environment = default_envs[0]
else:
environment = config.envs()[0]
return config.items(env=environment, as_dict=True)
def get_board_hwids(project_dir, platform, board):
with fs.cd(project_dir):
return (
PlatformFactory.newPlatform(platform)
.board_config(board)
.get("build.hwids", [])
)
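The VID:PID matching in device_monitor above compares a normalized hwid string against the detected port metadata; a tiny self-contained illustration (the board and port values are invented samples):

board_hwids = [["0x2341", "0x0043"]]  # VID/PID pairs as they appear in a board manifest
ports = [{"port": "/dev/ttyACM0", "hwid": "USB VID:PID=2341:0043 LOCATION=1-1"}]

for item in ports:
    for vid, pid in board_hwids:
        needle = ("%s:%s" % (vid, pid)).replace("0x", "")
        if needle in item["hwid"]:
            print("auto-detected port:", item["port"])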


@@ -0,0 +1,18 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-import
from platformio.device.filters.base import (
DeviceMonitorFilterBase as DeviceMonitorFilter,
)


@@ -0,0 +1,30 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from platformio.device.commands.list import device_list_cmd
from platformio.device.commands.monitor import device_monitor_cmd
@click.group(
"device",
commands=[
device_list_cmd,
device_monitor_cmd,
],
short_help="Device manager & Serial/Socket monitor",
)
def cli():
pass
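The group above is assembled from pre-built commands via the commands= argument; a minimal, self-contained sketch of the same composition pattern (demo commands only, assuming a Click release that accepts a command sequence):

import click
from click.testing import CliRunner

@click.command("list")
def list_cmd():
    click.echo("listing devices...")

@click.command("monitor")
def monitor_cmd():
    click.echo("opening monitor...")

@click.group("device", commands=[list_cmd, monitor_cmd], short_help="demo device group")
def cli():
    pass

print(CliRunner().invoke(cli, ["list"]).output)  # listing devices...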


@@ -12,24 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals
import mimetypes
import socket
from os.path import isdir
import click
from platformio import exception
from platformio.compat import WINDOWS
from platformio.managers.core import (
build_contrib_pysite_deps,
get_core_package_dir,
inject_contrib_pysite,
)
from platformio.commands.home.helpers import is_port_used
from platformio.commands.home.run import run_server
@click.command("home", short_help="PIO Home")
@click.command("home", short_help="GUI to manage PlatformIO")
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
@click.option(
"--host",
@@ -49,63 +40,28 @@ from platformio.managers.core import (
"are connected. Default is 0 which means never auto shutdown"
),
)
def cli(port, host, no_open, shutdown_timeout):
# pylint: disable=import-error, import-outside-toplevel
# import contrib modules
inject_contrib_pysite()
try:
from autobahn.twisted.resource import WebSocketResource
except: # pylint: disable=bare-except
build_contrib_pysite_deps(get_core_package_dir("contrib-pysite"))
from autobahn.twisted.resource import WebSocketResource
from twisted.internet import reactor
from twisted.web import server
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.server import JSONRPCServerFactory
from platformio.commands.home.web import WebRoot
factory = JSONRPCServerFactory(shutdown_timeout)
factory.addHandler(AppRPC(), namespace="app")
factory.addHandler(IDERPC(), namespace="ide")
factory.addHandler(MiscRPC(), namespace="misc")
factory.addHandler(OSRPC(), namespace="os")
factory.addHandler(PIOCoreRPC(), namespace="core")
factory.addHandler(ProjectRPC(), namespace="project")
contrib_dir = get_core_package_dir("contrib-piohome")
if not isdir(contrib_dir):
raise exception.PlatformioException("Invalid path to PIO Home Contrib")
@click.option(
"--session-id",
help=(
"A unique session identifier to keep PIO Home isolated from other instances "
"and protect from 3rd party access"
),
)
def cli(port, host, no_open, shutdown_timeout, session_id):
# Ensure PIO Home mimetypes are known
mimetypes.add_type("text/html", ".html")
mimetypes.add_type("text/css", ".css")
mimetypes.add_type("application/javascript", ".js")
root = WebRoot(contrib_dir)
root.putChild(b"wsrpc", WebSocketResource(factory))
site = server.Site(root)
# hook for `platformio-node-helpers`
if host == "__do_not_start__":
return
already_started = is_port_used(host, port)
home_url = "http://%s:%d" % (host, port)
if not no_open:
if already_started:
click.launch(home_url)
else:
reactor.callLater(1, lambda: click.launch(home_url))
home_url = "http://%s:%d%s" % (
host,
port,
("/session/%s/" % session_id) if session_id else "/",
)
click.echo(
"\n".join(
[
@@ -114,40 +70,25 @@ def cli(port, host, no_open, shutdown_timeout):
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
"|__|____|__%s" % ("_" * len(home_url)),
]
)
)
click.echo("")
click.echo("Open PlatformIO Home in your browser by this URL => %s" % home_url)
if already_started:
if is_port_used(host, port):
click.secho(
"PlatformIO Home server is already started in another process.", fg="yellow"
)
if not no_open:
click.launch(home_url)
return
click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")
reactor.listenTCP(port, site, interface=host)
reactor.run()
def is_port_used(host, port):
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if WINDOWS:
try:
s.bind((host, port))
s.close()
return False
except (OSError, socket.error):
pass
else:
try:
s.connect((host, port))
s.close()
except socket.error:
return False
return True
run_server(
host=host,
port=port,
no_open=no_open,
shutdown_timeout=shutdown_timeout,
home_url=home_url,
)


@@ -12,58 +12,49 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=keyword-arg-before-vararg, arguments-differ
import os
import socket
import requests
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import threads # pylint: disable=import-error
from starlette.concurrency import run_in_threadpool
from platformio import util
from platformio.compat import IS_WINDOWS
from platformio.proc import where_is_program
class AsyncSession(requests.Session):
def __init__(self, n=None, *args, **kwargs):
if n:
pool = reactor.getThreadPool()
pool.adjustPoolsize(0, n)
super(AsyncSession, self).__init__(*args, **kwargs)
def request(self, *args, **kwargs):
func = super(AsyncSession, self).request
return threads.deferToThread(func, *args, **kwargs)
def wrap(self, *args, **kwargs): # pylint: disable=no-self-use
return defer.ensureDeferred(*args, **kwargs)
async def request( # pylint: disable=signature-differs,invalid-overridden-method
self, *args, **kwargs
):
func = super().request
return await run_in_threadpool(func, *args, **kwargs)
@util.memoized(expire="60s")
def requests_session():
return AsyncSession(n=5)
return AsyncSession()
@util.memoized(expire="60s")
def get_core_fullpath():
return where_is_program(
"platformio" + (".exe" if "windows" in util.get_systype() else "")
)
return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))
@util.memoized(expire="10s")
def is_twitter_blocked():
ip = "104.244.42.1"
timeout = 2
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % ip, allow_redirects=False, timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
return False
except: # pylint: disable=bare-except
pass
def is_port_used(host, port):
socket.setdefaulttimeout(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if IS_WINDOWS:
try:
s.bind((host, port))
s.close()
return False
except (OSError, socket.error):
pass
else:
try:
s.connect((host, port))
s.close()
except socket.error:
return False
return True
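For a quick sanity check of the connect-based branch of is_port_used, here is a small standalone sketch (not the original helper, and it skips the Windows bind fallback):

import socket

def port_in_use(host, port, timeout=1.0):
    # a TCP connect that succeeds means something is already listening
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(timeout)
        return sock.connect_ex((host, port)) == 0

print(port_in_use("127.0.0.1", 8008))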


@@ -0,0 +1,29 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.clients.account import AccountClient
class AccountRPC:
@staticmethod
def call_client(method, *args, **kwargs):
try:
client = AccountClient()
return getattr(client, method)(*args, **kwargs)
except Exception as e: # pylint: disable=bare-except
raise JSONRPC20DispatchException(
code=4003, message="PIO Account Call Error", data=str(e)
)


@@ -14,15 +14,15 @@
from __future__ import absolute_import
from os.path import join
import os
from pathlib import Path
from platformio import __version__, app, fs, util
from platformio.project.helpers import get_project_core_dir, is_platformio_project
from platformio.project.config import ProjectConfig
from platformio.project.helpers import is_platformio_project
class AppRPC(object):
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
class AppRPC:
IGNORE_STORAGE_KEYS = [
"cid",
@@ -34,9 +34,16 @@ class AppRPC(object):
"projectsDir",
]
@staticmethod
def get_state_path():
core_dir = ProjectConfig.get_instance().get("platformio", "core_dir")
if not os.path.isdir(core_dir):
os.makedirs(core_dir)
return os.path.join(core_dir, "homestate.json")
@staticmethod
def load_state():
with app.State(AppRPC.APPSTATE_PATH, lock=True) as state:
with app.State(AppRPC.get_state_path(), lock=True) as state:
storage = state.get("storage", {})
# base data
@@ -58,9 +65,13 @@ class AppRPC(object):
storage["projectsDir"] = storage["coreSettings"]["projects_dir"]["value"]
# skip non-existing recent projects
storage["recentProjects"] = [
p for p in storage.get("recentProjects", []) if is_platformio_project(p)
]
storage["recentProjects"] = list(
set(
str(Path(p).resolve())
for p in storage.get("recentProjects", [])
if is_platformio_project(p)
)
)
state["storage"] = storage
state.modified = False # skip saving extra fields
@@ -72,7 +83,7 @@ class AppRPC(object):
@staticmethod
def save_state(state):
with app.State(AppRPC.APPSTATE_PATH, lock=True) as s:
with app.State(AppRPC.get_state_path(), lock=True) as s:
s.clear()
s.update(state)
storage = s.get("storage", {})


@@ -13,35 +13,73 @@
# limitations under the License.
import time
from pathlib import Path
import jsonrpc # pylint: disable=import-error
from twisted.internet import defer # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from platformio.compat import aio_get_running_loop
class IDERPC(object):
class IDERPC:
COMMAND_TIMEOUT = 1.5 # in seconds
def __init__(self):
self._queue = {}
self._ide_queue = []
self._cmd_queue = {}
def send_command(self, sid, command, params):
if not self._queue.get(sid):
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4005, message="PIO Home IDE agent is not started"
)
while self._queue[sid]:
self._queue[sid].pop().callback(
{"id": time.time(), "method": command, "params": params}
)
async def listen_commands(self):
f = aio_get_running_loop().create_future()
self._ide_queue.append(f)
self._process_commands()
return await f
def listen_commands(self, sid=0):
if sid not in self._queue:
self._queue[sid] = []
self._queue[sid].append(defer.Deferred())
return self._queue[sid][-1]
def open_project(self, sid, project_dir):
return self.send_command(sid, "open_project", project_dir)
def open_text_document(self, sid, path, line=None, column=None):
return self.send_command(
sid, "open_text_document", dict(path=path, line=line, column=column)
async def send_command(self, command, params=None):
cmd_id = f"ide-{command}-{time.time()}"
self._cmd_queue[cmd_id] = {
"method": command,
"params": params,
"time": time.time(),
"future": aio_get_running_loop().create_future(),
}
self._process_commands()
# in case the IDE agent has not been started
aio_get_running_loop().call_later(
self.COMMAND_TIMEOUT + 0.1, self._process_commands
)
return await self._cmd_queue[cmd_id]["future"]
def on_command_result(self, cmd_id, value):
if cmd_id not in self._cmd_queue:
return
if self._cmd_queue[cmd_id]["method"] == "get_pio_project_dirs":
value = [str(Path(p).resolve()) for p in value]
self._cmd_queue[cmd_id]["future"].set_result(value)
del self._cmd_queue[cmd_id]
def _process_commands(self):
for cmd_id in list(self._cmd_queue):
cmd_data = self._cmd_queue[cmd_id]
if cmd_data["future"].done():
del self._cmd_queue[cmd_id]
continue
if (
not self._ide_queue
and (time.time() - cmd_data["time"]) > self.COMMAND_TIMEOUT
):
cmd_data["future"].set_exception(
JSONRPC20DispatchException(
code=4005, message="PIO Home IDE agent is not started"
)
)
continue
while self._ide_queue:
self._ide_queue.pop().set_result(
{
"id": cmd_id,
"method": cmd_data["method"],
"params": cmd_data["params"],
}
)
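The reworked IDERPC queues each command as an asyncio future and resolves it when the agent reports back; a stripped-down sketch of that handshake (names and the faked agent reply are illustrative only):

import asyncio
import time

pending = {}

def on_command_result(cmd_id, value):
    fut = pending.pop(cmd_id, None)
    if fut and not fut.done():
        fut.set_result(value)

async def send_command(method, params=None, timeout=1.5):
    cmd_id = "ide-%s-%f" % (method, time.time())
    fut = asyncio.get_running_loop().create_future()
    pending[cmd_id] = fut
    # a connected agent would pick the command up; here we fake its reply
    asyncio.get_running_loop().call_later(0.1, on_command_result, cmd_id, "ok")
    return await asyncio.wait_for(fut, timeout)

print(asyncio.run(send_command("open_project", "/tmp/demo")))  # ok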


@@ -15,38 +15,35 @@
import json
import time
from twisted.internet import defer, reactor # pylint: disable=import-error
from platformio import app
from platformio.cache import ContentCache
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.compat import aio_create_task
class MiscRPC(object):
def load_latest_tweets(self, data_url):
cache_key = app.ContentCache.key_from_args(data_url, "tweets")
cache_valid = "7d"
with app.ContentCache() as cc:
class MiscRPC:
async def load_latest_tweets(self, data_url):
cache_key = ContentCache.key_from_args(data_url, "tweets")
cache_valid = "180d"
with ContentCache() as cc:
cache_data = cc.get(cache_key)
if cache_data:
cache_data = json.loads(cache_data)
# automatically update cache in background every 12 hours
if cache_data["time"] < (time.time() - (3600 * 12)):
reactor.callLater(
5, self._preload_latest_tweets, data_url, cache_key, cache_valid
aio_create_task(
self._preload_latest_tweets(data_url, cache_key, cache_valid)
)
return cache_data["result"]
result = self._preload_latest_tweets(data_url, cache_key, cache_valid)
return result
return await self._preload_latest_tweets(data_url, cache_key, cache_valid)
@staticmethod
@defer.inlineCallbacks
def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((yield OSRPC.fetch_content(data_url)))
with app.ContentCache() as cc:
async def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((await OSRPC.fetch_content(data_url)))
with ContentCache() as cc:
cc.set(
cache_key,
json.dumps({"time": int(time.time()), "result": result}),
cache_valid,
)
defer.returnValue(result)
return result
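The reworked MiscRPC serves a cached result immediately and refreshes stale entries in a background task; a compact sketch of that pattern (the fetch coroutine is a stand-in, not the real HTTP call):

import asyncio
import time

CACHE = {}

async def fetch_remote(url):
    await asyncio.sleep(0.05)                  # stand-in for the real HTTP request
    return {"url": url, "items": ["hello"]}

async def refresh(url):
    result = await fetch_remote(url)
    CACHE[url] = {"time": time.time(), "result": result}
    return result

async def load_cached(url, max_age=12 * 3600):
    entry = CACHE.get(url)
    if entry:
        if time.time() - entry["time"] > max_age:
            asyncio.create_task(refresh(url))  # stale: refresh in the background
        return entry["result"]
    return await refresh(url)                  # nothing cached yet: fetch now

print(asyncio.run(load_cached("https://example.com/feed.json")))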


@@ -15,22 +15,23 @@
from __future__ import absolute_import
import glob
import io
import os
import shutil
from functools import cmp_to_key
import click
from twisted.internet import defer # pylint: disable=import-error
from platformio import app, fs, util
from platformio import __default_requests_timeout__, fs
from platformio.cache import ContentCache
from platformio.clients.http import ensure_internet_on
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding
from platformio.device.list import list_logical_devices
class OSRPC(object):
class OSRPC:
@staticmethod
@defer.inlineCallbacks
def fetch_content(uri, data=None, headers=None, cache_valid=None):
async def fetch_content(uri, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent": (
@@ -39,34 +40,39 @@ class OSRPC(object):
"Safari/603.3.8"
)
}
cache_key = app.ContentCache.key_from_args(uri, data) if cache_valid else None
with app.ContentCache() as cc:
cache_key = ContentCache.key_from_args(uri, data) if cache_valid else None
with ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
if result is not None:
defer.returnValue(result)
return result
# check internet before and resolve issue with 60 seconds timeout
util.internet_on(raise_exception=True)
ensure_internet_on(raise_exception=True)
session = helpers.requests_session()
if data:
r = yield session.post(uri, data=data, headers=headers)
r = await session.post(
uri, data=data, headers=headers, timeout=__default_requests_timeout__
)
else:
r = yield session.get(uri, headers=headers)
r = await session.get(
uri, headers=headers, timeout=__default_requests_timeout__
)
r.raise_for_status()
result = r.text
if cache_valid:
with app.ContentCache() as cc:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
defer.returnValue(result)
return result
def request_content(self, uri, data=None, headers=None, cache_valid=None):
async def request_content(self, uri, data=None, headers=None, cache_valid=None):
if uri.startswith("http"):
return self.fetch_content(uri, data, headers, cache_valid)
return await self.fetch_content(uri, data, headers, cache_valid)
if os.path.isfile(uri):
return fs.get_file_contents(uri, encoding="utf8")
with io.open(uri, encoding="utf-8") as fp:
return fp.read()
return None
@staticmethod
@@ -75,13 +81,11 @@ class OSRPC(object):
@staticmethod
def reveal_file(path):
return click.launch(
path.encode(get_filesystem_encoding()) if PY2 else path, locate=True
)
return click.launch(path, locate=True)
@staticmethod
def open_file(path):
return click.launch(path.encode(get_filesystem_encoding()) if PY2 else path)
return click.launch(path)
@staticmethod
def is_file(path):
@@ -105,7 +109,7 @@ class OSRPC(object):
@staticmethod
def copy(src, dst):
return shutil.copytree(src, dst)
return shutil.copytree(src, dst, symlinks=True)
@staticmethod
def glob(pathnames, root=None):
@@ -113,7 +117,11 @@ class OSRPC(object):
pathnames = [pathnames]
result = set()
for pathname in pathnames:
result |= set(glob.glob(os.path.join(root, pathname) if root else pathname))
result |= set(
glob.glob(
os.path.join(root, pathname) if root else pathname, recursive=True
)
)
return list(result)
@staticmethod
@@ -147,7 +155,7 @@ class OSRPC(object):
@staticmethod
def get_logical_devices():
items = []
for item in util.get_logical_devices():
for item in list_logical_devices():
if item["name"]:
item["name"] = item["name"]
items.append(item)
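The glob call above now passes recursive=True so that "**" patterns descend into subfolders; for example:

import glob
import os

# "**" only matches nested directories when recursive=True is set
matches = glob.glob(os.path.join("/tmp", "**", "*.ini"), recursive=True)
print(matches)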

View File

@@ -14,42 +14,36 @@
from __future__ import absolute_import
import io
import json
import os
import sys
from io import BytesIO, StringIO
import threading
import click
import jsonrpc # pylint: disable=import-error
from twisted.internet import defer # pylint: disable=import-error
from twisted.internet import threads # pylint: disable=import-error
from twisted.internet import utils # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from starlette.concurrency import run_in_threadpool
from platformio import __main__, __version__, fs
from platformio import __main__, __version__, fs, proc
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
try:
from thread import get_ident as thread_get_ident
except ImportError:
from threading import get_ident as thread_get_ident
from platformio.compat import get_locale_encoding, is_bytes
class MultiThreadingStdStream(object):
def __init__(self, parent_stream):
self._buffers = {thread_get_ident(): parent_stream}
self._buffers = {threading.get_ident(): parent_stream}
def __getattr__(self, name):
thread_id = thread_get_ident()
thread_id = threading.get_ident()
self._ensure_thread_buffer(thread_id)
return getattr(self._buffers[thread_id], name)
def _ensure_thread_buffer(self, thread_id):
if thread_id not in self._buffers:
self._buffers[thread_id] = BytesIO() if PY2 else StringIO()
self._buffers[thread_id] = io.StringIO()
def write(self, value):
thread_id = thread_get_ident()
thread_id = threading.get_ident()
self._ensure_thread_buffer(thread_id)
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value
@@ -59,14 +53,14 @@ class MultiThreadingStdStream(object):
result = ""
try:
result = self.getvalue()
self.truncate(0)
self.seek(0)
self.truncate(0)
except AttributeError:
pass
return result
class PIOCoreRPC(object):
class PIOCoreRPC:
@staticmethod
def version():
return __version__
@@ -81,43 +75,44 @@ class PIOCoreRPC(object):
sys.stderr = PIOCoreRPC.thread_stderr
@staticmethod
def call(args, options=None):
return defer.maybeDeferred(PIOCoreRPC._call_generator, args, options)
@staticmethod
@defer.inlineCallbacks
def _call_generator(args, options=None):
async def call(args, options=None):
for i, arg in enumerate(args):
if isinstance(arg, string_types):
args[i] = arg.encode(get_filesystem_encoding()) if PY2 else arg
else:
if not isinstance(arg, str):
args[i] = str(arg)
options = options or {}
to_json = "--json-output" in args
try:
if args and args[0] in ("account", "remote"):
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
else:
result = yield PIOCoreRPC._call_inline(args, options)
try:
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
except ValueError:
# fall-back to subprocess method
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
if options.get("force_subprocess"):
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
result = await PIOCoreRPC._call_inline(args, options)
try:
return PIOCoreRPC._process_result(result, to_json)
except ValueError:
# fall-back to subprocess method
result = await PIOCoreRPC._call_subprocess(args, options)
return PIOCoreRPC._process_result(result, to_json)
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4003, message="PIO Core Call Error", data=str(e)
)
@staticmethod
def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
cwd = (options or {}).get("cwd") or os.getcwd()
async def _call_subprocess(args, options):
result = await run_in_threadpool(
proc.exec_command,
[helpers.get_core_fullpath()] + args,
cwd=options.get("cwd") or os.getcwd(),
)
return (result["out"], result["err"], result["returncode"])
def _thread_task():
@staticmethod
async def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
def _thread_safe_call(args, cwd):
with fs.cd(cwd):
exit_code = __main__.main(["-c"] + args)
return (
@@ -126,21 +121,17 @@ class PIOCoreRPC(object):
exit_code,
)
return threads.deferToThread(_thread_task)
@staticmethod
def _call_subprocess(args, options):
cwd = (options or {}).get("cwd") or os.getcwd()
return utils.getProcessOutputAndValue(
helpers.get_core_fullpath(),
args,
path=cwd,
env={k: v for k, v in os.environ.items() if "%" not in k},
return await run_in_threadpool(
_thread_safe_call, args=args, cwd=options.get("cwd") or os.getcwd()
)
@staticmethod
def _process_result(result, to_json=False):
out, err, code = result
if out and is_bytes(out):
out = out.decode(get_locale_encoding())
if err and is_bytes(err):
err = err.decode(get_locale_encoding())
text = ("%s\n\n%s" % (out, err)).strip()
if code != 0:
raise Exception(text)
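run_in_threadpool is Starlette's bridge for calling blocking code from async handlers, as done above for both the subprocess and in-process paths; a minimal usage sketch with a dummy function:

import asyncio
from starlette.concurrency import run_in_threadpool

def blocking_call(x):
    return x * 2

async def main():
    result = await run_in_threadpool(blocking_call, 21)
    print(result)  # 42

asyncio.run(main())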


@@ -18,21 +18,20 @@ import os
import shutil
import time
import jsonrpc # pylint: disable=import-error
from ajsonrpc.core import JSONRPC20DispatchException
from platformio import exception, fs
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectError
from platformio.project.generator import ProjectGenerator
from platformio.project.helpers import get_project_dir, is_platformio_project
from platformio.project.options import get_config_options_schema
class ProjectRPC(object):
class ProjectRPC:
@staticmethod
def config_call(init_kwargs, method, *args):
assert isinstance(init_kwargs, dict)
@@ -82,7 +81,7 @@ class ProjectRPC(object):
data["description"] = config.get("platformio", "description")
data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
libdeps_dir = config.get_optional_dir("libdeps")
libdeps_dir = config.get("platformio", "libdeps_dir")
for section in config.sections():
if not section.startswith("env:"):
continue
@@ -94,7 +93,7 @@ class ProjectRPC(object):
# skip non existing folders and resolve full path
for key in ("envLibdepsDirs", "libExtraDirs"):
data[key] = [
fs.expanduser(d) if d.startswith("~") else os.path.realpath(d)
fs.expanduser(d) if d.startswith("~") else os.path.abspath(d)
for d in data[key]
if os.path.isdir(d)
]
@@ -105,7 +104,7 @@ class ProjectRPC(object):
return (os.path.sep).join(path.split(os.path.sep)[-2:])
result = []
pm = PlatformManager()
pm = PlatformPackageManager()
for project_dir in AppRPC.load_state()["storage"]["recentProjects"]:
if not os.path.isdir(project_dir):
continue
@@ -148,8 +147,9 @@ class ProjectRPC(object):
@staticmethod
def get_project_examples():
result = []
for manifest in PlatformManager().get_installed():
examples_dir = os.path.join(manifest["__pkg_dir"], "examples")
pm = PlatformPackageManager()
for pkg in pm.get_installed():
examples_dir = os.path.join(pkg.path, "examples")
if not os.path.isdir(examples_dir):
continue
items = []
@@ -172,6 +172,7 @@ class ProjectRPC(object):
"description": project_description,
}
)
manifest = pm.load_manifest(pkg)
result.append(
{
"platform": {
@@ -183,7 +184,7 @@ class ProjectRPC(object):
)
return sorted(result, key=lambda data: data["platform"]["title"])
def init(self, board, framework, project_dir):
async def init(self, board, framework, project_dir):
assert project_dir
state = AppRPC.load_state()
if not os.path.isdir(project_dir):
@@ -196,12 +197,13 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._generate_project_main, project_dir, framework)
return d
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
return self._generate_project_main(project_dir, board, framework)
@staticmethod
def _generate_project_main(_, project_dir, framework):
def _generate_project_main(project_dir, board, framework):
main_content = None
if framework == "arduino":
main_content = "\n".join(
@@ -236,38 +238,51 @@ class ProjectRPC(object):
)
if not main_content:
return project_dir
is_cpp_project = True
pm = PlatformPackageManager()
try:
board = pm.board_config(board)
platforms = board.get("platforms", board.get("platform"))
if not isinstance(platforms, list):
platforms = [platforms]
c_based_platforms = ["intel_mcs51", "ststm8"]
is_cpp_project = not (set(platforms) & set(c_based_platforms))
except exception.PlatformioException:
pass
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
main_path = os.path.join(src_dir, "main.cpp")
src_dir = config.get("platformio", "src_dir")
main_path = os.path.join(
src_dir, "main.%s" % ("cpp" if is_cpp_project else "c")
)
if os.path.isfile(main_path):
return project_dir
if not os.path.isdir(src_dir):
os.makedirs(src_dir)
fs.write_file_contents(main_path, main_content.strip())
with open(main_path, mode="w", encoding="utf8") as fp:
fp.write(main_content.strip())
return project_dir
def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
@staticmethod
async def import_arduino(board, use_arduino_libs, arduino_project_dir):
board = str(board)
if arduino_project_dir and PY2:
arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
# don't import PIO Project
if is_platformio_project(arduino_project_dir):
return arduino_project_dir
is_arduino_project = any(
[
os.path.isfile(
os.path.join(
arduino_project_dir,
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
)
os.path.isfile(
os.path.join(
arduino_project_dir,
"%s.%s" % (os.path.basename(arduino_project_dir), ext),
)
for ext in ("ino", "pde")
]
)
for ext in ("ino", "pde")
)
if not is_arduino_project:
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
)
@@ -288,31 +303,28 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
return d
@staticmethod
def _finalize_arduino_import(_, project_dir, arduino_project_dir):
await PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
with fs.cd(project_dir):
config = ProjectConfig()
src_dir = config.get_optional_dir("src")
src_dir = config.get("platformio", "src_dir")
if os.path.isdir(src_dir):
fs.rmtree(src_dir)
shutil.copytree(arduino_project_dir, src_dir)
shutil.copytree(arduino_project_dir, src_dir, symlinks=True)
return project_dir
@staticmethod
def import_pio(project_dir):
async def import_pio(project_dir):
if not project_dir or not is_platformio_project(project_dir):
raise jsonrpc.exceptions.JSONRPCDispatchException(
raise JSONRPC20DispatchException(
code=4001, message="Not a PlatformIO project: %s" % project_dir
)
new_project_dir = os.path.join(
AppRPC.load_state()["storage"]["projectsDir"],
time.strftime("%y%m%d-%H%M%S-") + os.path.basename(project_dir),
)
shutil.copytree(project_dir, new_project_dir)
shutil.copytree(project_dir, new_project_dir, symlinks=True)
state = AppRPC.load_state()
args = ["init"]
@@ -321,6 +333,7 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
d.addCallback(lambda _: new_project_dir)
return d
await PIOCoreRPC.call(
args, options={"cwd": new_project_dir, "force_subprocess": True}
)
return new_project_dir
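The Arduino import path above checks for a sketch file named after its folder; a standalone restatement of that check (illustrative only, not the original method):

import os

def looks_like_arduino_sketch(project_dir):
    # an Arduino sketch folder contains <folder-name>.ino or <folder-name>.pde
    base = os.path.basename(os.path.normpath(project_dir))
    return any(
        os.path.isfile(os.path.join(project_dir, "%s.%s" % (base, ext)))
        for ext in ("ino", "pde")
    )

print(looks_like_arduino_sketch("/tmp/Blink"))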


@@ -12,90 +12,86 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=import-error
import click
import jsonrpc
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer, reactor
from ajsonrpc.dispatcher import Dispatcher
from ajsonrpc.manager import AsyncJSONRPCResponseManager
from starlette.endpoints import WebSocketEndpoint
from platformio.compat import PY2, dump_json_to_unicode, is_bytes
from platformio.compat import aio_create_task, aio_get_running_loop
from platformio.proc import force_exit
class JSONRPCServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.connection_nums += 1
if self.factory.shutdown_timer:
self.factory.shutdown_timer.cancel()
self.factory.shutdown_timer = None
class JSONRPCServerFactoryBase:
def onClose(self, wasClean, code, reason): # pylint: disable=unused-argument
self.factory.connection_nums -= 1
if self.factory.connection_nums == 0:
self.factory.shutdownByTimeout()
def onMessage(self, payload, isBinary): # pylint: disable=unused-argument
# click.echo("> %s" % payload)
response = jsonrpc.JSONRPCResponseManager.handle(
payload, self.factory.dispatcher
).data
# if error
if "result" not in response:
self.sendJSONResponse(response)
return None
d = defer.maybeDeferred(lambda: response["result"])
d.addCallback(self._callback, response)
d.addErrback(self._errback, response)
return None
def _callback(self, result, response):
response["result"] = result
self.sendJSONResponse(response)
def _errback(self, failure, response):
if isinstance(failure.value, JSONRPCDispatchException):
e = failure.value
else:
e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
del response["result"]
response["error"] = e.error._data # pylint: disable=protected-access
self.sendJSONResponse(response)
def sendJSONResponse(self, response):
# click.echo("< %s" % response)
if "error" in response:
click.secho("Error: %s" % response["error"], fg="red", err=True)
response = dump_json_to_unicode(response)
if not PY2 and not is_bytes(response):
response = response.encode("utf-8")
self.sendMessage(response)
class JSONRPCServerFactory(WebSocketServerFactory):
protocol = JSONRPCServerProtocol
connection_nums = 0
shutdown_timer = 0
shutdown_timer = None
def __init__(self, shutdown_timeout=0):
super(JSONRPCServerFactory, self).__init__()
self.shutdown_timeout = shutdown_timeout
self.dispatcher = jsonrpc.Dispatcher()
self.manager = AsyncJSONRPCResponseManager(
Dispatcher(), is_server_error_verbose=True
)
def shutdownByTimeout(self):
def __call__(self, *args, **kwargs):
raise NotImplementedError
def add_object_handler(self, handler, namespace):
self.manager.dispatcher.add_object(handler, prefix="%s." % namespace)
def on_client_connect(self):
self.connection_nums += 1
if self.shutdown_timer:
self.shutdown_timer.cancel()
self.shutdown_timer = None
def on_client_disconnect(self):
self.connection_nums -= 1
if self.connection_nums < 1:
self.connection_nums = 0
if self.connection_nums == 0:
self.shutdown_by_timeout()
async def on_shutdown(self):
pass
def shutdown_by_timeout(self):
if self.shutdown_timeout < 1:
return
def _auto_shutdown_server():
click.echo("Automatically shutdown server on timeout")
reactor.stop()
force_exit()
self.shutdown_timer = reactor.callLater(
self.shutdown_timer = aio_get_running_loop().call_later(
self.shutdown_timeout, _auto_shutdown_server
)
def addHandler(self, handler, namespace):
self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
class WebSocketJSONRPCServerFactory(JSONRPCServerFactoryBase):
def __call__(self, *args, **kwargs):
ws = WebSocketJSONRPCServer(*args, **kwargs)
ws.factory = self
return ws
class WebSocketJSONRPCServer(WebSocketEndpoint):
encoding = "text"
factory: WebSocketJSONRPCServerFactory = None
async def on_connect(self, websocket):
await websocket.accept()
self.factory.on_client_connect() # pylint: disable=no-member
async def on_receive(self, websocket, data):
aio_create_task(self._handle_rpc(websocket, data))
async def on_disconnect(self, websocket, close_code):
self.factory.on_client_disconnect() # pylint: disable=no-member
async def _handle_rpc(self, websocket, data):
# pylint: disable=no-member
response = await self.factory.manager.get_response_for_payload(data)
if response.error and response.error.data:
click.secho("Error: %s" % response.error.data, fg="red", err=True)
await websocket.send_text(self.factory.manager.serialize(response.body))
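The rewrite above replaces the Twisted/autobahn WebSocket factory with an asyncio-based one built on ajsonrpc and Starlette's `WebSocketEndpoint`. Below is a minimal wiring sketch that mirrors the calls used later in this changeset; `PingRPC` is a purely hypothetical handler added for illustration.

```python
# Minimal wiring sketch (not part of this diff). PingRPC is hypothetical; the
# factory and route calls mirror the ones used elsewhere in this changeset.
from starlette.applications import Starlette
from starlette.routing import WebSocketRoute

from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory


class PingRPC:
    async def ping(self):
        return "pong"


factory = WebSocketJSONRPCServerFactory(shutdown_timeout=0)
factory.add_object_handler(PingRPC(), namespace="ping")  # methods exposed under "ping."

app = Starlette(routes=[WebSocketRoute("/wsrpc", factory, name="wsrpc")])
```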


@@ -0,0 +1,99 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from urllib.parse import urlparse
import click
import uvicorn
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.responses import PlainTextResponse
from starlette.routing import Mount, Route, WebSocketRoute
from starlette.staticfiles import StaticFiles
from starlette.status import HTTP_403_FORBIDDEN
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
from platformio.commands.home.rpc.handlers.misc import MiscRPC
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
from platformio.compat import aio_get_running_loop
from platformio.exception import PlatformioException
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import force_exit
class ShutdownMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
if scope["type"] == "http" and b"__shutdown__" in scope.get("query_string", {}):
await shutdown_server()
await self.app(scope, receive, send)
async def shutdown_server(_=None):
aio_get_running_loop().call_later(0.5, force_exit)
return PlainTextResponse("Server has been shutdown!")
async def protected_page(_):
return PlainTextResponse(
"Protected PlatformIO Home session", status_code=HTTP_403_FORBIDDEN
)
def run_server(host, port, no_open, shutdown_timeout, home_url):
contrib_dir = get_core_package_dir("contrib-piohome")
if not os.path.isdir(contrib_dir):
raise PlatformioException("Invalid path to PIO Home Contrib")
ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)
ws_rpc_factory.add_object_handler(AccountRPC(), namespace="account")
ws_rpc_factory.add_object_handler(AppRPC(), namespace="app")
ws_rpc_factory.add_object_handler(IDERPC(), namespace="ide")
ws_rpc_factory.add_object_handler(MiscRPC(), namespace="misc")
ws_rpc_factory.add_object_handler(OSRPC(), namespace="os")
ws_rpc_factory.add_object_handler(PIOCoreRPC(), namespace="core")
ws_rpc_factory.add_object_handler(ProjectRPC(), namespace="project")
path = urlparse(home_url).path
routes = [
WebSocketRoute(path + "wsrpc", ws_rpc_factory, name="wsrpc"),
Route(path + "__shutdown__", shutdown_server, methods=["POST"]),
Mount(path, StaticFiles(directory=contrib_dir, html=True), name="static"),
]
if path != "/":
routes.append(Route("/", protected_page))
uvicorn.run(
Starlette(
middleware=[Middleware(ShutdownMiddleware)],
routes=routes,
on_startup=[
lambda: click.echo(
"PIO Home has been started. Press Ctrl+C to shutdown."
),
lambda: None if no_open else click.launch(home_url),
],
),
host=host,
port=port,
log_level="warning",
)
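For context, a hedged example of launching PIO Home through the `run_server()` helper defined above; the host, port, and URL values are placeholders, and the module path of this file is not shown in the diff.

```python
# Illustrative call of run_server() as defined above. Values are placeholders;
# import run_server from wherever this file lives in your checkout, since the
# diff does not show its module path.
host, port = "127.0.0.1", 8008
home_url = "http://%s:%d/" % (host, port)

run_server(
    host=host,
    port=port,
    no_open=True,           # skip click.launch() of the browser tab
    shutdown_timeout=0,     # 0 disables the idle auto-shutdown timer
    home_url=home_url,
)
```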


@@ -14,28 +14,25 @@
# pylint: disable=too-many-branches, too-many-locals
import json
import logging
import os
import time
from urllib.parse import quote
import click
import semantic_version
from tabulate import tabulate
from platformio import exception, util
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.compat import dump_json_to_unicode
from platformio.managers.lib import LibraryManager, get_builtin_libs, is_builtin_lib
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.proc import is_ci
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_dir, is_platformio_project
try:
from urllib.parse import quote
except ImportError:
from urllib import quote
CTX_META_INPUT_DIRS_KEY = __name__ + ".input_dirs"
CTX_META_PROJECT_ENVIRONMENTS_KEY = __name__ + ".project_environments"
CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs"
@@ -43,10 +40,10 @@ CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
def get_project_global_lib_dir():
return ProjectConfig.get_instance().get_optional_dir("globallib")
return ProjectConfig.get_instance().get("platformio", "globallib_dir")
@click.group(short_help="Library Manager")
@click.group(short_help="Library manager", hidden=True)
@click.option(
"-d",
"--storage-dir",
@@ -71,6 +68,14 @@ def get_project_global_lib_dir():
)
@click.pass_context
def cli(ctx, **options):
in_silence = PlatformioCLI.in_silence()
if not in_silence:
click.secho(
"\nWARNING!!! This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg` instead.\n",
fg="yellow",
)
storage_cmds = ("install", "uninstall", "update", "list")
# skip commands that don't need storage folder
if ctx.invoked_subcommand not in storage_cmds or (
@@ -93,11 +98,10 @@ def cli(ctx, **options):
)
if not storage_dirs:
raise exception.NotGlobalLibDir(
raise NotGlobalLibDir(
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
)
in_silence = PlatformioCLI.in_silence()
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options["environment"]
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
@@ -106,102 +110,117 @@ def cli(ctx, **options):
if not is_platformio_project(storage_dir):
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
continue
config = ProjectConfig.get_instance(os.path.join(storage_dir, "platformio.ini"))
config.validate(options["environment"], silent=in_silence)
libdeps_dir = config.get_optional_dir("libdeps")
for env in config.envs():
if options["environment"] and env not in options["environment"]:
continue
storage_dir = os.path.join(libdeps_dir, env)
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
"env:" + env, "lib_deps", []
with fs.cd(storage_dir):
config = ProjectConfig.get_instance(
os.path.join(storage_dir, "platformio.ini")
)
config.validate(options["environment"], silent=in_silence)
libdeps_dir = config.get("platformio", "libdeps_dir")
for env in config.envs():
if options["environment"] and env not in options["environment"]:
continue
storage_dir = os.path.join(libdeps_dir, env)
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
"env:" + env, "lib_deps", []
)
@cli.command("install", short_help="Install library")
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
@click.option(
"--save",
"--save/--no-save",
is_flag=True,
help="Save installed libraries into the `platformio.ini` dependency list",
default=True,
help="Save installed libraries into the `platformio.ini` dependency list"
" (enabled by default)",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"--interactive", is_flag=True, help="Allow to make a choice for all prompts"
"--interactive",
is_flag=True,
help="Deprecated! Please use a strict dependency specification (owner/libname)",
)
@click.option(
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
)
@click.pass_context
def lib_install( # pylint: disable=too-many-arguments
def lib_install( # pylint: disable=too-many-arguments,unused-argument
ctx, libraries, save, silent, interactive, force
):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
installed_manifests = {}
installed_pkgs = {}
for storage_dir in storage_dirs:
if not silent and (libraries or storage_dir in storage_libdeps):
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
if libraries:
for library in libraries:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
elif storage_dir in storage_libdeps:
builtin_lib_storages = None
for library in storage_libdeps[storage_dir]:
try:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
except exception.LibNotFound as e:
if builtin_lib_storages is None:
builtin_lib_storages = get_builtin_libs()
if not silent or not is_builtin_lib(builtin_lib_storages, library):
click.secho("Warning! %s" % e, fg="yellow")
lm = LibraryPackageManager(storage_dir)
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
if not save or not libraries:
return
if libraries:
installed_pkgs = {
library: lm.install(library, force=force) for library in libraries
}
elif storage_dir in storage_libdeps:
for library in storage_libdeps[storage_dir]:
lm.install(library, force=force)
if save and installed_pkgs:
_save_deps(ctx, installed_pkgs)
def _save_deps(ctx, pkgs, action="add"):
specs = []
for library, pkg in pkgs.items():
spec = PackageSpec(library)
if spec.external:
specs.append(spec)
else:
specs.append(
PackageSpec(
owner=pkg.metadata.spec.owner,
name=pkg.metadata.spec.name,
requirements=spec.requirements
or (
("^%s" % pkg.metadata.version)
if not pkg.metadata.version.build
else pkg.metadata.version
),
)
)
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
for input_dir in input_dirs:
config = ProjectConfig.get_instance(os.path.join(input_dir, "platformio.ini"))
config.validate(project_environments)
for env in config.envs():
if project_environments and env not in project_environments:
continue
config.expand_interpolations = False
lib_deps = config.get("env:" + env, "lib_deps", [])
for library in libraries:
if library in lib_deps:
continue
manifest = installed_manifests[library]
try:
assert library.lower() == manifest["name"].lower()
assert semantic_version.Version(manifest["version"])
lib_deps.append("{name}@^{version}".format(**manifest))
except (AssertionError, ValueError):
lib_deps.append(library)
config.set("env:" + env, "lib_deps", lib_deps)
config.save()
if not is_platformio_project(input_dir):
continue
save_project_libdeps(input_dir, specs, project_environments, action=action)
@cli.command("uninstall", short_help="Uninstall libraries")
@cli.command("uninstall", short_help="Remove libraries")
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
@click.option(
"--save/--no-save",
is_flag=True,
default=True,
help="Remove libraries from the `platformio.ini` dependency list and save changes"
" (enabled by default)",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.pass_context
def lib_uninstall(ctx, libraries):
def lib_uninstall(ctx, libraries, save, silent):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
uninstalled_pkgs = {}
for storage_dir in storage_dirs:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
for library in libraries:
lm.uninstall(library)
lm = LibraryPackageManager(storage_dir)
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
uninstalled_pkgs = {library: lm.uninstall(library) for library in libraries}
if save and uninstalled_pkgs:
_save_deps(ctx, uninstalled_pkgs, action="remove")
@cli.command("update", short_help="Update installed libraries")
@@ -215,46 +234,66 @@ def lib_uninstall(ctx, libraries):
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
@click.pass_context
def lib_update(ctx, libraries, only_check, dry_run, json_output):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
def lib_update( # pylint: disable=too-many-arguments
ctx, libraries, only_check, dry_run, silent, json_output
):
only_check = dry_run or only_check
if only_check and not json_output:
raise exception.UserSideException(
"This command is deprecated, please use `pio pkg outdated` instead"
)
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
json_result = {}
for storage_dir in storage_dirs:
if not json_output:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
_libraries = libraries
if not _libraries:
_libraries = [manifest["__pkg_dir"] for manifest in lm.get_installed()]
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, [])
lm = LibraryPackageManager(storage_dir)
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
_libraries = libraries or lib_deps or lm.get_installed()
if only_check and json_output:
result = []
for library in _libraries:
pkg_dir = library if os.path.isdir(library) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = lm.parse_pkg_uri(library)
pkg_dir = lm.get_package_dir(name, requirements, url)
if not pkg_dir:
spec = None
pkg = None
if isinstance(library, PackageItem):
pkg = library
else:
spec = PackageSpec(library)
pkg = lm.get_package(spec)
if not pkg:
continue
latest = lm.outdated(pkg_dir, requirements)
if not latest:
outdated = lm.outdated(pkg, spec)
if not outdated.is_outdated(allow_incompatible=True):
continue
manifest = lm.load_manifest(pkg_dir)
manifest["versionLatest"] = latest
manifest = lm.legacy_load_manifest(pkg)
manifest["versionWanted"] = (
str(outdated.wanted) if outdated.wanted else None
)
manifest["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(manifest)
json_result[storage_dir] = result
else:
for library in _libraries:
lm.update(library, only_check=only_check)
to_spec = (
None if isinstance(library, PackageItem) else PackageSpec(library)
)
try:
lm.update(library, to_spec=to_spec)
except UnknownPackageError as e:
if library not in lib_deps:
raise e
if json_output:
return click.echo(
dump_json_to_unicode(
json.dumps(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
@@ -271,8 +310,8 @@ def lib_list(ctx, json_output):
for storage_dir in storage_dirs:
if not json_output:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
items = lm.get_installed()
lm = LibraryPackageManager(storage_dir)
items = lm.legacy_get_installed()
if json_output:
json_result[storage_dir] = items
elif items:
@@ -283,7 +322,7 @@ def lib_list(ctx, json_output):
if json_output:
return click.echo(
dump_json_to_unicode(
json.dumps(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
@@ -296,6 +335,7 @@ def lib_list(ctx, json_output):
@click.option("--json-output", is_flag=True)
@click.option("--page", type=click.INT, default=1)
@click.option("--id", multiple=True)
@click.option("-o", "--owner", multiple=True)
@click.option("-n", "--name", multiple=True)
@click.option("-a", "--author", multiple=True)
@click.option("-k", "--keyword", multiple=True)
@@ -308,6 +348,7 @@ def lib_list(ctx, json_output):
help="Do not prompt, automatically paginate with delay",
)
def lib_search(query, json_output, page, noninteractive, **filters):
regclient = LibraryPackageManager().get_registry_client_instance()
if not query:
query = []
if not isinstance(query, list):
@@ -317,12 +358,15 @@ def lib_search(query, json_output, page, noninteractive, **filters):
for value in values:
query.append('%s:"%s"' % (key, value))
result = util.get_api_result(
"/v2/lib/search", dict(query=" ".join(query), page=page), cache_valid="1d"
result = regclient.fetch_json_data(
"get",
"/v2/lib/search",
params=dict(query=" ".join(query), page=page),
x_cache_valid="1d",
)
if json_output:
click.echo(dump_json_to_unicode(result))
click.echo(json.dumps(result))
return
if result["total"] == 0:
@@ -367,10 +411,11 @@ def lib_search(query, json_output, page, noninteractive, **filters):
time.sleep(5)
elif not click.confirm("Show next libraries?"):
break
result = util.get_api_result(
result = regclient.fetch_json_data(
"get",
"/v2/lib/search",
{"query": " ".join(query), "page": int(result["page"]) + 1},
cache_valid="1d",
params=dict(query=" ".join(query), page=int(result["page"]) + 1),
x_cache_valid="1d",
)
@@ -380,7 +425,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
def lib_builtin(storage, json_output):
items = get_builtin_libs(storage)
if json_output:
return click.echo(dump_json_to_unicode(items))
return click.echo(json.dumps(items))
for storage_ in items:
if not storage_["items"]:
@@ -399,28 +444,28 @@ def lib_builtin(storage, json_output):
@click.argument("library", metavar="[LIBRARY]")
@click.option("--json-output", is_flag=True)
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_uri(library)
lib_id = lm.search_lib_id(
{"name": name, "requirements": requirements},
silent=json_output,
interactive=not json_output,
lm = LibraryPackageManager()
lm.set_log_level(logging.ERROR if json_output else logging.DEBUG)
lib_id = lm.reveal_registry_package_id(library)
regclient = lm.get_registry_client_instance()
lib = regclient.fetch_json_data(
"get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h"
)
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
if json_output:
return click.echo(dump_json_to_unicode(lib))
return click.echo(json.dumps(lib))
click.secho(lib["name"], fg="cyan")
click.echo("=" * len(lib["name"]))
click.secho("#ID: %d" % lib["id"], bold=True)
title = "{ownername}/{name}".format(**lib)
click.secho(title, fg="cyan")
click.echo("=" * len(title))
click.echo(lib["description"])
click.echo()
click.secho("ID: %d" % lib["id"])
click.echo(
"Version: %s, released %s"
% (
lib["version"]["name"],
time.strftime("%c", util.parse_date(lib["version"]["released"])),
util.parse_datetime(lib["version"]["released"]).strftime("%c"),
)
)
click.echo("Manifest: %s" % lib["confurl"])
@@ -428,9 +473,9 @@ def lib_show(library, json_output):
if key not in lib or not lib[key]:
continue
if isinstance(lib[key], list):
click.echo("%s: %s" % (key.title(), ", ".join(lib[key])))
click.echo("%s: %s" % (key.capitalize(), ", ".join(lib[key])))
else:
click.echo("%s: %s" % (key.title(), lib[key]))
click.echo("%s: %s" % (key.capitalize(), lib[key]))
blocks = []
@@ -438,7 +483,7 @@ def lib_show(library, json_output):
for author in lib.get("authors", []):
_data = []
for key in ("name", "email", "url", "maintainer"):
if not author[key]:
if not author.get(key):
continue
if key == "email":
_data.append("<%s>" % author[key])
@@ -462,7 +507,7 @@ def lib_show(library, json_output):
"Versions",
[
"%s, released %s"
% (v["name"], time.strftime("%c", util.parse_date(v["released"])))
% (v["name"], util.parse_datetime(v["released"]).strftime("%c"))
for v in lib["versions"]
],
)
@@ -488,38 +533,28 @@ def lib_show(library, json_output):
return True
@cli.command("register", short_help="Register a new library")
@cli.command("register", short_help="Deprecated")
@click.argument("config_url")
def lib_register(config_url):
if not config_url.startswith("http://") and not config_url.startswith("https://"):
raise exception.InvalidLibConfURL(config_url)
# Validate manifest
ManifestSchema().load_manifest(
ManifestParserFactory.new_from_url(config_url).as_dict()
def lib_register(config_url): # pylint: disable=unused-argument
raise exception.UserSideException(
"This command is deprecated. Please use `pio pkg publish` command."
)
result = util.get_api_result("/lib/register", data=dict(config_url=config_url))
if "message" in result and result["message"]:
click.secho(
result["message"],
fg="green" if "successed" in result and result["successed"] else "red",
)
@cli.command("stats", short_help="Library Registry Statistics")
@click.option("--json-output", is_flag=True)
def lib_stats(json_output):
result = util.get_api_result("/lib/stats", cache_valid="1h")
regclient = LibraryPackageManager().get_registry_client_instance()
result = regclient.fetch_json_data("get", "/v2/lib/stats", x_cache_valid="1h")
if json_output:
return click.echo(dump_json_to_unicode(result))
return click.echo(json.dumps(result))
for key in ("updated", "added"):
tabular_data = [
(
click.style(item["name"], fg="cyan"),
time.strftime("%c", util.parse_date(item["date"])),
util.parse_datetime(item["date"]).strftime("%c"),
"https://platformio.org/lib/show/%s/%s"
% (item["id"], quote(item["name"])),
)
@@ -594,9 +629,9 @@ def print_lib_item(item):
if key not in item or not item[key]:
continue
if isinstance(item[key], list):
click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
click.echo("%s: %s" % (key.capitalize(), ", ".join(item[key])))
else:
click.echo("%s: %s" % (key.title(), item[key]))
click.echo("%s: %s" % (key.capitalize(), item[key]))
for key in ("frameworks", "platforms"):
if key not in item:
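The deprecated `pio lib` commands above now delegate to the new package-manager API. A hedged sketch of that API as it is used in this hunk; the storage directory and the `bblanchon/ArduinoJson@^6` spec are illustrative values, not defaults.

```python
# Hedged sketch of the LibraryPackageManager API used in the rewritten command.
# The storage directory and the library spec are examples only.
import logging

from platformio.package.manager.library import LibraryPackageManager
from platformio.package.meta import PackageSpec

lm = LibraryPackageManager("/tmp/pio-libdeps")
lm.set_log_level(logging.DEBUG)

pkg = lm.install("bblanchon/ArduinoJson@^6")
outdated = lm.outdated(pkg, PackageSpec("bblanchon/ArduinoJson@^6"))
if outdated.is_outdated(allow_incompatible=True):
    print("Wanted:", outdated.wanted, "Latest:", outdated.latest)
```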


@@ -0,0 +1,104 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from platformio import util
from platformio.compat import ci_strings_are_equal
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.meta import PackageSpec
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import InvalidProjectConfError
@util.memoized(expire="60s")
def get_builtin_libs(storage_names=None):
# pylint: disable=import-outside-toplevel
from platformio.package.manager.library import LibraryPackageManager
items = []
storage_names = storage_names or []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
p = PlatformFactory.new(pkg)
for storage in p.get_lib_storages():
if storage_names and storage["name"] not in storage_names:
continue
lm = LibraryPackageManager(storage["path"])
items.append(
{
"name": storage["name"],
"path": storage["path"],
"items": lm.legacy_get_installed(),
}
)
return items
def is_builtin_lib(name):
for storage in get_builtin_libs():
for lib in storage["items"]:
if lib.get("name") == name:
return True
return False
def ignore_deps_by_specs(deps, specs):
result = []
for dep in deps:
depspec = PackageSpec(dep)
if depspec.external:
result.append(dep)
continue
ignore_conditions = []
for spec in specs:
if depspec.owner:
ignore_conditions.append(
ci_strings_are_equal(depspec.owner, spec.owner)
and ci_strings_are_equal(depspec.name, spec.name)
)
else:
ignore_conditions.append(ci_strings_are_equal(depspec.name, spec.name))
if not any(ignore_conditions):
result.append(dep)
return result
def save_project_libdeps(project_dir, specs, environments=None, action="add"):
config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
config.validate(environments)
for env in config.envs():
if environments and env not in environments:
continue
config.expand_interpolations = False
candidates = []
try:
candidates = ignore_deps_by_specs(
config.get("env:" + env, "lib_deps"), specs
)
except InvalidProjectConfError:
pass
if action == "add":
candidates.extend(spec.as_dependency() for spec in specs)
if candidates:
result = []
for item in candidates:
item = item.strip()
if item and item not in result:
result.append(item)
config.set("env:" + env, "lib_deps", result)
elif config.has_option("env:" + env, "lib_deps"):
config.remove_option("env:" + env, "lib_deps")
config.save()
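A hedged usage sketch for the `save_project_libdeps()` helper added above; the project path, spec, and environment name are placeholders.

```python
# Placeholder values throughout; the call signature matches the helper above.
from platformio.commands.lib.helpers import save_project_libdeps
from platformio.package.meta import PackageSpec

specs = [PackageSpec(owner="bblanchon", name="ArduinoJson", requirements="^6")]
save_project_libdeps(
    "/path/to/project",      # directory containing platformio.ini
    specs,
    environments=["uno"],    # optional: touch only these environments
    action="add",            # "remove" drops matching entries instead
)
```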

platformio/commands/org.py (new file)

@@ -0,0 +1,165 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname",
callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option(
"--displayname",
)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization `%s` has been successfully created." % orgname,
fg="green",
)
@cli.command("list", short_help="List organizations and their members")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organization")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("cur_orgname")
@click.option(
"--orgname",
callback=lambda _, __, value: validate_orgname(value),
help="A new orgname",
)
@click.option("--email")
@click.option("--displayname")
def org_update(cur_orgname, **kwargs):
client = AccountClient()
org = client.get_org(cur_orgname)
del org["owners"]
new_org = org.copy()
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(cur_orgname, new_org)
return click.secho(
"The organization `%s` has been successfully updated." % cur_orgname,
fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the `%s` organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho(
"Organization `%s` has been destroyed." % orgname,
fg="green",
)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument(
"orgname",
)
@click.argument(
"username",
)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner `%s` has been successfully added to the `%s` organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument(
"orgname",
)
@click.argument(
"username",
)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The `%s` owner has been successfully removed from the `%s` organization."
% (username, orgname),
fg="green",
)
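The `pio org` commands above are thin wrappers over `AccountClient`. A hedged sketch of the same client calls used directly; the organization and user names are placeholders, and the calls require an authenticated PlatformIO account.

```python
# Placeholder names; requires a logged-in PlatformIO account.
from platformio.clients.account import AccountClient

client = AccountClient()
client.create_org("my-org", None, "My Organization")   # orgname, email, displayname
client.add_org_owner("my-org", "some-username")
for org in client.list_orgs():
    print(org.get("orgname"), org.get("email"))
```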


@@ -0,0 +1,48 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from platformio.package.commands.exec import package_exec_cmd
from platformio.package.commands.install import package_install_cmd
from platformio.package.commands.list import package_list_cmd
from platformio.package.commands.outdated import package_outdated_cmd
from platformio.package.commands.pack import package_pack_cmd
from platformio.package.commands.publish import package_publish_cmd
from platformio.package.commands.search import package_search_cmd
from platformio.package.commands.show import package_show_cmd
from platformio.package.commands.uninstall import package_uninstall_cmd
from platformio.package.commands.unpublish import package_unpublish_cmd
from platformio.package.commands.update import package_update_cmd
@click.group(
"pkg",
commands=[
package_exec_cmd,
package_install_cmd,
package_list_cmd,
package_outdated_cmd,
package_pack_cmd,
package_publish_cmd,
package_search_cmd,
package_show_cmd,
package_uninstall_cmd,
package_unpublish_cmd,
package_update_cmd,
],
short_help="Unified Package Manager",
)
def cli():
pass
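The unified `pkg` group above simply aggregates the per-command modules. For a quick smoke test it can be invoked in-process with click's standard `CliRunner`; the import path below is an assumption based on the `platformio/commands/` layout seen elsewhere in this diff.

```python
# In-process invocation via click's CliRunner; the module path is assumed from
# the platformio/commands/ layout used elsewhere in this changeset.
from click.testing import CliRunner

from platformio.commands.pkg import cli as pkg_cli

runner = CliRunner()
result = runner.invoke(pkg_cli, ["--help"])
print(result.output)        # lists exec, install, list, outdated, pack, ...
```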


@@ -12,152 +12,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import dirname, isdir
import json
import logging
import os
import click
from platformio import app, exception, util
from platformio.commands import PlatformioCLI
from platformio.commands.boards import print_boards
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory, PlatformManager
from platformio.package.pack import PackagePacker
from platformio.exception import UserSideException
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.package.version import get_original_version
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
@click.group(short_help="Platform Manager")
@click.group(short_help="Platform manager", hidden=True)
def cli():
pass
def _print_platforms(platforms):
for platform in platforms:
click.echo(
"{name} ~ {title}".format(
name=click.style(platform["name"], fg="cyan"), title=platform["title"]
)
if not PlatformioCLI.in_silence():
click.secho(
"\nWARNING!!! This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg` instead.\n",
fg="yellow",
)
click.echo("=" * (3 + len(platform["name"] + platform["title"])))
click.echo(platform["description"])
click.echo()
if "homepage" in platform:
click.echo("Home: %s" % platform["homepage"])
if "frameworks" in platform and platform["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(platform["frameworks"]))
if "packages" in platform:
click.echo("Packages: %s" % ", ".join(platform["packages"]))
if "version" in platform:
if "__src_url" in platform:
click.echo(
"Version: #%s (%s)" % (platform["version"], platform["__src_url"])
)
else:
click.echo("Version: " + platform["version"])
click.echo()
def _get_registry_platforms():
platforms = util.get_api_result("/platforms", cache_valid="7d")
pm = PlatformManager()
for platform in platforms or []:
platform["versions"] = pm.get_all_repo_versions(platform["name"])
return platforms
def _get_platform_data(*args, **kwargs):
try:
return _get_installed_platform_data(*args, **kwargs)
except exception.UnknownPlatform:
return _get_registry_platform_data(*args, **kwargs)
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
p = PlatformFactory.newPlatform(platform)
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [],
)
# if dump to API
# del data['version']
# return data
# overwrite VCS version and add extra fields
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
assert manifest
for key in manifest:
if key == "version" or key.startswith("__"):
data[key] = manifest[key]
if with_boards:
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]
if not data["packages"] or not expose_packages:
return data
data["packages"] = []
installed_pkgs = p.get_installed_packages()
for name, opts in p.packages.items():
item = dict(
name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True,
)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
if key not in ("url", "version", "description"):
continue
item[key] = value
if key == "version":
item["originalVersion"] = util.get_original_version(value)
data["packages"].append(item)
return data
def _get_registry_platform_data( # pylint: disable=unused-argument
platform, with_boards=True, expose_packages=True
):
_data = None
for p in _get_registry_platforms():
if p["name"] == platform:
_data = p
break
if not _data:
return None
data = dict(
name=_data["name"],
title=_data["title"],
description=_data["description"],
homepage=_data["homepage"],
repository=_data["repository"],
url=_data["url"],
license=_data["license"],
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data["versions"],
)
if with_boards:
data["boards"] = [
board
for board in PlatformManager().get_registered_boards()
if board["platform"] == _data["name"]
]
return data
@cli.command("search", short_help="Search for development platform")
@@ -168,7 +47,7 @@ def platform_search(query, json_output):
for platform in _get_registry_platforms():
if query == "all":
query = ""
search_data = dump_json_to_unicode(platform)
search_data = json.dumps(platform)
if query and query.lower() not in search_data.lower():
continue
platforms.append(
@@ -178,7 +57,7 @@ def platform_search(query, json_output):
)
if json_output:
click.echo(dump_json_to_unicode(platforms))
click.echo(json.dumps(platforms))
else:
_print_platforms(platforms)
@@ -187,11 +66,14 @@ def platform_search(query, json_output):
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_frameworks(query, json_output):
regclient = PlatformPackageManager().get_registry_client_instance()
frameworks = []
for framework in util.get_api_result("/frameworks", cache_valid="7d"):
for framework in regclient.fetch_json_data(
"get", "/v2/frameworks", x_cache_valid="1d"
):
if query == "all":
query = ""
search_data = dump_json_to_unicode(framework)
search_data = json.dumps(framework)
if query and query.lower() not in search_data.lower():
continue
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
@@ -204,7 +86,7 @@ def platform_frameworks(query, json_output):
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(frameworks))
click.echo(json.dumps(frameworks))
else:
_print_platforms(frameworks)
@@ -213,17 +95,15 @@ def platform_frameworks(query, json_output):
@click.option("--json-output", is_flag=True)
def platform_list(json_output):
platforms = []
pm = PlatformManager()
for manifest in pm.get_installed():
pm = PlatformPackageManager()
for pkg in pm.get_installed():
platforms.append(
_get_installed_platform_data(
manifest["__pkg_dir"], with_boards=False, expose_packages=False
)
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
)
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(platforms))
click.echo(json.dumps(platforms))
else:
_print_platforms(platforms)
@@ -234,16 +114,15 @@ def platform_list(json_output):
def platform_show(platform, json_output): # pylint: disable=too-many-branches
data = _get_platform_data(platform)
if not data:
raise exception.UnknownPlatform(platform)
raise UnknownPlatform(platform)
if json_output:
return click.echo(dump_json_to_unicode(data))
return click.echo(json.dumps(data))
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
click.echo(
"{name} ~ {title}".format(
name=click.style(data["name"], fg="cyan"), title=data["title"]
)
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
)
click.echo("=" * (3 + len(data["name"] + data["title"])))
click.echo("=" * (3 + len(dep + data["title"])))
click.echo(data["description"])
click.echo()
if "version" in data:
@@ -300,33 +179,70 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install( # pylint: disable=too-many-arguments
def platform_install( # pylint: disable=too-many-arguments,too-many-locals
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
silent,
force,
):
pm = PlatformManager()
def _find_pkg_names(p, candidates):
result = []
for candidate in candidates:
found = False
# lookup by package types
for _name, _opts in p.packages.items():
if _opts.get("type") == candidate:
result.append(_name)
found = True
if (
p.frameworks
and candidate.startswith("framework-")
and candidate[10:] in p.frameworks
):
result.append(p.frameworks[candidate[10:]]["package"])
found = True
if not found:
result.append(candidate)
return result
pm = PlatformPackageManager()
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
for platform in platforms:
if pm.install(
name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
with_all_packages=with_all_packages,
force=force,
):
if with_package or without_package or with_all_packages:
pkg = pm.install(platform, skip_dependencies=True)
p = PlatformFactory.new(pkg)
if with_all_packages:
with_package = list(p.packages)
with_package = set(_find_pkg_names(p, with_package or []))
without_package = set(_find_pkg_names(p, without_package or []))
upkgs = with_package | without_package
ppkgs = set(p.packages)
if not upkgs.issubset(ppkgs):
raise UnknownPackageError(", ".join(upkgs - ppkgs))
for name, options in p.packages.items():
if name in without_package:
continue
if name in with_package or not (
skip_default_package or options.get("optional", False)
):
p.pm.install(p.get_package_spec(name), force=force)
else:
pkg = pm.install(platform, skip_dependencies=skip_default_package)
if pkg and not silent:
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"The rest of the packages will be installed later "
"depending on your build environment." % platform,
fg="green",
)
@@ -335,11 +251,12 @@ def platform_install( # pylint: disable=too-many-arguments
@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
pm = PlatformManager()
pm = PlatformPackageManager()
pm.set_log_level(logging.DEBUG)
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully uninstalled!" % platform,
"The platform '%s' has been successfully removed!" % platform,
fg="green",
)
@@ -358,66 +275,199 @@ def platform_uninstall(platforms):
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals
platforms, only_packages, only_check, dry_run, json_output
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
platforms, only_check, dry_run, silent, json_output, **_
):
pm = PlatformManager()
pkg_dir_to_name = {}
if not platforms:
platforms = []
for manifest in pm.get_installed():
platforms.append(manifest["__pkg_dir"])
pkg_dir_to_name[manifest["__pkg_dir"]] = manifest.get(
"title", manifest["name"]
)
if only_check and not json_output:
raise UserSideException(
"This command is deprecated, please use `pio pkg outdated` instead"
)
pm = PlatformPackageManager()
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
platforms = platforms or pm.get_installed()
only_check = dry_run or only_check
if only_check and json_output:
result = []
for platform in platforms:
pkg_dir = platform if isdir(platform) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = pm.parse_pkg_uri(platform)
pkg_dir = pm.get_package_dir(name, requirements, url)
if not pkg_dir:
spec = None
pkg = None
if isinstance(platform, PackageItem):
pkg = platform
else:
spec = PackageSpec(platform)
pkg = pm.get_package(spec)
if not pkg:
continue
latest = pm.outdated(pkg_dir, requirements)
outdated = pm.outdated(pkg, spec)
if (
not latest
and not PlatformFactory.newPlatform(pkg_dir).are_outdated_packages()
not outdated.is_outdated(allow_incompatible=True)
and not PlatformFactory.new(pkg).are_outdated_packages()
):
continue
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False
pkg, with_boards=False, expose_packages=False
)
if latest:
data["versionLatest"] = latest
if outdated.is_outdated(allow_incompatible=True):
data["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(data)
return click.echo(dump_json_to_unicode(result))
return click.echo(json.dumps(result))
# cleanup cached board and platform lists
app.clean_cache()
for platform in platforms:
click.echo(
"Platform %s"
% click.style(pkg_dir_to_name.get(platform, platform), fg="cyan")
% click.style(
platform.metadata.name
if isinstance(platform, PackageItem)
else platform,
fg="cyan",
)
)
click.echo("--------")
pm.update(platform, only_packages=only_packages, only_check=only_check)
pm.update(platform)
click.echo()
return True
@cli.command(
"pack", short_help="Create a tarball from development platform/tool package"
)
@click.argument("package", required=True, metavar="[source directory, tar.gz or zip]")
def platform_pack(package):
p = PackagePacker(package)
tarball_path = p.pack()
click.secho('Wrote a tarball to "%s"' % tarball_path, fg="green")
#
# Helpers
#
def _print_platforms(platforms):
for platform in platforms:
click.echo(
"{name} ~ {title}".format(
name=click.style(platform["name"], fg="cyan"), title=platform["title"]
)
)
click.echo("=" * (3 + len(platform["name"] + platform["title"])))
click.echo(platform["description"])
click.echo()
if "homepage" in platform:
click.echo("Home: %s" % platform["homepage"])
if "frameworks" in platform and platform["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(platform["frameworks"]))
if "packages" in platform:
click.echo("Packages: %s" % ", ".join(platform["packages"]))
if "version" in platform:
if "__src_url" in platform:
click.echo(
"Version: %s (%s)" % (platform["version"], platform["__src_url"])
)
else:
click.echo("Version: " + platform["version"])
click.echo()
def _get_registry_platforms():
regclient = PlatformPackageManager().get_registry_client_instance()
return regclient.fetch_json_data("get", "/v2/platforms", x_cache_valid="1d")
def _get_platform_data(*args, **kwargs):
try:
return _get_installed_platform_data(*args, **kwargs)
except UnknownPlatform:
return _get_registry_platform_data(*args, **kwargs)
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
p = PlatformFactory.new(platform)
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
url=p.homepage,
repository=p.repository_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [],
)
# if dump to API
# del data['version']
# return data
# overwrite VCS version and add extra fields
manifest = PlatformPackageManager().legacy_load_manifest(
os.path.dirname(p.manifest_path)
)
assert manifest
for key in manifest:
if key == "version" or key.startswith("__"):
data[key] = manifest[key]
if with_boards:
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]
if not data["packages"] or not expose_packages:
return data
data["packages"] = []
installed_pkgs = {
pkg.metadata.name: p.pm.load_manifest(pkg) for pkg in p.get_installed_packages()
}
for name, options in p.packages.items():
item = dict(
name=name,
type=p.get_package_type(name),
requirements=options.get("version"),
optional=options.get("optional") is True,
)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
if key not in ("url", "version", "description"):
continue
item[key] = value
if key == "version":
item["originalVersion"] = get_original_version(value)
data["packages"].append(item)
return data
def _get_registry_platform_data( # pylint: disable=unused-argument
platform, with_boards=True, expose_packages=True
):
_data = None
for p in _get_registry_platforms():
if p["name"] == platform:
_data = p
break
if not _data:
return None
data = dict(
ownername=_data.get("ownername"),
name=_data["name"],
title=_data["title"],
description=_data["description"],
homepage=_data["homepage"],
repository=_data["repository"],
url=_data["url"],
license=_data["license"],
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data.get("versions"),
)
if with_boards:
data["boards"] = [
board
for board in PlatformPackageManager().get_registered_boards()
if board["platform"] == _data["name"]
]
return data
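A hedged sketch of the `PlatformPackageManager`/`PlatformFactory` pairing that the rewritten platform command relies on above.

```python
# Enumerate installed dev/platforms the same way the command above does.
from platformio.package.manager.platform import PlatformPackageManager
from platformio.platform.factory import PlatformFactory

pm = PlatformPackageManager()
for pkg in pm.get_installed():          # PackageItem objects
    p = PlatformFactory.new(pkg)        # concrete platform instance
    print(p.name, p.version, "embedded" if p.is_embedded() else "desktop")
```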


@@ -12,434 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments,too-many-locals, too-many-branches
import os
import click
from tabulate import tabulate
from platformio import exception, fs
from platformio.commands.platform import platform_install as cli_platform_install
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.helpers import is_platformio_project
from platformio.project.commands.config import project_config_cmd
from platformio.project.commands.init import project_init_cmd
from platformio.project.commands.metadata import project_metadata_cmd
@click.group(short_help="Project Manager")
@click.group(
"project",
commands=[
project_config_cmd,
project_init_cmd,
project_metadata_cmd,
],
short_help="Project Manager",
)
def cli():
pass
@cli.command("config", short_help="Show computed configuration")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--json-output", is_flag=True)
def project_config(project_dir, json_output):
if not is_platformio_project(project_dir):
raise NotPlatformIOProjectError(project_dir)
with fs.cd(project_dir):
config = ProjectConfig.get_instance()
if json_output:
return click.echo(config.to_json())
click.echo(
"Computed project configuration for %s" % click.style(project_dir, fg="cyan")
)
for section, options in config.as_tuple():
click.echo()
click.secho(section, fg="cyan")
click.echo("-" * len(section))
click.echo(
tabulate(
[
(name, "=", "\n".join(value) if isinstance(value, list) else value)
for name, value in options
],
tablefmt="plain",
)
)
return None
def validate_boards(ctx, param, value): # pylint: disable=W0613
pm = PlatformManager()
for id_ in value:
try:
pm.board_config(id_)
except exception.UnknownBoard:
raise click.BadParameter(
"`%s`. Please search for board ID using `platformio boards` "
"command" % id_
)
return value
@cli.command("init", short_help="Initialize a project or update existing")
@click.option(
"--project-dir",
"-d",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.option("-O", "--project-option", multiple=True)
@click.option("--env-prefix", default="")
@click.option("-s", "--silent", is_flag=True)
@click.pass_context
def project_init(
ctx, # pylint: disable=R0913
project_dir,
board,
ide,
project_option,
env_prefix,
silent,
):
if not silent:
if project_dir == os.getcwd():
click.secho("\nThe current working directory", fg="yellow", nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho("will be used for the project.", fg="yellow")
click.echo("")
click.echo(
"The next files/directories have been created in %s"
% click.style(project_dir, fg="cyan")
)
click.echo(
"%s - Put project header files here" % click.style("include", fg="cyan")
)
click.echo(
"%s - Put here project specific (private) libraries"
% click.style("lib", fg="cyan")
)
click.echo("%s - Put project source files here" % click.style("src", fg="cyan"))
click.echo(
"%s - Project Configuration File" % click.style("platformio.ini", fg="cyan")
)
is_new_project = not is_platformio_project(project_dir)
if is_new_project:
init_base_project(project_dir)
if board:
fill_project_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)
if ide:
pg = ProjectGenerator(project_dir, ide, board)
pg.generate()
if is_new_project:
init_ci_conf(project_dir)
init_cvs_ignore(project_dir)
if silent:
return
if ide:
click.secho(
"\nProject has been successfully %s including configuration files "
"for `%s` IDE." % ("initialized" if is_new_project else "updated", ide),
fg="green",
)
else:
click.secho(
"\nProject has been successfully %s! Useful commands:\n"
"`pio run` - process/build project from the current directory\n"
"`pio run --target upload` or `pio run -t upload` "
"- upload firmware to a target\n"
"`pio run --target clean` - clean project (remove compiled files)"
"\n`pio run --help` - additional information"
% ("initialized" if is_new_project else "updated"),
fg="green",
)
def init_base_project(project_dir):
with fs.cd(project_dir):
config = ProjectConfig()
config.save()
dir_to_readme = [
(config.get_optional_dir("src"), None),
(config.get_optional_dir("include"), init_include_readme),
(config.get_optional_dir("lib"), init_lib_readme),
(config.get_optional_dir("test"), init_test_readme),
]
for (path, cb) in dir_to_readme:
if os.path.isdir(path):
continue
os.makedirs(path)
if cb:
cb(path)
def init_include_readme(include_dir):
fs.write_file_contents(
os.path.join(include_dir, "README"),
"""
This directory is intended for project header files.
A header file is a file containing C declarations and macro definitions
to be shared between several project source files. You request the use of a
header file in your project source file (C, C++, etc) located in `src` folder
by including it, with the C preprocessing directive `#include'.
```src/main.c
#include "header.h"
int main (void)
{
...
}
```
Including a header file produces the same results as copying the header file
into each source file that needs it. Such copying would be time-consuming
and error-prone. With a header file, the related declarations appear
in only one place. If they need to be changed, they can be changed in one
place, and programs that include the header file will automatically use the
new version when next recompiled. The header file eliminates the labor of
finding and changing all the copies as well as the risk that a failure to
find one copy will result in inconsistencies within a program.
In C, the usual convention is to give header files names that end with `.h'.
It is most portable to use only letters, digits, dashes, and underscores in
header file names, and at most one dot.
Read more about using header files in official GCC documentation:
* Include Syntax
* Include Operation
* Once-Only Headers
* Computed Includes
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
""",
)
def init_lib_readme(lib_dir):
# pylint: disable=line-too-long
fs.write_file_contents(
os.path.join(lib_dir, "README"),
"""
This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link into executable file.
The source code of each library should be placed in its own separate directory
("lib/your_library_name/[here are source files]").
For example, see a structure of the following two libraries `Foo` and `Bar`:
|--lib
| |
| |--Bar
| | |--docs
| | |--examples
| | |--src
| | |- Bar.c
| | |- Bar.h
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
| |
| |--Foo
| | |- Foo.c
| | |- Foo.h
| |
| |- README --> THIS FILE
|
|- platformio.ini
|--src
|- main.c
and the contents of `src/main.c`:
```
#include <Foo.h>
#include <Bar.h>
int main (void)
{
...
}
```
PlatformIO Library Dependency Finder will automatically find dependent
libraries by scanning project source files.
More information about PlatformIO Library Dependency Finder
- https://docs.platformio.org/page/librarymanager/ldf.html
""",
)
def init_test_readme(test_dir):
fs.write_file_contents(
os.path.join(test_dir, "README"),
"""
This directory is intended for PIO Unit Testing and project tests.
Unit Testing is a software testing method by which individual units of
source code, sets of one or more MCU program modules together with associated
control data, usage procedures, and operating procedures, are tested to
determine whether they are fit for use. Unit testing finds problems early
in the development cycle.
More information about PIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
""",
)
def init_ci_conf(project_dir):
conf_path = os.path.join(project_dir, ".travis.yml")
if os.path.isfile(conf_path):
return
fs.write_file_contents(
conf_path,
"""# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
# Documentation:
#
# * Travis CI Embedded Builds with PlatformIO
# < https://docs.travis-ci.com/user/integration/platformio/ >
#
# * PlatformIO integration with Travis CI
# < https://docs.platformio.org/page/ci/travis.html >
#
# * User Guide for `platformio ci` command
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
#
#
# Please choose one of the following templates (proposed below) and uncomment
# it (remove "# " before each line), or use your own configuration according to the
# Travis CI documentation (see above).
#
#
# Template #1: General project. Test it using existing `platformio.ini`.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio run
#
# Template #2: The project is intended to be used as a library with examples.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# env:
# - PLATFORMIO_CI_SRC=path/to/test/file.c
# - PLATFORMIO_CI_SRC=examples/file.ino
# - PLATFORMIO_CI_SRC=path/to/test/directory
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
""",
)
def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
return
fs.write_file_contents(conf_path, ".pio\n")
def fill_project_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)
used_boards = []
for section in config.sections():
cond = [section.startswith("env:"), config.has_option(section, "board")]
if all(cond):
used_boards.append(config.get(section, "board"))
pm = PlatformManager()
used_platforms = []
modified = False
for id_ in board_ids:
board_config = pm.board_config(id_)
used_platforms.append(board_config["platform"])
if id_ in used_boards:
continue
used_boards.append(id_)
modified = True
envopts = {"platform": board_config["platform"], "board": id_}
# find default framework for board
frameworks = board_config.get("frameworks")
if frameworks:
envopts["framework"] = frameworks[0]
for item in project_option:
if "=" not in item:
continue
_name, _value = item.split("=", 1)
envopts[_name.strip()] = _value.strip()
section = "env:%s%s" % (env_prefix, id_)
config.add_section(section)
for option, value in envopts.items():
config.set(section, option, value)
if force_download and used_platforms:
_install_dependent_platforms(ctx, used_platforms)
if modified:
config.save()
def _install_dependent_platforms(ctx, platforms):
installed_platforms = [p["name"] for p in PlatformManager().get_installed()]
if set(platforms) <= set(installed_platforms):
return
ctx.invoke(
cli_platform_install, platforms=list(set(platforms) - set(installed_platforms))
)
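For reference, here is a minimal stdlib-only sketch of how `fill_project_envs` above composes a new `[env:...]` section from a board ID; the board, platform, and framework values below are hypothetical examples, not taken from this diff.

```python
# Minimal sketch of the env-section composition in fill_project_envs().
def build_env_section(board_id, platform, framework, project_option, env_prefix=""):
    envopts = {"platform": platform, "board": board_id}
    if framework:
        envopts["framework"] = framework  # first framework reported by the board
    for item in project_option:
        if "=" not in item:
            continue
        name, value = item.split("=", 1)
        envopts[name.strip()] = value.strip()
    return "env:%s%s" % (env_prefix, board_id), envopts

print(build_env_section("uno", "atmelavr", "arduino", ["monitor_speed=115200"]))
# ('env:uno', {'platform': 'atmelavr', 'board': 'uno',
#              'framework': 'arduino', 'monitor_speed': '115200'})
```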


@@ -1,229 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import threading
from tempfile import mkdtemp
from time import sleep
import click
from platformio import exception, fs
from platformio.commands import device
from platformio.managers.core import pioplus_call
from platformio.project.exception import NotPlatformIOProjectError
# pylint: disable=unused-argument
@click.group("remote", short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
def cli(**kwargs):
pass
@cli.group("agent", short_help="Start new agent or list active")
def remote_agent():
pass
@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option(
"-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
def remote_agent_start(**kwargs):
pioplus_call(sys.argv[1:])
@remote_agent.command("reload", short_help="Reload agents")
def remote_agent_reload():
pioplus_call(sys.argv[1:])
@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
pioplus_call(sys.argv[1:])
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
def remote_update(only_check, dry_run):
pioplus_call(sys.argv[1:])
@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def remote_run(**kwargs):
pioplus_call(sys.argv[1:])
@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def remote_test(**kwargs):
pioplus_call(sys.argv[1:])
@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
pass
@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
def device_list(json_output):
pioplus_call(sys.argv[1:])
@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
@click.pass_context
def device_monitor(ctx, **kwargs):
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
project_options = device.get_project_options(kwargs["environment"])
kwargs = device.apply_project_monitor_options(kwargs, project_options)
except NotPlatformIOProjectError:
pass
kwargs["baud"] = kwargs["baud"] or 9600
def _tx_target(sock_dir):
pioplus_argv = ["remote", "device", "monitor"]
pioplus_argv.extend(device.options_to_argv(kwargs, project_options))
pioplus_argv.extend(["--sock", sock_dir])
try:
pioplus_call(pioplus_argv)
except exception.ReturnErrorCode:
pass
sock_dir = mkdtemp(suffix="pioplus")
sock_file = os.path.join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir,))
t.start()
while t.is_alive() and not os.path.isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
kwargs["port"] = fs.get_file_contents(sock_file)
ctx.invoke(device.device_monitor, **kwargs)
t.join(2)
finally:
fs.rmtree(sock_dir)
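The sock-file handshake used by the deleted `device_monitor` above can be reduced to the following stdlib-only sketch; the port value, timing, and the atomic-rename detail are assumptions for illustration only.

```python
# Sketch of the sock-file handshake: a worker thread publishes the bridge
# address in <tmpdir>/sock, the caller polls until the file appears and then
# reads the "socket://..." string from it.
import os
import shutil
import threading
from tempfile import mkdtemp
from time import sleep

sock_dir = mkdtemp(suffix="pioplus")
sock_file = os.path.join(sock_dir, "sock")

def fake_bridge():
    sleep(0.3)  # stands in for the remote bridge starting up
    tmp = sock_file + ".tmp"
    with open(tmp, "w", encoding="utf8") as fp:
        fp.write("socket://localhost:4023")  # hypothetical bridge port
    os.rename(tmp, sock_file)  # publish atomically so readers never see a partial file

try:
    t = threading.Thread(target=fake_bridge)
    t.start()
    while t.is_alive() and not os.path.isfile(sock_file):
        sleep(0.1)
    with open(sock_file, encoding="utf8") as fp:
        print(fp.read())  # socket://localhost:4023
    t.join()
finally:
    shutil.rmtree(sock_dir)
```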


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,91 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
class AsyncCommandBase(object):
MAX_BUFFER_SIZE = 1024 * 1024 # 1Mb
def __init__(self, options=None, on_end_callback=None):
self.options = options or {}
self.on_end_callback = on_end_callback
self._buffer = b""
self._return_code = None
self._d = None
self._paused = False
try:
self.start()
except Exception as e:
raise pb.Error(str(e))
@property
def id(self):
return id(self)
def pause(self):
self._paused = True
self.stop()
def unpause(self):
self._paused = False
self.start()
def start(self):
raise NotImplementedError
def stop(self):
self.transport.loseConnection() # pylint: disable=no-member
def _ac_ended(self):
if self.on_end_callback:
self.on_end_callback()
if not self._d or self._d.called:
self._d = None
return
if self._buffer:
self._d.callback(self._buffer)
else:
self._d.callback(None)
def _ac_ondata(self, data):
self._buffer += data
if len(self._buffer) > self.MAX_BUFFER_SIZE:
self._buffer = self._buffer[-1 * self.MAX_BUFFER_SIZE :]
if self._paused:
return
if self._d and not self._d.called:
self._d.callback(self._buffer)
self._buffer = b""
def ac_read(self):
if self._buffer:
result = self._buffer
self._buffer = b""
return result
if self._return_code is None:
self._d = defer.Deferred()
return self._d
return None
def ac_write(self, data):
self.transport.write(data) # pylint: disable=no-member
return len(data)
def ac_close(self):
self.stop()
return self._return_code
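A toy subclass (hypothetical, not part of PlatformIO) that illustrates the buffering contract of `AsyncCommandBase`: data fed through `_ac_ondata()` is either delivered to a pending Deferred returned by `ac_read()` or buffered until the next read. It assumes platformio-core and Twisted are importable.

```python
# Hypothetical toy subclass, only to show the read/buffer behaviour above.
from platformio.commands.remote.ac.base import AsyncCommandBase

class EchoAsyncCmd(AsyncCommandBase):
    def start(self):  # required by the base class
        pass

    def stop(self):  # avoid touching self.transport in this toy
        pass

ac = EchoAsyncCmd()
ac._ac_ondata(b"hello ")   # no pending reader -> buffered
ac._ac_ondata(b"world")
print(ac.ac_read())        # b'hello world'
d = ac.ac_read()           # empty buffer, no return code -> Deferred
ac._ac_ondata(b"!")        # fires the pending Deferred
d.addCallback(print)       # prints b'!'
```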


@@ -0,0 +1,42 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from twisted.internet import protocol, reactor # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class ProcessAsyncCmd(protocol.ProcessProtocol, AsyncCommandBase):
def start(self):
env = dict(os.environ).copy()
env.update({"PLATFORMIO_FORCE_ANSI": "true"})
reactor.spawnProcess(
self, self.options["executable"], self.options["args"], env
)
def outReceived(self, data):
self._ac_ondata(data)
def errReceived(self, data):
self._ac_ondata(data)
def processExited(self, reason):
self._return_code = reason.value.exitCode
def processEnded(self, reason):
if self._return_code is None:
self._return_code = reason.value.exitCode
self._ac_ended()
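The options dict that `ProcessAsyncCmd` expects is built further below by `RemoteAgentService._process_cmd_run_or_test`; roughly, it looks like the following (paths and arguments here are hypothetical).

```python
# Hypothetical options for ProcessAsyncCmd: an absolute executable path plus
# the full argv (argv[0] repeats the program name, as spawnProcess expects).
options = {
    "executable": "/usr/local/bin/platformio",  # assumed location
    "args": ["platformio", "--force", "run", "-d", "/tmp/project"],
}
```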


@@ -0,0 +1,66 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import zlib
from io import BytesIO
from platformio.commands.remote.ac.base import AsyncCommandBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
class ProjectSyncAsyncCmd(AsyncCommandBase):
def __init__(self, *args, **kwargs):
self.psync = None
self._upstream = None
super().__init__(*args, **kwargs)
def start(self):
project_dir = os.path.join(
self.options["agent_working_dir"], "projects", self.options["id"]
)
self.psync = ProjectSync(project_dir)
for name in self.options["items"]:
self.psync.add_item(os.path.join(project_dir, name), name)
def stop(self):
self.psync = None
self._upstream = None
self._return_code = PROJECT_SYNC_STAGE.COMPLETED.value
def ac_write(self, data):
stage = PROJECT_SYNC_STAGE.lookupByValue(data.get("stage"))
if stage is PROJECT_SYNC_STAGE.DBINDEX:
self.psync.rebuild_dbindex()
return zlib.compress(json.dumps(self.psync.get_dbindex()).encode())
if stage is PROJECT_SYNC_STAGE.DELETE:
return self.psync.delete_dbindex(
json.loads(zlib.decompress(data["dbindex"]))
)
if stage is PROJECT_SYNC_STAGE.UPLOAD:
if not self._upstream:
self._upstream = BytesIO()
self._upstream.write(data["chunk"])
if self._upstream.tell() == data["total"]:
self.psync.decompress_items(self._upstream)
self._upstream = None
return PROJECT_SYNC_STAGE.EXTRACTED.value
return PROJECT_SYNC_STAGE.UPLOAD.value
return None
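The UPLOAD branch above accumulates chunks until the announced total is reached; a minimal stdlib-only sketch of that bookkeeping follows (chunk values are hypothetical).

```python
# Chunks are appended to an in-memory stream; once tell() equals the total
# announced by the client, the archive would be decompressed (EXTRACTED),
# otherwise more data is requested (UPLOAD).
from io import BytesIO

upstream = BytesIO()
total = 10
for chunk in (b"12345", b"67890"):  # hypothetical payload chunks
    upstream.write(chunk)
    print("EXTRACTED" if upstream.tell() == total else "UPLOAD")
# UPLOAD
# EXTRACTED
```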


@@ -0,0 +1,60 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from twisted.internet import protocol, reactor # pylint: disable=import-error
from twisted.internet.serialport import SerialPort # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class SerialPortAsyncCmd(protocol.Protocol, AsyncCommandBase):
def start(self):
SerialPort(
self,
reactor=reactor,
**{
"deviceNameOrPortNumber": self.options["port"],
"baudrate": self.options["baud"],
"parity": self.options["parity"],
"rtscts": 1 if self.options["rtscts"] else 0,
"xonxoff": 1 if self.options["xonxoff"] else 0,
}
)
def connectionMade(self):
self.reset_device()
if self.options.get("rts", None) is not None:
self.transport.setRTS(self.options.get("rts"))
if self.options.get("dtr", None) is not None:
self.transport.setDTR(self.options.get("dtr"))
def reset_device(self):
self.transport.flushInput()
self.transport.setDTR(False)
self.transport.setRTS(False)
sleep(0.1)
self.transport.setDTR(True)
self.transport.setRTS(True)
sleep(0.1)
def dataReceived(self, data):
self._ac_ondata(data)
def connectionLost(self, reason): # pylint: disable=unused-argument
if self._paused:
return
self._return_code = 0
self._ac_ended()
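For comparison, the DTR/RTS toggle performed by `reset_device()` above looks roughly like this with plain pyserial; the port name and baud rate are hypothetical and pyserial must be installed.

```python
# Rough pyserial equivalent of reset_device(): drop DTR/RTS, wait, raise them.
import time
import serial  # pyserial

with serial.Serial("/dev/ttyUSB0", 115200) as port:  # hypothetical port
    port.reset_input_buffer()  # counterpart of transport.flushInput()
    port.dtr = False
    port.rts = False
    time.sleep(0.1)
    port.dtr = True
    port.rts = True
    time.sleep(0.1)
```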


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,38 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import click
from platformio.commands.remote.client.base import RemoteClientBase
class AgentListClient(RemoteClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("list", True)
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
for item in result:
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("ID: %s" % item["id"])
click.echo(
"Started: %s"
% datetime.fromtimestamp(item["started"]).strftime("%Y-%m-%d %H:%M:%S")
)
click.echo("")
self.disconnect()
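The `_cbResult` callback above expects a list of per-agent dicts; a hypothetical example of that shape:

```python
# Hypothetical "list" result: one dict per active agent.
result = [
    {"name": "office-rpi", "id": "a1b2c3d4", "started": 1652691600},
]
```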


@@ -0,0 +1,226 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio import proc
from platformio.commands.remote.ac.process import ProcessAsyncCmd
from platformio.commands.remote.ac.psync import ProjectSyncAsyncCmd
from platformio.commands.remote.ac.serial import SerialPortAsyncCmd
from platformio.commands.remote.client.base import RemoteClientBase
from platformio.device.list import list_serial_ports
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
class RemoteAgentService(RemoteClientBase):
def __init__(self, name, share, working_dir=None):
RemoteClientBase.__init__(self)
self.log_level = LogLevel.info
self.working_dir = working_dir or os.path.join(
ProjectConfig.get_instance().get("platformio", "core_dir"), "remote"
)
if not os.path.isdir(self.working_dir):
os.makedirs(self.working_dir)
if name:
self.name = str(name)[:50]
self.join_options.update(
{"agent": True, "share": [s.lower().strip()[:50] for s in share]}
)
self._acs = {}
def agent_pool_ready(self):
pass
def cb_disconnected(self, reason):
for ac in self._acs.values():
ac.ac_close()
RemoteClientBase.cb_disconnected(self, reason)
def remote_acread(self, ac_id):
self.log.debug("Async Read: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_read()
def remote_acwrite(self, ac_id, data):
self.log.debug("Async Write: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_write(data)
def remote_acclose(self, ac_id):
self.log.debug("Async Close: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return_code = self._acs[ac_id].ac_close()
del self._acs[ac_id]
return return_code
def remote_cmd(self, cmd, options):
self.log.info("Remote command received: {cmd}", cmd=cmd)
self.log.debug("Command options: {options!r}", options=options)
callback = "_process_cmd_%s" % cmd.replace(".", "_")
return getattr(self, callback)(options)
def _defer_async_cmd(self, ac, pass_agent_name=True):
self._acs[ac.id] = ac
if pass_agent_name:
return (self.id, ac.id, self.name)
return (self.id, ac.id)
def _process_cmd_device_list(self, _):
return (self.name, list_serial_ports())
def _process_cmd_device_monitor(self, options):
if not options["port"]:
for item in list_serial_ports():
if "VID:PID" in item["hwid"]:
options["port"] = item["port"]
break
# terminate opened monitors
if options["port"]:
for ac in list(self._acs.values()):
if (
isinstance(ac, SerialPortAsyncCmd)
and ac.options["port"] == options["port"]
):
self.log.info(
"Terminate previously opened monitor at {port}",
port=options["port"],
)
ac.ac_close()
del self._acs[ac.id]
if not options["port"]:
raise pb.Error("Please specify serial port using `--port` option")
self.log.info("Starting serial monitor at {port}", port=options["port"])
return self._defer_async_cmd(SerialPortAsyncCmd(options), pass_agent_name=False)
def _process_cmd_psync(self, options):
for ac in list(self._acs.values()):
if (
isinstance(ac, ProjectSyncAsyncCmd)
and ac.options["id"] == options["id"]
):
self.log.info("Terminate previous Project Sync process")
ac.ac_close()
del self._acs[ac.id]
options["agent_working_dir"] = self.working_dir
return self._defer_async_cmd(
ProjectSyncAsyncCmd(options), pass_agent_name=False
)
def _process_cmd_run(self, options):
return self._process_cmd_run_or_test("run", options)
def _process_cmd_test(self, options):
return self._process_cmd_run_or_test("test", options)
def _process_cmd_run_or_test( # pylint: disable=too-many-locals,too-many-branches
self, command, options
):
assert options and "project_id" in options
project_dir = os.path.join(self.working_dir, "projects", options["project_id"])
origin_pio_ini = os.path.join(project_dir, "platformio.ini")
back_pio_ini = os.path.join(project_dir, "platformio.ini.bak")
# remove insecure project options
try:
conf = ProjectConfig(origin_pio_ini)
if os.path.isfile(back_pio_ini):
os.remove(back_pio_ini)
os.rename(origin_pio_ini, back_pio_ini)
# cleanup
if conf.has_section("platformio"):
for opt in conf.options("platformio"):
if opt.endswith("_dir"):
conf.remove_option("platformio", opt)
else:
conf.add_section("platformio")
conf.set("platformio", "build_dir", ".pio/build")
conf.save(origin_pio_ini)
# restore A/M times
os.utime(
origin_pio_ini,
(os.path.getatime(back_pio_ini), os.path.getmtime(back_pio_ini)),
)
except NotPlatformIOProjectError as e:
raise pb.Error(str(e))
cmd_args = ["platformio", "--force", command, "-d", project_dir]
for env in options.get("environment", []):
cmd_args.extend(["-e", env])
for target in options.get("target", []):
cmd_args.extend(["-t", target])
for ignore in options.get("ignore", []):
cmd_args.extend(["-i", ignore])
if options.get("upload_port", False):
cmd_args.extend(["--upload-port", options.get("upload_port")])
if options.get("test_port", False):
cmd_args.extend(["--test-port", options.get("test_port")])
if options.get("disable_auto_clean", False):
cmd_args.append("--disable-auto-clean")
if options.get("without_building", False):
cmd_args.append("--without-building")
if options.get("without_uploading", False):
cmd_args.append("--without-uploading")
if options.get("silent", False):
cmd_args.append("-s")
if options.get("verbose", False):
cmd_args.append("-v")
paused_acs = []
for ac in self._acs.values():
if not isinstance(ac, SerialPortAsyncCmd):
continue
self.log.info("Pause active monitor at {port}", port=ac.options["port"])
ac.pause()
paused_acs.append(ac)
def _cb_on_end():
if os.path.isfile(back_pio_ini):
if os.path.isfile(origin_pio_ini):
os.remove(origin_pio_ini)
os.rename(back_pio_ini, origin_pio_ini)
for ac in paused_acs:
ac.unpause()
self.log.info(
"Unpause active monitor at {port}", port=ac.options["port"]
)
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args},
on_end_callback=_cb_on_end,
)
)
def _process_cmd_update(self, options):
cmd_args = ["platformio", "--force", "update"]
if options.get("only_check"):
cmd_args.append("--only-check")
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args}
)
)
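The `remote_cmd()` dispatch above maps a dotted command name onto a `_process_cmd_*` method; a minimal standalone sketch of that convention:

```python
# "device.list" -> _process_cmd_device_list(options)
class Dispatcher:
    def _process_cmd_device_list(self, options):
        return ("agent-name", [])  # stand-in result

    def remote_cmd(self, cmd, options):
        callback = "_process_cmd_%s" % cmd.replace(".", "_")
        return getattr(self, callback)(options)

print(Dispatcher().remote_cmd("device.list", {}))  # ('agent-name', [])
```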


@@ -0,0 +1,65 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class AsyncClientBase(RemoteClientBase):
def __init__(self, command, agents, options):
RemoteClientBase.__init__(self)
self.command = command
self.agents = agents
self.options = options
self._acs_total = 0
self._acs_ended = 0
def agent_pool_ready(self):
pass
def cb_async_result(self, result):
if self._acs_total == 0:
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
self.acread_data(*value)
def acread_data(self, agent_id, ac_id, agent_name=None):
d = self.agentpool.callRemote("acread", agent_id, ac_id)
d.addCallback(self.cb_acread_result, agent_id, ac_id, agent_name)
d.addErrback(self.cb_global_error)
def cb_acread_result(self, result, agent_id, ac_id, agent_name):
if result is None:
self.acclose(agent_id, ac_id)
else:
if self._acs_total > 1 and agent_name:
click.echo("[%s] " % agent_name, nl=False)
click.echo(result, nl=False)
self.acread_data(agent_id, ac_id, agent_name)
def acclose(self, agent_id, ac_id):
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_acclose_result)
d.addErrback(self.cb_global_error)
def cb_acclose_result(self, exit_code):
self._acs_ended += 1
if self._acs_ended != self._acs_total:
return
self.disconnect(exit_code)
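`cb_async_result()` above receives one `(success, value)` tuple per agent, where `value` is the `(agent_id, ac_id[, agent_name])` tuple returned by `_defer_async_cmd()`; a hypothetical example of that payload:

```python
# Hypothetical per-agent results: a failure would raise pb.Error, a success
# starts the acread loop for that (agent_id, ac_id) pair.
result = [
    (True, ("agent-uuid-1", 139715223817040, "office-rpi")),
    (True, ("agent-uuid-2", 139715223817104, "lab-nuc")),
]
```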


@@ -0,0 +1,193 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from time import time
import click
from twisted.internet import defer, endpoints, reactor # pylint: disable=import-error
from twisted.logger import ILogObserver # pylint: disable=import-error
from twisted.logger import Logger # pylint: disable=import-error
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.logger import formatEvent # pylint: disable=import-error
from twisted.python import failure # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from zope.interface import provider # pylint: disable=import-error
from platformio import __pioremote_endpoint__, __version__, app, exception, maintenance
from platformio.commands.remote.factory.client import RemoteClientFactory
from platformio.commands.remote.factory.ssl import SSLContextFactory
class RemoteClientBase( # pylint: disable=too-many-instance-attributes
pb.Referenceable
):
PING_DELAY = 60
PING_MAX_FAILURES = 3
DEBUG = False
def __init__(self):
self.log_level = LogLevel.warn
self.log = Logger(namespace="remote", observer=self._log_observer)
self.id = app.get_host_id()
self.name = app.get_host_name()
self.join_options = {"corever": __version__}
self.perspective = None
self.agentpool = None
self._ping_id = 0
self._ping_caller = None
self._ping_counter = 0
self._reactor_stopped = False
self._exit_code = 0
@provider(ILogObserver)
def _log_observer(self, event):
if not self.DEBUG and (
event["log_namespace"] != self.log.namespace
or self.log_level > event["log_level"]
):
return
msg = formatEvent(event)
click.echo(
"%s [%s] %s"
% (
datetime.fromtimestamp(event["log_time"]).strftime("%Y-%m-%d %H:%M:%S"),
event["log_level"].name,
msg,
)
)
def connect(self):
self.log.info("Name: {name}", name=self.name)
self.log.info("Connecting to PlatformIO Remote Development Cloud")
# pylint: disable=protected-access
proto, options = endpoints._parse(__pioremote_endpoint__)
proto = proto[0]
factory = RemoteClientFactory()
factory.remote_client = self
factory.sslContextFactory = None
if proto == "ssl":
factory.sslContextFactory = SSLContextFactory(options["host"])
reactor.connectSSL(
options["host"],
int(options["port"]),
factory,
factory.sslContextFactory,
)
elif proto == "tcp":
reactor.connectTCP(options["host"], int(options["port"]), factory)
else:
raise exception.PlatformioException("Unknown PIO Remote Cloud protocol")
reactor.run()
if self._exit_code != 0:
raise exception.ReturnErrorCode(self._exit_code)
def cb_client_authorization_failed(self, err):
msg = "Bad account credentials"
if err.check(pb.Error):
msg = err.getErrorMessage()
self.log.error(msg)
self.disconnect(exit_code=1)
def cb_client_authorization_made(self, perspective):
self.log.info("Successfully authorized")
self.perspective = perspective
d = perspective.callRemote("join", self.id, self.name, self.join_options)
d.addCallback(self._cb_client_join_made)
d.addErrback(self.cb_global_error)
def _cb_client_join_made(self, result):
code = result[0]
if code == 1:
self.agentpool = result[1]
self.agent_pool_ready()
self.restart_ping()
elif code == 2:
self.remote_service(*result[1:])
def remote_service(self, command, options):
if command == "disconnect":
self.log.error(
"PIO Remote Cloud disconnected: {msg}", msg=options.get("message")
)
self.disconnect()
def restart_ping(self, reset_counter=True):
# stop previous ping callers
self.stop_ping(reset_counter)
self._ping_caller = reactor.callLater(self.PING_DELAY, self._do_ping)
def _do_ping(self):
self._ping_counter += 1
self._ping_id = int(time())
d = self.perspective.callRemote("service", "ping", {"id": self._ping_id})
d.addCallback(self._cb_pong)
d.addErrback(self._cb_pong)
def stop_ping(self, reset_counter=True):
if reset_counter:
self._ping_counter = 0
if not self._ping_caller or not self._ping_caller.active():
return
self._ping_caller.cancel()
self._ping_caller = None
def _cb_pong(self, result):
if not isinstance(result, failure.Failure) and self._ping_id == result:
self.restart_ping()
return
if self._ping_counter >= self.PING_MAX_FAILURES:
self.stop_ping()
self.perspective.broker.transport.loseConnection()
else:
self.restart_ping(reset_counter=False)
def agent_pool_ready(self):
raise NotImplementedError
def disconnect(self, exit_code=None):
self.stop_ping()
if exit_code is not None:
self._exit_code = exit_code
if reactor.running and not self._reactor_stopped:
self._reactor_stopped = True
reactor.stop()
def cb_disconnected(self, _):
self.stop_ping()
self.perspective = None
self.agentpool = None
def cb_global_error(self, err):
if err.check(pb.PBConnectionLost, defer.CancelledError):
return
msg = err.getErrorMessage()
if err.check(pb.DeadReferenceError):
msg = "Remote Client has been terminated"
elif "PioAgentNotStartedError" in str(err.type):
msg = (
"Could not find active agents. Please start it before on "
"a remote machine using `pio remote agent start` command.\n"
"See http://docs.platformio.org/page/plus/pio-remote.html"
)
else:
maintenance.on_platformio_exception(Exception(err.type))
click.secho(msg, fg="red", err=True)
self.disconnect(exit_code=1)
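The keep-alive policy implemented by `_do_ping()`/`_cb_pong()` above boils down to the following decision; this is a standalone sketch with hypothetical values, not the class itself.

```python
# A pong echoing the last ping id restarts the timer and resets the failure
# counter; otherwise the client retries until PING_MAX_FAILURES and then
# closes the transport.
PING_MAX_FAILURES = 3

def on_pong(ping_id, result, failures):
    if result == ping_id:
        return "restart ping", 0
    if failures >= PING_MAX_FAILURES:
        return "lose connection", failures
    return "retry ping (keep counter)", failures

print(on_pong(1652691600, 1652691600, 1))  # ('restart ping', 0)
print(on_pong(1652691600, None, 3))        # ('lose connection', 3)
```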


@@ -0,0 +1,54 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import click
from platformio.commands.remote.client.base import RemoteClientBase
class DeviceListClient(RemoteClientBase):
def __init__(self, agents, json_output):
RemoteClientBase.__init__(self)
self.agents = agents
self.json_output = json_output
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, "device.list")
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
data = {}
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, devlist) = value
data[agent_name] = devlist
if self.json_output:
click.echo(json.dumps(data))
else:
for agent_name, devlist in data.items():
click.echo("Agent %s" % click.style(agent_name, fg="cyan", bold=True))
click.echo("=" * (6 + len(agent_name)))
for item in devlist:
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
self.disconnect()
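A hypothetical example of the `device.list` result consumed by `_cbResult()` above, one `(success, (agent_name, devlist))` tuple per agent:

```python
# Hypothetical device.list payload; an "hwid" containing "VID:PID" is what
# the monitor client below treats as a USB device.
result = [
    (True, ("office-rpi", [
        {"port": "/dev/ttyUSB0",
         "hwid": "USB VID:PID=1A86:7523",
         "description": "USB2.0-Serial"},
    ])),
]
```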


@@ -0,0 +1,240 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from fnmatch import fnmatch
import click
from twisted.internet import protocol, reactor, task # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class SMBridgeProtocol(protocol.Protocol): # pylint: disable=no-init
def connectionMade(self):
self.factory.add_client(self)
def connectionLost(self, reason): # pylint: disable=unused-argument
self.factory.remove_client(self)
def dataReceived(self, data):
self.factory.send_to_server(data)
class SMBridgeFactory(protocol.ServerFactory):
def __init__(self, cdm):
self.cdm = cdm
self._clients = []
def buildProtocol(self, addr): # pylint: disable=unused-argument
p = SMBridgeProtocol()
p.factory = self # pylint: disable=attribute-defined-outside-init
return p
def add_client(self, client):
self.cdm.log.debug("SMBridge: Client connected")
self._clients.append(client)
self.cdm.acread_data()
def remove_client(self, client):
self.cdm.log.debug("SMBridge: Client disconnected")
self._clients.remove(client)
if not self._clients:
self.cdm.client_terminal_stopped()
def has_clients(self):
return len(self._clients)
def send_to_clients(self, data):
if not self._clients:
return None
for client in self._clients:
client.transport.write(data)
return len(data)
def send_to_server(self, data):
self.cdm.acwrite_data(data)
class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
RemoteClientBase
):
MAX_BUFFER_SIZE = 1024 * 1024
def __init__(self, agents, **kwargs):
RemoteClientBase.__init__(self)
self.agents = agents
self.cmd_options = kwargs
self._bridge_factory = SMBridgeFactory(self)
self._agent_id = None
self._ac_id = None
self._d_acread = None
self._d_acwrite = None
self._acwrite_buffer = b""
def agent_pool_ready(self):
d = task.deferLater(
reactor, 1, self.agentpool.callRemote, "cmd", self.agents, "device.list"
)
d.addCallback(self._cb_device_list)
d.addErrback(self.cb_global_error)
def _cb_device_list(self, result):
devices = []
hwid_devindexes = []
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, ports) = value
for item in ports:
if "VID:PID" in item["hwid"]:
hwid_devindexes.append(len(devices))
devices.append((agent_name, item))
if len(result) == 1 and self.cmd_options["port"]:
if set(["*", "?", "[", "]"]) & set(self.cmd_options["port"]):
for agent, item in devices:
if fnmatch(item["port"], self.cmd_options["port"]):
return self.start_remote_monitor(agent, item["port"])
return self.start_remote_monitor(result[0][1][0], self.cmd_options["port"])
device = None
if len(hwid_devindexes) == 1:
device = devices[hwid_devindexes[0]]
else:
click.echo("Available ports:")
for i, device in enumerate(devices):
click.echo(
"{index}. {host}{port} \t{description}".format(
index=i + 1,
host=device[0] + ":" if len(result) > 1 else "",
port=device[1]["port"],
description=device[1]["description"]
if device[1]["description"] != "n/a"
else "",
)
)
device_index = click.prompt(
"Please choose a port (number in the list above)",
type=click.Choice([str(i + 1) for i, _ in enumerate(devices)]),
)
device = devices[int(device_index) - 1]
self.start_remote_monitor(device[0], device[1]["port"])
return None
def start_remote_monitor(self, agent, port):
options = {"port": port}
for key in ("baud", "parity", "rtscts", "xonxoff", "rts", "dtr"):
options[key] = self.cmd_options[key]
click.echo(
"Starting Serial Monitor on {host}:{port}".format(
host=agent, port=options["port"]
)
)
d = self.agentpool.callRemote("cmd", [agent], "device.monitor", options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)
def cb_async_result(self, result):
if len(result) != 1:
raise pb.Error("Invalid response from Remote Cloud")
success, value = result[0]
if not success:
raise pb.Error(value)
reconnected = self._agent_id is not None
self._agent_id, self._ac_id = value
if reconnected:
self.acread_data(force=True)
self.acwrite_data("", force=True)
return
# start bridge
port = reactor.listenTCP(0, self._bridge_factory)
address = port.getHost()
self.log.debug("Serial Bridge is started on {address!r}", address=address)
if "sock" in self.cmd_options:
with open(
os.path.join(self.cmd_options["sock"], "sock"),
mode="w",
encoding="utf8",
) as fp:
fp.write("socket://localhost:%d" % address.port)
def client_terminal_stopped(self):
try:
d = self.agentpool.callRemote("acclose", self._agent_id, self._ac_id)
d.addCallback(lambda r: self.disconnect())
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def acread_data(self, force=False):
if force and self._d_acread:
self._d_acread.cancel()
self._d_acread = None
if (
self._d_acread and not self._d_acread.called
) or not self._bridge_factory.has_clients():
return
try:
self._d_acread = self.agentpool.callRemote(
"acread", self._agent_id, self._ac_id
)
self._d_acread.addCallback(self.cb_acread_result)
self._d_acread.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acread_result(self, result):
if result is None:
self.disconnect(exit_code=1)
else:
self._bridge_factory.send_to_clients(result)
self.acread_data()
def acwrite_data(self, data, force=False):
if force and self._d_acwrite:
self._d_acwrite.cancel()
self._d_acwrite = None
self._acwrite_buffer += data
if len(self._acwrite_buffer) > self.MAX_BUFFER_SIZE:
self._acwrite_buffer = self._acwrite_buffer[-1 * self.MAX_BUFFER_SIZE :]
if (self._d_acwrite and not self._d_acwrite.called) or not self._acwrite_buffer:
return
data = self._acwrite_buffer
self._acwrite_buffer = b""
try:
d = self.agentpool.callRemote("acwrite", self._agent_id, self._ac_id, data)
d.addCallback(self.cb_acwrite_result)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acwrite_result(self, result):
assert result > 0
if self._acwrite_buffer:
self.acwrite_data(b"")


@@ -0,0 +1,272 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import os
import zlib
from io import BytesIO
from twisted.spread import pb # pylint: disable=import-error
from platformio import fs
from platformio.commands.remote.client.async_base import AsyncClientBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
from platformio.compat import hashlib_encode_data
from platformio.project.config import ProjectConfig
class RunOrTestClient(AsyncClientBase):
MAX_ARCHIVE_SIZE = 50 * 1024 * 1024 # 50Mb
UPLOAD_CHUNK_SIZE = 256 * 1024 # 256Kb
PSYNC_SRC_EXTS = [
"c",
"cpp",
"S",
"spp",
"SPP",
"sx",
"s",
"asm",
"ASM",
"h",
"hpp",
"ipp",
"ino",
"pde",
"json",
"properties",
]
PSYNC_SKIP_DIRS = (".git", ".svn", ".hg", "example", "examples", "test", "tests")
def __init__(self, *args, **kwargs):
AsyncClientBase.__init__(self, *args, **kwargs)
self.project_id = self.generate_project_id(self.options["project_dir"])
self.psync = ProjectSync(self.options["project_dir"])
def generate_project_id(self, path):
h = hashlib.sha1(hashlib_encode_data(self.id))
h.update(hashlib_encode_data(path))
return "%s-%s" % (os.path.basename(path), h.hexdigest())
def add_project_items(self, psync):
with fs.cd(self.options["project_dir"]):
cfg = ProjectConfig.get_instance(
os.path.join(self.options["project_dir"], "platformio.ini")
)
psync.add_item(cfg.path, "platformio.ini")
psync.add_item(cfg.get("platformio", "shared_dir"), "shared")
psync.add_item(cfg.get("platformio", "boards_dir"), "boards")
if self.options["force_remote"]:
self._add_project_source_items(cfg, psync)
else:
self._add_project_binary_items(cfg, psync)
if self.command == "test":
psync.add_item(cfg.get("platformio", "test_dir"), "test")
def _add_project_source_items(self, cfg, psync):
psync.add_item(cfg.get("platformio", "lib_dir"), "lib")
psync.add_item(
cfg.get("platformio", "include_dir"),
"include",
cb_filter=self._cb_tarfile_filter,
)
psync.add_item(
cfg.get("platformio", "src_dir"), "src", cb_filter=self._cb_tarfile_filter
)
if set(["buildfs", "uploadfs", "uploadfsota"]) & set(
self.options.get("target", [])
):
psync.add_item(cfg.get("platformio", "data_dir"), "data")
@staticmethod
def _add_project_binary_items(cfg, psync):
build_dir = cfg.get("platformio", "build_dir")
for env_name in os.listdir(build_dir):
env_dir = os.path.join(build_dir, env_name)
if not os.path.isdir(env_dir):
continue
for fname in os.listdir(env_dir):
bin_file = os.path.join(env_dir, fname)
bin_exts = (".elf", ".bin", ".hex", ".eep", "program")
if os.path.isfile(bin_file) and fname.endswith(bin_exts):
psync.add_item(
bin_file, os.path.join(".pio", "build", env_name, fname)
)
def _cb_tarfile_filter(self, path):
if (
os.path.isdir(path)
and os.path.basename(path).lower() in self.PSYNC_SKIP_DIRS
):
return None
if os.path.isfile(path) and not self.is_file_with_exts(
path, self.PSYNC_SRC_EXTS
):
return None
return path
@staticmethod
def is_file_with_exts(path, exts):
if path.endswith(tuple(".%s" % e for e in exts)):
return True
return False
def agent_pool_ready(self):
self.psync_init()
def psync_init(self):
self.add_project_items(self.psync)
d = self.agentpool.callRemote(
"cmd",
self.agents,
"psync",
dict(id=self.project_id, items=[i[1] for i in self.psync.get_items()]),
)
d.addCallback(self.cb_psync_init_result)
d.addErrback(self.cb_global_error)
# build db index while wait for result from agent
self.psync.rebuild_dbindex()
def cb_psync_init_result(self, result):
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
agent_id, ac_id = value
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(stage=PROJECT_SYNC_STAGE.DBINDEX.value),
)
d.addCallback(self.cb_psync_dbindex_result, agent_id, ac_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_dbindex_result(self, result, agent_id, ac_id):
result = set(json.loads(zlib.decompress(result)))
dbindex = set(self.psync.get_dbindex())
delete = list(result - dbindex)
delta = list(dbindex - result)
self.log.debug(
"PSync: stats, total={total}, delete={delete}, delta={delta}",
total=len(dbindex),
delete=len(delete),
delta=len(delta),
)
if not delete and not delta:
return self.psync_finalize(agent_id, ac_id)
if not delete:
return self.psync_upload(agent_id, ac_id, delta)
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.DELETE.value,
dbindex=zlib.compress(json.dumps(delete).encode()),
),
)
d.addCallback(self.cb_psync_delete_result, agent_id, ac_id, delta)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
return None
def cb_psync_delete_result(self, result, agent_id, ac_id, dbindex):
assert result
self.psync_upload(agent_id, ac_id, dbindex)
def psync_upload(self, agent_id, ac_id, dbindex):
assert dbindex
fileobj = BytesIO()
compressed = self.psync.compress_items(fileobj, dbindex, self.MAX_ARCHIVE_SIZE)
fileobj.seek(0)
self.log.debug(
"PSync: upload project, size={size}", size=len(fileobj.getvalue())
)
self.psync_upload_chunk(
agent_id, ac_id, list(set(dbindex) - set(compressed)), fileobj
)
def psync_upload_chunk(self, agent_id, ac_id, dbindex, fileobj):
offset = fileobj.tell()
total = fileobj.seek(0, os.SEEK_END)
# unwind
fileobj.seek(offset)
chunk = fileobj.read(self.UPLOAD_CHUNK_SIZE)
assert chunk
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.UPLOAD.value,
chunk=chunk,
length=len(chunk),
total=total,
),
)
d.addCallback(
self.cb_psync_upload_chunk_result, agent_id, ac_id, dbindex, fileobj
)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments
self, result, agent_id, ac_id, dbindex, fileobj
):
result = PROJECT_SYNC_STAGE.lookupByValue(result)
self.log.debug("PSync: upload chunk result {r}", r=str(result))
assert result & (PROJECT_SYNC_STAGE.UPLOAD | PROJECT_SYNC_STAGE.EXTRACTED)
if result is PROJECT_SYNC_STAGE.EXTRACTED:
if dbindex:
self.psync_upload(agent_id, ac_id, dbindex)
else:
self.psync_finalize(agent_id, ac_id)
else:
self.psync_upload_chunk(agent_id, ac_id, dbindex, fileobj)
def psync_finalize(self, agent_id, ac_id):
try:
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_psync_completed_result, agent_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_completed_result(self, result, agent_id):
assert PROJECT_SYNC_STAGE.lookupByValue(result)
options = self.options.copy()
del options["project_dir"]
options["project_id"] = self.project_id
d = self.agentpool.callRemote("cmd", [agent_id], self.command, options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)
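`generate_project_id()` above derives a stable remote project id from the host id and the project path; a stdlib-only sketch with hypothetical values:

```python
# The id is "<basename>-<sha1(host_id + path)>", so the same project on the
# same host always syncs into the same remote directory.
import hashlib
import os

host_id, path = "my-host-id", "/home/user/Blink"  # hypothetical values
h = hashlib.sha1(host_id.encode())
h.update(path.encode())
print("%s-%s" % (os.path.basename(path), h.hexdigest()))
```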


@@ -0,0 +1,22 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.remote.client.async_base import AsyncClientBase
class UpdateCoreClient(AsyncClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, self.command, self.options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)


@@ -0,0 +1,389 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, import-outside-toplevel
# pylint: disable=inconsistent-return-statements
import os
import subprocess
import threading
from tempfile import mkdtemp
from time import sleep
import click
from platformio import fs, proc
from platformio.commands.run.command import cli as cmd_run
from platformio.device.commands.monitor import (
apply_project_monitor_options,
device_monitor_cmd,
get_project_options,
project_options_to_monitor_argv,
)
from platformio.package.manager.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.options import ProjectOptions
from platformio.test.command import test_cmd
@click.group("remote", short_help="Remote Development")
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
ctx.obj = agent
inject_contrib_pysite()
@cli.group("agent", short_help="Start a new agent or list active")
def remote_agent():
pass
@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option(
"-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
def remote_agent_start(name, share, working_dir):
from platformio.commands.remote.client.agent_service import RemoteAgentService
RemoteAgentService(name, share, working_dir).connect()
@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
from platformio.commands.remote.client.agent_list import AgentListClient
AgentListClient().connect()
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.pass_obj
def remote_update(agents, only_check, dry_run):
from platformio.commands.remote.client.update_core import UpdateCoreClient
UpdateCoreClient("update", agents, dict(only_check=only_check or dry_run)).connect()
@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_obj
@click.pass_context
def remote_run(
ctx,
agents,
environment,
target,
upload_port,
project_dir,
disable_auto_clean,
force_remote,
silent,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"run",
agents,
dict(
environment=environment,
target=target,
upload_port=upload_port,
project_dir=project_dir,
disable_auto_clean=disable_auto_clean,
force_remote=force_remote,
silent=silent,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
local_targets = []
if "clean" in target:
local_targets = ["clean"]
elif set(["buildfs", "uploadfs", "uploadfsota"]) & set(target):
local_targets = ["buildfs"]
else:
local_targets = ["checkprogsize", "buildprog"]
ctx.invoke(
cmd_run,
environment=environment,
target=local_targets,
project_dir=project_dir,
# disable_auto_clean=True,
silent=silent,
verbose=verbose,
)
if any(["upload" in t for t in target] + ["program" in target]):
click.secho("Uploading firmware remotely", bold=True)
cr.options["target"] += ("nobuild",)
cr.options["disable_auto_clean"] = True
cr.connect()
return True
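The local-target selection inside `remote_run()` above can be summarised in a small standalone sketch (the target tuples are hypothetical inputs):

```python
# "clean" builds nothing extra locally, filesystem targets build the FS
# image, everything else builds the program before the remote upload step.
def pick_local_targets(target):
    if "clean" in target:
        return ["clean"]
    if {"buildfs", "uploadfs", "uploadfsota"} & set(target):
        return ["buildfs"]
    return ["checkprogsize", "buildprog"]

print(pick_local_targets(("upload",)))    # ['checkprogsize', 'buildprog']
print(pick_local_targets(("uploadfs",)))  # ['buildfs']
```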
@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option(
"--filter",
"-f",
multiple=True,
metavar="<pattern>",
help="Filter tests by a pattern",
)
@click.option(
"--ignore",
"-i",
multiple=True,
metavar="<pattern>",
help="Ignore tests by a pattern",
)
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
@click.pass_obj
@click.pass_context
def remote_test( # pylint: disable=redefined-builtin
ctx,
agents,
environment,
filter,
ignore,
upload_port,
test_port,
project_dir,
force_remote,
without_building,
without_uploading,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"test",
agents,
dict(
environment=environment,
filter=filter,
ignore=ignore,
upload_port=upload_port,
test_port=test_port,
project_dir=project_dir,
force_remote=force_remote,
without_building=without_building,
without_uploading=without_uploading,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
ctx.invoke(
test_cmd,
environment=environment,
filter=filter,
ignore=ignore,
project_dir=project_dir,
without_uploading=True,
without_testing=True,
verbose=verbose,
)
click.secho("Testing project remotely", bold=True)
cr.options["without_building"] = True
cr.connect()
return True
@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
pass
@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
@click.pass_obj
def device_list(agents, json_output):
from platformio.commands.remote.client.device_list import DeviceListClient
DeviceListClient(agents, json_output).connect()
@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option(
"--baud",
"-b",
type=int,
help="Set baud rate, default=%d" % ProjectOptions["env.monitor_speed"].default,
)
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
@click.option(
"--sock",
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.pass_obj
@click.pass_context
def device_monitor(ctx, agents, **kwargs):
from platformio.commands.remote.client.device_monitor import DeviceMonitorClient
if kwargs["sock"]:
return DeviceMonitorClient(agents, **kwargs).connect()
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
project_options = get_project_options(kwargs["environment"])
kwargs = apply_project_monitor_options(kwargs, project_options)
except NotPlatformIOProjectError:
pass
kwargs["baud"] = kwargs["baud"] or ProjectOptions["env.monitor_speed"].default
def _tx_target(sock_dir):
subcmd_argv = ["remote"]
for agent in agents:
subcmd_argv.extend(["--agent", agent])
subcmd_argv.extend(["device", "monitor"])
subcmd_argv.extend(project_options_to_monitor_argv(kwargs, project_options))
subcmd_argv.extend(["--sock", sock_dir])
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
sock_dir = mkdtemp(suffix="pio")
sock_file = os.path.join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir,))
t.start()
while t.is_alive() and not os.path.isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
with open(sock_file, encoding="utf8") as fp:
kwargs["port"] = fp.read()
ctx.invoke(device_monitor_cmd, **kwargs)
t.join(2)
finally:
fs.rmtree(sock_dir)
return True
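
The monitor command above bridges to the remote agent through a temporary socket file: a child `platformio remote ... device monitor --sock <dir>` process writes the resolved port into `<dir>/sock`, while the parent polls for that file and then starts the ordinary local monitor against it. A minimal standalone sketch of that wait-for-file handshake (the fake agent and port value are illustrative):

import os
import tempfile
import threading
import time

def fake_agent(sock_file):
    time.sleep(0.2)  # pretend the remote port was resolved
    with open(sock_file, "w", encoding="utf8") as fp:
        fp.write("socket://localhost:5555")

def wait_for_port(sock_file, timeout=5.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline and not os.path.isfile(sock_file):
        time.sleep(0.1)  # same 100 ms polling interval as above
    with open(sock_file, encoding="utf8") as fp:
        return fp.read()

sock_dir = tempfile.mkdtemp(suffix="pio")
sock_file = os.path.join(sock_dir, "sock")
t = threading.Thread(target=fake_agent, args=(sock_file,))
t.start()
print(wait_for_port(sock_file))  # -> socket://localhost:5555
t.join()
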


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,86 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.cred import credentials # pylint: disable=import-error
from twisted.internet import defer, protocol, reactor # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio.app import get_host_id
from platformio.clients.account import AccountClient
class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
def clientConnectionMade(self, broker):
if self.sslContextFactory and not self.sslContextFactory.certificate_verified:
self.remote_client.log.error(
"A remote cloud could not prove that its security certificate is "
"from {host}. This may cause a misconfiguration or an attacker "
"intercepting your connection.",
host=self.sslContextFactory.host,
)
return self.remote_client.disconnect()
pb.PBClientFactory.clientConnectionMade(self, broker)
protocol.ReconnectingClientFactory.resetDelay(self)
self.remote_client.log.info("Successfully connected")
self.remote_client.log.info("Authenticating")
auth_token = None
try:
auth_token = AccountClient().fetch_authentication_token()
except Exception as e: # pylint:disable=broad-except
d = defer.Deferred()
d.addErrback(self.clientAuthorizationFailed)
d.errback(pb.Error(e))
return d
d = self.login(
credentials.UsernamePassword(
auth_token.encode(),
get_host_id().encode(),
),
client=self.remote_client,
)
d.addCallback(self.remote_client.cb_client_authorization_made)
d.addErrback(self.clientAuthorizationFailed)
return d
def clientAuthorizationFailed(self, err):
AccountClient.delete_local_session()
self.remote_client.cb_client_authorization_failed(err)
def clientConnectionFailed(self, connector, reason):
self.remote_client.log.warn(
"Could not connect to PIO Remote Cloud. Reconnecting..."
)
self.remote_client.cb_disconnected(reason)
protocol.ReconnectingClientFactory.clientConnectionFailed(
self, connector, reason
)
def clientConnectionLost( # pylint: disable=arguments-differ
self, connector, unused_reason
):
if not reactor.running:
self.remote_client.log.info("Successfully disconnected")
return
self.remote_client.log.warn(
"Connection is lost to PIO Remote Cloud. Reconnecting"
)
pb.PBClientFactory.clientConnectionLost(
self, connector, unused_reason, reconnecting=1
)
self.remote_client.cb_disconnected(unused_reason)
protocol.ReconnectingClientFactory.clientConnectionLost(
self, connector, unused_reason
)
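
RemoteClientFactory combines pb.PBClientFactory with ReconnectingClientFactory and logs in with a UsernamePassword credential built from the account token and host id. Below is a minimal standalone sketch of the underlying Perspective Broker login flow, assuming Twisted is installed; the endpoint and credentials are placeholders, not the real PIO Remote Cloud:

from twisted.cred import credentials
from twisted.internet import reactor
from twisted.spread import pb

factory = pb.PBClientFactory()
reactor.connectTCP("example.org", 8789, factory)  # placeholder endpoint
d = factory.login(credentials.UsernamePassword(b"auth-token", b"host-id"))
d.addCallback(lambda perspective: print("authorized:", perspective))
d.addErrback(lambda failure: print("authorization failed:", failure))
d.addBoth(lambda _: reactor.stop())
reactor.run()
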


@@ -0,0 +1,41 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import certifi
from OpenSSL import SSL # pylint: disable=import-error
from twisted.internet import ssl # pylint: disable=import-error
class SSLContextFactory(ssl.ClientContextFactory):
def __init__(self, host):
self.host = host
self.certificate_verified = False
def getContext(self):
ctx = super().getContext()
ctx.set_verify(
SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.verifyHostname
)
ctx.load_verify_locations(certifi.where())
return ctx
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments
self, connection, x509, errno, depth, status
):
cn = x509.get_subject().commonName
if cn.startswith("*"):
cn = cn[1:]
if self.host.endswith(cn):
self.certificate_verified = True
return status
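
The verifyHostname callback above implements a deliberately loose wildcard check: it strips a leading `*` from the certificate's common name and accepts the peer if the target host ends with the remainder. The same check as a pure function (illustrative name):

def cn_matches(host, cn):
    # "*.platformio.org" -> ".platformio.org", then a simple suffix match.
    if cn.startswith("*"):
        cn = cn[1:]
    return host.endswith(cn)

assert cn_matches("remote.platformio.org", "*.platformio.org")
assert not cn_matches("evil.example.com", "*.platformio.org")
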


@@ -0,0 +1,117 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tarfile
from binascii import crc32
from os.path import getmtime, getsize, isdir, isfile, join
from twisted.python import constants # pylint: disable=import-error
from platformio.compat import hashlib_encode_data
class PROJECT_SYNC_STAGE(constants.Flags):
INIT = constants.FlagConstant()
DBINDEX = constants.FlagConstant()
DELETE = constants.FlagConstant()
UPLOAD = constants.FlagConstant()
EXTRACTED = constants.FlagConstant()
COMPLETED = constants.FlagConstant()
class ProjectSync(object):
def __init__(self, path):
self.path = path
if not isdir(self.path):
os.makedirs(self.path)
self.items = []
self._db = {}
def add_item(self, path, relpath, cb_filter=None):
self.items.append((path, relpath, cb_filter))
def get_items(self):
return self.items
def rebuild_dbindex(self):
self._db = {}
for (path, relpath, cb_filter) in self.items:
if cb_filter and not cb_filter(path):
continue
self._insert_to_db(path, relpath)
if not isdir(path):
continue
for (root, _, files) in os.walk(path, followlinks=True):
for name in files:
self._insert_to_db(
join(root, name), join(relpath, root[len(path) + 1 :], name)
)
def _insert_to_db(self, path, relpath):
if not isfile(path):
return
index_hash = "%s-%s-%s" % (relpath, getmtime(path), getsize(path))
index = crc32(hashlib_encode_data(index_hash))
self._db[index] = (path, relpath)
def get_dbindex(self):
return list(self._db.keys())
def delete_dbindex(self, dbindex):
for index in dbindex:
if index not in self._db:
continue
path = self._db[index][0]
if isfile(path):
os.remove(path)
del self._db[index]
self.delete_empty_folders()
return True
def delete_empty_folders(self):
deleted = False
for item in self.items:
if not isdir(item[0]):
continue
for root, dirs, files in os.walk(item[0]):
if not dirs and not files and root != item[0]:
deleted = True
os.rmdir(root)
if deleted:
return self.delete_empty_folders()
return True
def compress_items(self, fileobj, dbindex, max_size):
compressed = []
total_size = 0
tar_opts = dict(fileobj=fileobj, mode="w:gz", bufsize=0, dereference=True)
with tarfile.open(**tar_opts) as tgz:
for index in dbindex:
compressed.append(index)
if index not in self._db:
continue
path, relpath = self._db[index]
tgz.add(path, relpath)
total_size += getsize(path)
if total_size > max_size:
break
return compressed
def decompress_items(self, fileobj):
fileobj.seek(0)
with tarfile.open(fileobj=fileobj, mode="r:gz") as tgz:
tgz.extractall(self.path)
return True
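
Each file tracked by ProjectSync is keyed by a crc32 of "relpath-mtime-size", so both sides can exchange plain lists of integers, delete stale entries, and upload only what is missing. A standalone sketch of that per-file index (hashlib_encode_data is replaced here by a plain .encode()):

import os
from binascii import crc32

def file_index(path, relpath):
    # The index changes whenever the file is renamed, resized, or touched,
    # which is exactly what should trigger a re-sync.
    key = "%s-%s-%s" % (relpath, os.path.getmtime(path), os.path.getsize(path))
    return crc32(key.encode())

print(file_index(__file__, os.path.basename(__file__)))
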


@@ -12,21 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import operator
import os
import shutil
from multiprocessing import cpu_count
from os import getcwd
from os.path import isfile
from time import time
import click
from tabulate import tabulate
from platformio import app, exception, fs, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.commands.run.helpers import clean_build_dir, handle_legacy_libdeps
from platformio.commands.run.processor import EnvironmentProcessor
from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING
from platformio.device.commands.monitor import device_monitor_cmd
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above
from platformio.project.helpers import find_project_dir_above, load_build_metadata
from platformio.test.runners.base import CTX_META_TEST_IS_RUNNING
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
@@ -36,14 +37,14 @@ except NotImplementedError:
DEFAULT_JOB_NUMS = 1
@click.command("run", short_help="Process project environments")
@click.command("run", short_help="Run project targets (build, upload, clean, etc.)")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
@@ -65,9 +66,17 @@ except NotImplementedError:
"Default is a number of CPUs in a system (N=%d)" % DEFAULT_JOB_NUMS
),
)
@click.option(
"-a",
"--program-arg",
"program_args",
multiple=True,
help="A program argument (multiple are allowed)",
)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("--list-targets", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--disable-auto-clean", is_flag=True)
@click.pass_context
def cli(
ctx,
@@ -77,25 +86,31 @@ def cli(
project_dir,
project_conf,
jobs,
program_args,
disable_auto_clean,
list_targets,
silent,
verbose,
disable_auto_clean,
):
app.set_session_var("custom_project_conf", project_conf)
# find project directory on upper level
if isfile(project_dir):
if os.path.isfile(project_dir):
project_dir = find_project_dir_above(project_dir)
is_test_running = CTX_META_TEST_IS_RUNNING in ctx.meta
results = []
with fs.cd(project_dir):
config = ProjectConfig.get_instance(project_conf)
config.validate(environment)
if list_targets:
return print_target_list(list(environment) or config.envs())
# clean obsolete build dir
if not disable_auto_clean:
build_dir = config.get_optional_dir("build")
build_dir = config.get("platformio", "build_dir")
try:
clean_build_dir(build_dir, config)
except: # pylint: disable=bare-except
@@ -108,7 +123,6 @@ def cli(
handle_legacy_libdeps(project_dir, config)
default_envs = config.default_envs()
results = []
for env in config.envs():
skipenv = any(
[
@@ -132,21 +146,25 @@ def cli(
environment,
target,
upload_port,
jobs,
program_args,
is_test_running,
silent,
verbose,
jobs,
is_test_running,
)
)
command_failed = any(r.get("succeeded") is False for r in results)
command_failed = any(r.get("succeeded") is False for r in results)
if not is_test_running and (command_failed or not silent) and len(results) > 1:
print_processing_summary(results)
if not is_test_running and (command_failed or not silent) and len(results) > 1:
print_processing_summary(results, verbose)
if command_failed:
raise exception.ReturnErrorCode(1)
return True
# Reset custom project config
app.set_session_var("custom_project_conf", None)
if command_failed:
raise exception.ReturnErrorCode(1)
return True
def process_env(
@@ -156,16 +174,25 @@ def process_env(
environments,
targets,
upload_port,
jobs,
program_args,
is_test_running,
silent,
verbose,
jobs,
is_test_running,
):
if not is_test_running and not silent:
print_processing_header(name, config, verbose)
ep = EnvironmentProcessor(
ctx, name, config, targets, upload_port, silent, verbose, jobs
ctx,
name,
config,
targets,
upload_port,
jobs,
program_args,
silent,
verbose,
)
result = {"env": name, "duration": time(), "succeeded": ep.process()}
result["duration"] = time() - result["duration"]
@@ -180,7 +207,7 @@ def process_env(
and "nobuild" not in ep.get_build_targets()
):
ctx.invoke(
cmd_device_monitor, environment=environments[0] if environments else None
device_monitor_cmd, environment=environments[0] if environments else None
)
return result
@@ -195,7 +222,7 @@ def print_processing_header(env, config, verbose=False):
"Processing %s (%s)"
% (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
)
terminal_width, _ = click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@@ -215,7 +242,7 @@ def print_processing_footer(result):
)
def print_processing_summary(results):
def print_processing_summary(results, verbose=False):
tabular_data = []
succeeded_nums = 0
failed_nums = 0
@@ -227,6 +254,8 @@ def print_processing_summary(results):
failed_nums += 1
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
if not verbose:
continue
status_str = "IGNORED"
else:
succeeded_nums += 1
@@ -261,3 +290,33 @@ def print_processing_summary(results):
is_error=failed_nums,
fg="red" if failed_nums else "green",
)
def print_target_list(envs):
tabular_data = []
for env, data in load_build_metadata(os.getcwd(), envs).items():
tabular_data.extend(
sorted(
[
(
click.style(env, fg="cyan"),
t["group"],
click.style(t.get("name"), fg="yellow"),
t["title"],
t.get("description"),
)
for t in data.get("targets", [])
],
key=operator.itemgetter(1, 2),
)
)
tabular_data.append((None, None, None, None, None))
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Group", "Name", "Title", "Description")
],
),
)
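
print_target_list() renders one row per target, grouped by environment. A minimal sketch of the resulting table with tabulate (the sample rows are made up; real rows come from load_build_metadata()):

import click
from tabulate import tabulate

rows = [
    (click.style("uno", fg="cyan"), "Platform",
     click.style("upload", fg="yellow"), "Upload", "Upload firmware"),
    (click.style("uno", fg="cyan"), "Generic",
     click.style("clean", fg="yellow"), "Clean", "Clean build artifacts"),
]
headers = [
    click.style(s, bold=True)
    for s in ("Environment", "Group", "Name", "Title", "Description")
]
click.echo(tabulate(rows, headers=headers))
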
